repo_id
stringclasses
875 values
size
int64
974
38.9k
file_path
stringlengths
10
308
content
stringlengths
974
38.9k
googleapis/google-cloud-java
36,020
java-channel/proto-google-cloud-channel-v1/src/main/java/com/google/cloud/channel/v1/BillableSku.java
/* * Copyright 2025 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ // Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/cloud/channel/v1/service.proto // Protobuf Java Version: 3.25.8 package com.google.cloud.channel.v1; /** * * * <pre> * Represents the Billable SKU information. * </pre> * * Protobuf type {@code google.cloud.channel.v1.BillableSku} */ public final class BillableSku extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.cloud.channel.v1.BillableSku) BillableSkuOrBuilder { private static final long serialVersionUID = 0L; // Use BillableSku.newBuilder() to construct. 
private BillableSku(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private BillableSku() { sku_ = ""; skuDisplayName_ = ""; service_ = ""; serviceDisplayName_ = ""; } @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance(UnusedPrivateParameter unused) { return new BillableSku(); } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.channel.v1.ServiceProto .internal_static_google_cloud_channel_v1_BillableSku_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.channel.v1.ServiceProto .internal_static_google_cloud_channel_v1_BillableSku_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.channel.v1.BillableSku.class, com.google.cloud.channel.v1.BillableSku.Builder.class); } public static final int SKU_FIELD_NUMBER = 1; @SuppressWarnings("serial") private volatile java.lang.Object sku_ = ""; /** * * * <pre> * Resource name of Billable SKU. Format: * billableSkus/{sku}. * Example: * billableSkus/6E1B-6634-470F". * </pre> * * <code>string sku = 1;</code> * * @return The sku. */ @java.lang.Override public java.lang.String getSku() { java.lang.Object ref = sku_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); sku_ = s; return s; } } /** * * * <pre> * Resource name of Billable SKU. Format: * billableSkus/{sku}. * Example: * billableSkus/6E1B-6634-470F". * </pre> * * <code>string sku = 1;</code> * * @return The bytes for sku. 
*/ @java.lang.Override public com.google.protobuf.ByteString getSkuBytes() { java.lang.Object ref = sku_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); sku_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } public static final int SKU_DISPLAY_NAME_FIELD_NUMBER = 2; @SuppressWarnings("serial") private volatile java.lang.Object skuDisplayName_ = ""; /** * * * <pre> * Unique human readable name for the SKU. * </pre> * * <code>string sku_display_name = 2;</code> * * @return The skuDisplayName. */ @java.lang.Override public java.lang.String getSkuDisplayName() { java.lang.Object ref = skuDisplayName_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); skuDisplayName_ = s; return s; } } /** * * * <pre> * Unique human readable name for the SKU. * </pre> * * <code>string sku_display_name = 2;</code> * * @return The bytes for skuDisplayName. */ @java.lang.Override public com.google.protobuf.ByteString getSkuDisplayNameBytes() { java.lang.Object ref = skuDisplayName_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); skuDisplayName_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } public static final int SERVICE_FIELD_NUMBER = 3; @SuppressWarnings("serial") private volatile java.lang.Object service_ = ""; /** * * * <pre> * Resource name of Service which contains Repricing SKU. Format: * services/{service}. * Example: * "services/B7D9-FDCB-15D8". * </pre> * * <code>string service = 3;</code> * * @return The service. 
*/ @java.lang.Override public java.lang.String getService() { java.lang.Object ref = service_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); service_ = s; return s; } } /** * * * <pre> * Resource name of Service which contains Repricing SKU. Format: * services/{service}. * Example: * "services/B7D9-FDCB-15D8". * </pre> * * <code>string service = 3;</code> * * @return The bytes for service. */ @java.lang.Override public com.google.protobuf.ByteString getServiceBytes() { java.lang.Object ref = service_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); service_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } public static final int SERVICE_DISPLAY_NAME_FIELD_NUMBER = 4; @SuppressWarnings("serial") private volatile java.lang.Object serviceDisplayName_ = ""; /** * * * <pre> * Unique human readable name for the Service. * </pre> * * <code>string service_display_name = 4;</code> * * @return The serviceDisplayName. */ @java.lang.Override public java.lang.String getServiceDisplayName() { java.lang.Object ref = serviceDisplayName_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); serviceDisplayName_ = s; return s; } } /** * * * <pre> * Unique human readable name for the Service. * </pre> * * <code>string service_display_name = 4;</code> * * @return The bytes for serviceDisplayName. 
*/ @java.lang.Override public com.google.protobuf.ByteString getServiceDisplayNameBytes() { java.lang.Object ref = serviceDisplayName_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); serviceDisplayName_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(sku_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 1, sku_); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(skuDisplayName_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 2, skuDisplayName_); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(service_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 3, service_); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(serviceDisplayName_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 4, serviceDisplayName_); } getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(sku_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, sku_); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(skuDisplayName_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, skuDisplayName_); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(service_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(3, service_); } if 
(!com.google.protobuf.GeneratedMessageV3.isStringEmpty(serviceDisplayName_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(4, serviceDisplayName_); } size += getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof com.google.cloud.channel.v1.BillableSku)) { return super.equals(obj); } com.google.cloud.channel.v1.BillableSku other = (com.google.cloud.channel.v1.BillableSku) obj; if (!getSku().equals(other.getSku())) return false; if (!getSkuDisplayName().equals(other.getSkuDisplayName())) return false; if (!getService().equals(other.getService())) return false; if (!getServiceDisplayName().equals(other.getServiceDisplayName())) return false; if (!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); hash = (37 * hash) + SKU_FIELD_NUMBER; hash = (53 * hash) + getSku().hashCode(); hash = (37 * hash) + SKU_DISPLAY_NAME_FIELD_NUMBER; hash = (53 * hash) + getSkuDisplayName().hashCode(); hash = (37 * hash) + SERVICE_FIELD_NUMBER; hash = (53 * hash) + getService().hashCode(); hash = (37 * hash) + SERVICE_DISPLAY_NAME_FIELD_NUMBER; hash = (53 * hash) + getServiceDisplayName().hashCode(); hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static com.google.cloud.channel.v1.BillableSku parseFrom(java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.channel.v1.BillableSku parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } 
public static com.google.cloud.channel.v1.BillableSku parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.channel.v1.BillableSku parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.channel.v1.BillableSku parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.channel.v1.BillableSku parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.channel.v1.BillableSku parseFrom(java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.channel.v1.BillableSku parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.channel.v1.BillableSku parseDelimitedFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); } public static com.google.cloud.channel.v1.BillableSku parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.channel.v1.BillableSku parseFrom( 
com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.channel.v1.BillableSku parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder(com.google.cloud.channel.v1.BillableSku prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * * * <pre> * Represents the Billable SKU information. 
* </pre> * * Protobuf type {@code google.cloud.channel.v1.BillableSku} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:google.cloud.channel.v1.BillableSku) com.google.cloud.channel.v1.BillableSkuOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.channel.v1.ServiceProto .internal_static_google_cloud_channel_v1_BillableSku_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.channel.v1.ServiceProto .internal_static_google_cloud_channel_v1_BillableSku_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.channel.v1.BillableSku.class, com.google.cloud.channel.v1.BillableSku.Builder.class); } // Construct using com.google.cloud.channel.v1.BillableSku.newBuilder() private Builder() {} private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); } @java.lang.Override public Builder clear() { super.clear(); bitField0_ = 0; sku_ = ""; skuDisplayName_ = ""; service_ = ""; serviceDisplayName_ = ""; return this; } @java.lang.Override public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return com.google.cloud.channel.v1.ServiceProto .internal_static_google_cloud_channel_v1_BillableSku_descriptor; } @java.lang.Override public com.google.cloud.channel.v1.BillableSku getDefaultInstanceForType() { return com.google.cloud.channel.v1.BillableSku.getDefaultInstance(); } @java.lang.Override public com.google.cloud.channel.v1.BillableSku build() { com.google.cloud.channel.v1.BillableSku result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public com.google.cloud.channel.v1.BillableSku buildPartial() { com.google.cloud.channel.v1.BillableSku result = 
new com.google.cloud.channel.v1.BillableSku(this); if (bitField0_ != 0) { buildPartial0(result); } onBuilt(); return result; } private void buildPartial0(com.google.cloud.channel.v1.BillableSku result) { int from_bitField0_ = bitField0_; if (((from_bitField0_ & 0x00000001) != 0)) { result.sku_ = sku_; } if (((from_bitField0_ & 0x00000002) != 0)) { result.skuDisplayName_ = skuDisplayName_; } if (((from_bitField0_ & 0x00000004) != 0)) { result.service_ = service_; } if (((from_bitField0_ & 0x00000008) != 0)) { result.serviceDisplayName_ = serviceDisplayName_; } } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof com.google.cloud.channel.v1.BillableSku) { return mergeFrom((com.google.cloud.channel.v1.BillableSku) other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(com.google.cloud.channel.v1.BillableSku other) { if (other == com.google.cloud.channel.v1.BillableSku.getDefaultInstance()) return this; if (!other.getSku().isEmpty()) { sku_ = other.sku_; bitField0_ |= 0x00000001; onChanged(); } if 
(!other.getSkuDisplayName().isEmpty()) { skuDisplayName_ = other.skuDisplayName_; bitField0_ |= 0x00000002; onChanged(); } if (!other.getService().isEmpty()) { service_ = other.service_; bitField0_ |= 0x00000004; onChanged(); } if (!other.getServiceDisplayName().isEmpty()) { serviceDisplayName_ = other.serviceDisplayName_; bitField0_ |= 0x00000008; onChanged(); } this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { sku_ = input.readStringRequireUtf8(); bitField0_ |= 0x00000001; break; } // case 10 case 18: { skuDisplayName_ = input.readStringRequireUtf8(); bitField0_ |= 0x00000002; break; } // case 18 case 26: { service_ = input.readStringRequireUtf8(); bitField0_ |= 0x00000004; break; } // case 26 case 34: { serviceDisplayName_ = input.readStringRequireUtf8(); bitField0_ |= 0x00000008; break; } // case 34 default: { if (!super.parseUnknownField(input, extensionRegistry, tag)) { done = true; // was an endgroup tag } break; } // default: } // switch (tag) } // while (!done) } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.unwrapIOException(); } finally { onChanged(); } // finally return this; } private int bitField0_; private java.lang.Object sku_ = ""; /** * * * <pre> * Resource name of Billable SKU. Format: * billableSkus/{sku}. * Example: * billableSkus/6E1B-6634-470F". * </pre> * * <code>string sku = 1;</code> * * @return The sku. 
*/ public java.lang.String getSku() { java.lang.Object ref = sku_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); sku_ = s; return s; } else { return (java.lang.String) ref; } } /** * * * <pre> * Resource name of Billable SKU. Format: * billableSkus/{sku}. * Example: * billableSkus/6E1B-6634-470F". * </pre> * * <code>string sku = 1;</code> * * @return The bytes for sku. */ public com.google.protobuf.ByteString getSkuBytes() { java.lang.Object ref = sku_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); sku_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * * * <pre> * Resource name of Billable SKU. Format: * billableSkus/{sku}. * Example: * billableSkus/6E1B-6634-470F". * </pre> * * <code>string sku = 1;</code> * * @param value The sku to set. * @return This builder for chaining. */ public Builder setSku(java.lang.String value) { if (value == null) { throw new NullPointerException(); } sku_ = value; bitField0_ |= 0x00000001; onChanged(); return this; } /** * * * <pre> * Resource name of Billable SKU. Format: * billableSkus/{sku}. * Example: * billableSkus/6E1B-6634-470F". * </pre> * * <code>string sku = 1;</code> * * @return This builder for chaining. */ public Builder clearSku() { sku_ = getDefaultInstance().getSku(); bitField0_ = (bitField0_ & ~0x00000001); onChanged(); return this; } /** * * * <pre> * Resource name of Billable SKU. Format: * billableSkus/{sku}. * Example: * billableSkus/6E1B-6634-470F". * </pre> * * <code>string sku = 1;</code> * * @param value The bytes for sku to set. * @return This builder for chaining. 
*/ public Builder setSkuBytes(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); sku_ = value; bitField0_ |= 0x00000001; onChanged(); return this; } private java.lang.Object skuDisplayName_ = ""; /** * * * <pre> * Unique human readable name for the SKU. * </pre> * * <code>string sku_display_name = 2;</code> * * @return The skuDisplayName. */ public java.lang.String getSkuDisplayName() { java.lang.Object ref = skuDisplayName_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); skuDisplayName_ = s; return s; } else { return (java.lang.String) ref; } } /** * * * <pre> * Unique human readable name for the SKU. * </pre> * * <code>string sku_display_name = 2;</code> * * @return The bytes for skuDisplayName. */ public com.google.protobuf.ByteString getSkuDisplayNameBytes() { java.lang.Object ref = skuDisplayName_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); skuDisplayName_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * * * <pre> * Unique human readable name for the SKU. * </pre> * * <code>string sku_display_name = 2;</code> * * @param value The skuDisplayName to set. * @return This builder for chaining. */ public Builder setSkuDisplayName(java.lang.String value) { if (value == null) { throw new NullPointerException(); } skuDisplayName_ = value; bitField0_ |= 0x00000002; onChanged(); return this; } /** * * * <pre> * Unique human readable name for the SKU. * </pre> * * <code>string sku_display_name = 2;</code> * * @return This builder for chaining. */ public Builder clearSkuDisplayName() { skuDisplayName_ = getDefaultInstance().getSkuDisplayName(); bitField0_ = (bitField0_ & ~0x00000002); onChanged(); return this; } /** * * * <pre> * Unique human readable name for the SKU. 
* </pre> * * <code>string sku_display_name = 2;</code> * * @param value The bytes for skuDisplayName to set. * @return This builder for chaining. */ public Builder setSkuDisplayNameBytes(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); skuDisplayName_ = value; bitField0_ |= 0x00000002; onChanged(); return this; } private java.lang.Object service_ = ""; /** * * * <pre> * Resource name of Service which contains Repricing SKU. Format: * services/{service}. * Example: * "services/B7D9-FDCB-15D8". * </pre> * * <code>string service = 3;</code> * * @return The service. */ public java.lang.String getService() { java.lang.Object ref = service_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); service_ = s; return s; } else { return (java.lang.String) ref; } } /** * * * <pre> * Resource name of Service which contains Repricing SKU. Format: * services/{service}. * Example: * "services/B7D9-FDCB-15D8". * </pre> * * <code>string service = 3;</code> * * @return The bytes for service. */ public com.google.protobuf.ByteString getServiceBytes() { java.lang.Object ref = service_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); service_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * * * <pre> * Resource name of Service which contains Repricing SKU. Format: * services/{service}. * Example: * "services/B7D9-FDCB-15D8". * </pre> * * <code>string service = 3;</code> * * @param value The service to set. * @return This builder for chaining. */ public Builder setService(java.lang.String value) { if (value == null) { throw new NullPointerException(); } service_ = value; bitField0_ |= 0x00000004; onChanged(); return this; } /** * * * <pre> * Resource name of Service which contains Repricing SKU. 
Format: * services/{service}. * Example: * "services/B7D9-FDCB-15D8". * </pre> * * <code>string service = 3;</code> * * @return This builder for chaining. */ public Builder clearService() { service_ = getDefaultInstance().getService(); bitField0_ = (bitField0_ & ~0x00000004); onChanged(); return this; } /** * * * <pre> * Resource name of Service which contains Repricing SKU. Format: * services/{service}. * Example: * "services/B7D9-FDCB-15D8". * </pre> * * <code>string service = 3;</code> * * @param value The bytes for service to set. * @return This builder for chaining. */ public Builder setServiceBytes(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); service_ = value; bitField0_ |= 0x00000004; onChanged(); return this; } private java.lang.Object serviceDisplayName_ = ""; /** * * * <pre> * Unique human readable name for the Service. * </pre> * * <code>string service_display_name = 4;</code> * * @return The serviceDisplayName. */ public java.lang.String getServiceDisplayName() { java.lang.Object ref = serviceDisplayName_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); serviceDisplayName_ = s; return s; } else { return (java.lang.String) ref; } } /** * * * <pre> * Unique human readable name for the Service. * </pre> * * <code>string service_display_name = 4;</code> * * @return The bytes for serviceDisplayName. */ public com.google.protobuf.ByteString getServiceDisplayNameBytes() { java.lang.Object ref = serviceDisplayName_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); serviceDisplayName_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * * * <pre> * Unique human readable name for the Service. 
* </pre> * * <code>string service_display_name = 4;</code> * * @param value The serviceDisplayName to set. * @return This builder for chaining. */ public Builder setServiceDisplayName(java.lang.String value) { if (value == null) { throw new NullPointerException(); } serviceDisplayName_ = value; bitField0_ |= 0x00000008; onChanged(); return this; } /** * * * <pre> * Unique human readable name for the Service. * </pre> * * <code>string service_display_name = 4;</code> * * @return This builder for chaining. */ public Builder clearServiceDisplayName() { serviceDisplayName_ = getDefaultInstance().getServiceDisplayName(); bitField0_ = (bitField0_ & ~0x00000008); onChanged(); return this; } /** * * * <pre> * Unique human readable name for the Service. * </pre> * * <code>string service_display_name = 4;</code> * * @param value The bytes for serviceDisplayName to set. * @return This builder for chaining. */ public Builder setServiceDisplayNameBytes(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); serviceDisplayName_ = value; bitField0_ |= 0x00000008; onChanged(); return this; } @java.lang.Override public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:google.cloud.channel.v1.BillableSku) } // @@protoc_insertion_point(class_scope:google.cloud.channel.v1.BillableSku) private static final com.google.cloud.channel.v1.BillableSku DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new com.google.cloud.channel.v1.BillableSku(); } public static com.google.cloud.channel.v1.BillableSku getDefaultInstance() { return DEFAULT_INSTANCE; } private static final com.google.protobuf.Parser<BillableSku> PARSER = new 
com.google.protobuf.AbstractParser<BillableSku>() { @java.lang.Override public BillableSku parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { Builder builder = newBuilder(); try { builder.mergeFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(builder.buildPartial()); } catch (com.google.protobuf.UninitializedMessageException e) { throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException(e) .setUnfinishedMessage(builder.buildPartial()); } return builder.buildPartial(); } }; public static com.google.protobuf.Parser<BillableSku> parser() { return PARSER; } @java.lang.Override public com.google.protobuf.Parser<BillableSku> getParserForType() { return PARSER; } @java.lang.Override public com.google.cloud.channel.v1.BillableSku getDefaultInstanceForType() { return DEFAULT_INSTANCE; } }
googleapis/google-cloud-java
36,095
java-deploy/proto-google-cloud-deploy-v1/src/main/java/com/google/cloud/deploy/v1/CustomTargetTypeNotificationEvent.java
/* * Copyright 2025 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ // Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/cloud/deploy/v1/customtargettype_notification_payload.proto // Protobuf Java Version: 3.25.8 package com.google.cloud.deploy.v1; /** * * * <pre> * Payload proto for "clouddeploy.googleapis.com/customtargettype_notification" * Platform Log event that describes the failure to send a custom target type * status change Pub/Sub notification. * </pre> * * Protobuf type {@code google.cloud.deploy.v1.CustomTargetTypeNotificationEvent} */ public final class CustomTargetTypeNotificationEvent extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.cloud.deploy.v1.CustomTargetTypeNotificationEvent) CustomTargetTypeNotificationEventOrBuilder { private static final long serialVersionUID = 0L; // Use CustomTargetTypeNotificationEvent.newBuilder() to construct. 
private CustomTargetTypeNotificationEvent( com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private CustomTargetTypeNotificationEvent() { message_ = ""; customTargetTypeUid_ = ""; customTargetType_ = ""; type_ = 0; } @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance(UnusedPrivateParameter unused) { return new CustomTargetTypeNotificationEvent(); } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.deploy.v1.CustomTargetTypeNotificationPayloadProto .internal_static_google_cloud_deploy_v1_CustomTargetTypeNotificationEvent_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.deploy.v1.CustomTargetTypeNotificationPayloadProto .internal_static_google_cloud_deploy_v1_CustomTargetTypeNotificationEvent_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.deploy.v1.CustomTargetTypeNotificationEvent.class, com.google.cloud.deploy.v1.CustomTargetTypeNotificationEvent.Builder.class); } public static final int MESSAGE_FIELD_NUMBER = 1; @SuppressWarnings("serial") private volatile java.lang.Object message_ = ""; /** * * * <pre> * Debug message for when a notification fails to send. * </pre> * * <code>string message = 1;</code> * * @return The message. */ @java.lang.Override public java.lang.String getMessage() { java.lang.Object ref = message_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); message_ = s; return s; } } /** * * * <pre> * Debug message for when a notification fails to send. * </pre> * * <code>string message = 1;</code> * * @return The bytes for message. 
*/ @java.lang.Override public com.google.protobuf.ByteString getMessageBytes() { java.lang.Object ref = message_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); message_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } public static final int CUSTOM_TARGET_TYPE_UID_FIELD_NUMBER = 4; @SuppressWarnings("serial") private volatile java.lang.Object customTargetTypeUid_ = ""; /** * * * <pre> * Unique identifier of the `CustomTargetType`. * </pre> * * <code>string custom_target_type_uid = 4;</code> * * @return The customTargetTypeUid. */ @java.lang.Override public java.lang.String getCustomTargetTypeUid() { java.lang.Object ref = customTargetTypeUid_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); customTargetTypeUid_ = s; return s; } } /** * * * <pre> * Unique identifier of the `CustomTargetType`. * </pre> * * <code>string custom_target_type_uid = 4;</code> * * @return The bytes for customTargetTypeUid. */ @java.lang.Override public com.google.protobuf.ByteString getCustomTargetTypeUidBytes() { java.lang.Object ref = customTargetTypeUid_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); customTargetTypeUid_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } public static final int CUSTOM_TARGET_TYPE_FIELD_NUMBER = 2; @SuppressWarnings("serial") private volatile java.lang.Object customTargetType_ = ""; /** * * * <pre> * The name of the `CustomTargetType`. * </pre> * * <code>string custom_target_type = 2;</code> * * @return The customTargetType. 
*/ @java.lang.Override public java.lang.String getCustomTargetType() { java.lang.Object ref = customTargetType_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); customTargetType_ = s; return s; } } /** * * * <pre> * The name of the `CustomTargetType`. * </pre> * * <code>string custom_target_type = 2;</code> * * @return The bytes for customTargetType. */ @java.lang.Override public com.google.protobuf.ByteString getCustomTargetTypeBytes() { java.lang.Object ref = customTargetType_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); customTargetType_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } public static final int TYPE_FIELD_NUMBER = 3; private int type_ = 0; /** * * * <pre> * Type of this notification, e.g. for a Pub/Sub failure. * </pre> * * <code>.google.cloud.deploy.v1.Type type = 3;</code> * * @return The enum numeric value on the wire for type. */ @java.lang.Override public int getTypeValue() { return type_; } /** * * * <pre> * Type of this notification, e.g. for a Pub/Sub failure. * </pre> * * <code>.google.cloud.deploy.v1.Type type = 3;</code> * * @return The type. */ @java.lang.Override public com.google.cloud.deploy.v1.Type getType() { com.google.cloud.deploy.v1.Type result = com.google.cloud.deploy.v1.Type.forNumber(type_); return result == null ? 
com.google.cloud.deploy.v1.Type.UNRECOGNIZED : result; } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(message_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 1, message_); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(customTargetType_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 2, customTargetType_); } if (type_ != com.google.cloud.deploy.v1.Type.TYPE_UNSPECIFIED.getNumber()) { output.writeEnum(3, type_); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(customTargetTypeUid_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 4, customTargetTypeUid_); } getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(message_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, message_); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(customTargetType_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, customTargetType_); } if (type_ != com.google.cloud.deploy.v1.Type.TYPE_UNSPECIFIED.getNumber()) { size += com.google.protobuf.CodedOutputStream.computeEnumSize(3, type_); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(customTargetTypeUid_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(4, customTargetTypeUid_); } size += getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { 
return true; } if (!(obj instanceof com.google.cloud.deploy.v1.CustomTargetTypeNotificationEvent)) { return super.equals(obj); } com.google.cloud.deploy.v1.CustomTargetTypeNotificationEvent other = (com.google.cloud.deploy.v1.CustomTargetTypeNotificationEvent) obj; if (!getMessage().equals(other.getMessage())) return false; if (!getCustomTargetTypeUid().equals(other.getCustomTargetTypeUid())) return false; if (!getCustomTargetType().equals(other.getCustomTargetType())) return false; if (type_ != other.type_) return false; if (!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); hash = (37 * hash) + MESSAGE_FIELD_NUMBER; hash = (53 * hash) + getMessage().hashCode(); hash = (37 * hash) + CUSTOM_TARGET_TYPE_UID_FIELD_NUMBER; hash = (53 * hash) + getCustomTargetTypeUid().hashCode(); hash = (37 * hash) + CUSTOM_TARGET_TYPE_FIELD_NUMBER; hash = (53 * hash) + getCustomTargetType().hashCode(); hash = (37 * hash) + TYPE_FIELD_NUMBER; hash = (53 * hash) + type_; hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static com.google.cloud.deploy.v1.CustomTargetTypeNotificationEvent parseFrom( java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.deploy.v1.CustomTargetTypeNotificationEvent parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.deploy.v1.CustomTargetTypeNotificationEvent parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static 
com.google.cloud.deploy.v1.CustomTargetTypeNotificationEvent parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.deploy.v1.CustomTargetTypeNotificationEvent parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.deploy.v1.CustomTargetTypeNotificationEvent parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.deploy.v1.CustomTargetTypeNotificationEvent parseFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.deploy.v1.CustomTargetTypeNotificationEvent parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.deploy.v1.CustomTargetTypeNotificationEvent parseDelimitedFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); } public static com.google.cloud.deploy.v1.CustomTargetTypeNotificationEvent parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.deploy.v1.CustomTargetTypeNotificationEvent parseFrom( com.google.protobuf.CodedInputStream input) throws 
java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.deploy.v1.CustomTargetTypeNotificationEvent parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder( com.google.cloud.deploy.v1.CustomTargetTypeNotificationEvent prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * * * <pre> * Payload proto for "clouddeploy.googleapis.com/customtargettype_notification" * Platform Log event that describes the failure to send a custom target type * status change Pub/Sub notification. 
* </pre> * * Protobuf type {@code google.cloud.deploy.v1.CustomTargetTypeNotificationEvent} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:google.cloud.deploy.v1.CustomTargetTypeNotificationEvent) com.google.cloud.deploy.v1.CustomTargetTypeNotificationEventOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.deploy.v1.CustomTargetTypeNotificationPayloadProto .internal_static_google_cloud_deploy_v1_CustomTargetTypeNotificationEvent_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.deploy.v1.CustomTargetTypeNotificationPayloadProto .internal_static_google_cloud_deploy_v1_CustomTargetTypeNotificationEvent_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.deploy.v1.CustomTargetTypeNotificationEvent.class, com.google.cloud.deploy.v1.CustomTargetTypeNotificationEvent.Builder.class); } // Construct using com.google.cloud.deploy.v1.CustomTargetTypeNotificationEvent.newBuilder() private Builder() {} private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); } @java.lang.Override public Builder clear() { super.clear(); bitField0_ = 0; message_ = ""; customTargetTypeUid_ = ""; customTargetType_ = ""; type_ = 0; return this; } @java.lang.Override public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return com.google.cloud.deploy.v1.CustomTargetTypeNotificationPayloadProto .internal_static_google_cloud_deploy_v1_CustomTargetTypeNotificationEvent_descriptor; } @java.lang.Override public com.google.cloud.deploy.v1.CustomTargetTypeNotificationEvent getDefaultInstanceForType() { return com.google.cloud.deploy.v1.CustomTargetTypeNotificationEvent.getDefaultInstance(); } @java.lang.Override public 
com.google.cloud.deploy.v1.CustomTargetTypeNotificationEvent build() { com.google.cloud.deploy.v1.CustomTargetTypeNotificationEvent result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public com.google.cloud.deploy.v1.CustomTargetTypeNotificationEvent buildPartial() { com.google.cloud.deploy.v1.CustomTargetTypeNotificationEvent result = new com.google.cloud.deploy.v1.CustomTargetTypeNotificationEvent(this); if (bitField0_ != 0) { buildPartial0(result); } onBuilt(); return result; } private void buildPartial0( com.google.cloud.deploy.v1.CustomTargetTypeNotificationEvent result) { int from_bitField0_ = bitField0_; if (((from_bitField0_ & 0x00000001) != 0)) { result.message_ = message_; } if (((from_bitField0_ & 0x00000002) != 0)) { result.customTargetTypeUid_ = customTargetTypeUid_; } if (((from_bitField0_ & 0x00000004) != 0)) { result.customTargetType_ = customTargetType_; } if (((from_bitField0_ & 0x00000008) != 0)) { result.type_ = type_; } } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder 
mergeFrom(com.google.protobuf.Message other) { if (other instanceof com.google.cloud.deploy.v1.CustomTargetTypeNotificationEvent) { return mergeFrom((com.google.cloud.deploy.v1.CustomTargetTypeNotificationEvent) other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(com.google.cloud.deploy.v1.CustomTargetTypeNotificationEvent other) { if (other == com.google.cloud.deploy.v1.CustomTargetTypeNotificationEvent.getDefaultInstance()) return this; if (!other.getMessage().isEmpty()) { message_ = other.message_; bitField0_ |= 0x00000001; onChanged(); } if (!other.getCustomTargetTypeUid().isEmpty()) { customTargetTypeUid_ = other.customTargetTypeUid_; bitField0_ |= 0x00000002; onChanged(); } if (!other.getCustomTargetType().isEmpty()) { customTargetType_ = other.customTargetType_; bitField0_ |= 0x00000004; onChanged(); } if (other.type_ != 0) { setTypeValue(other.getTypeValue()); } this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { message_ = input.readStringRequireUtf8(); bitField0_ |= 0x00000001; break; } // case 10 case 18: { customTargetType_ = input.readStringRequireUtf8(); bitField0_ |= 0x00000004; break; } // case 18 case 24: { type_ = input.readEnum(); bitField0_ |= 0x00000008; break; } // case 24 case 34: { customTargetTypeUid_ = input.readStringRequireUtf8(); bitField0_ |= 0x00000002; break; } // case 34 default: { if (!super.parseUnknownField(input, extensionRegistry, tag)) { done = true; // was an endgroup tag } break; } // default: } // switch (tag) } // 
while (!done) } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.unwrapIOException(); } finally { onChanged(); } // finally return this; } private int bitField0_; private java.lang.Object message_ = ""; /** * * * <pre> * Debug message for when a notification fails to send. * </pre> * * <code>string message = 1;</code> * * @return The message. */ public java.lang.String getMessage() { java.lang.Object ref = message_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); message_ = s; return s; } else { return (java.lang.String) ref; } } /** * * * <pre> * Debug message for when a notification fails to send. * </pre> * * <code>string message = 1;</code> * * @return The bytes for message. */ public com.google.protobuf.ByteString getMessageBytes() { java.lang.Object ref = message_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); message_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * * * <pre> * Debug message for when a notification fails to send. * </pre> * * <code>string message = 1;</code> * * @param value The message to set. * @return This builder for chaining. */ public Builder setMessage(java.lang.String value) { if (value == null) { throw new NullPointerException(); } message_ = value; bitField0_ |= 0x00000001; onChanged(); return this; } /** * * * <pre> * Debug message for when a notification fails to send. * </pre> * * <code>string message = 1;</code> * * @return This builder for chaining. */ public Builder clearMessage() { message_ = getDefaultInstance().getMessage(); bitField0_ = (bitField0_ & ~0x00000001); onChanged(); return this; } /** * * * <pre> * Debug message for when a notification fails to send. * </pre> * * <code>string message = 1;</code> * * @param value The bytes for message to set. 
* @return This builder for chaining. */ public Builder setMessageBytes(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); message_ = value; bitField0_ |= 0x00000001; onChanged(); return this; } private java.lang.Object customTargetTypeUid_ = ""; /** * * * <pre> * Unique identifier of the `CustomTargetType`. * </pre> * * <code>string custom_target_type_uid = 4;</code> * * @return The customTargetTypeUid. */ public java.lang.String getCustomTargetTypeUid() { java.lang.Object ref = customTargetTypeUid_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); customTargetTypeUid_ = s; return s; } else { return (java.lang.String) ref; } } /** * * * <pre> * Unique identifier of the `CustomTargetType`. * </pre> * * <code>string custom_target_type_uid = 4;</code> * * @return The bytes for customTargetTypeUid. */ public com.google.protobuf.ByteString getCustomTargetTypeUidBytes() { java.lang.Object ref = customTargetTypeUid_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); customTargetTypeUid_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * * * <pre> * Unique identifier of the `CustomTargetType`. * </pre> * * <code>string custom_target_type_uid = 4;</code> * * @param value The customTargetTypeUid to set. * @return This builder for chaining. */ public Builder setCustomTargetTypeUid(java.lang.String value) { if (value == null) { throw new NullPointerException(); } customTargetTypeUid_ = value; bitField0_ |= 0x00000002; onChanged(); return this; } /** * * * <pre> * Unique identifier of the `CustomTargetType`. * </pre> * * <code>string custom_target_type_uid = 4;</code> * * @return This builder for chaining. 
*/ public Builder clearCustomTargetTypeUid() { customTargetTypeUid_ = getDefaultInstance().getCustomTargetTypeUid(); bitField0_ = (bitField0_ & ~0x00000002); onChanged(); return this; } /** * * * <pre> * Unique identifier of the `CustomTargetType`. * </pre> * * <code>string custom_target_type_uid = 4;</code> * * @param value The bytes for customTargetTypeUid to set. * @return This builder for chaining. */ public Builder setCustomTargetTypeUidBytes(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); customTargetTypeUid_ = value; bitField0_ |= 0x00000002; onChanged(); return this; } private java.lang.Object customTargetType_ = ""; /** * * * <pre> * The name of the `CustomTargetType`. * </pre> * * <code>string custom_target_type = 2;</code> * * @return The customTargetType. */ public java.lang.String getCustomTargetType() { java.lang.Object ref = customTargetType_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); customTargetType_ = s; return s; } else { return (java.lang.String) ref; } } /** * * * <pre> * The name of the `CustomTargetType`. * </pre> * * <code>string custom_target_type = 2;</code> * * @return The bytes for customTargetType. */ public com.google.protobuf.ByteString getCustomTargetTypeBytes() { java.lang.Object ref = customTargetType_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); customTargetType_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * * * <pre> * The name of the `CustomTargetType`. * </pre> * * <code>string custom_target_type = 2;</code> * * @param value The customTargetType to set. * @return This builder for chaining. 
*/ public Builder setCustomTargetType(java.lang.String value) { if (value == null) { throw new NullPointerException(); } customTargetType_ = value; bitField0_ |= 0x00000004; onChanged(); return this; } /** * * * <pre> * The name of the `CustomTargetType`. * </pre> * * <code>string custom_target_type = 2;</code> * * @return This builder for chaining. */ public Builder clearCustomTargetType() { customTargetType_ = getDefaultInstance().getCustomTargetType(); bitField0_ = (bitField0_ & ~0x00000004); onChanged(); return this; } /** * * * <pre> * The name of the `CustomTargetType`. * </pre> * * <code>string custom_target_type = 2;</code> * * @param value The bytes for customTargetType to set. * @return This builder for chaining. */ public Builder setCustomTargetTypeBytes(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); customTargetType_ = value; bitField0_ |= 0x00000004; onChanged(); return this; } private int type_ = 0; /** * * * <pre> * Type of this notification, e.g. for a Pub/Sub failure. * </pre> * * <code>.google.cloud.deploy.v1.Type type = 3;</code> * * @return The enum numeric value on the wire for type. */ @java.lang.Override public int getTypeValue() { return type_; } /** * * * <pre> * Type of this notification, e.g. for a Pub/Sub failure. * </pre> * * <code>.google.cloud.deploy.v1.Type type = 3;</code> * * @param value The enum numeric value on the wire for type to set. * @return This builder for chaining. */ public Builder setTypeValue(int value) { type_ = value; bitField0_ |= 0x00000008; onChanged(); return this; } /** * * * <pre> * Type of this notification, e.g. for a Pub/Sub failure. * </pre> * * <code>.google.cloud.deploy.v1.Type type = 3;</code> * * @return The type. */ @java.lang.Override public com.google.cloud.deploy.v1.Type getType() { com.google.cloud.deploy.v1.Type result = com.google.cloud.deploy.v1.Type.forNumber(type_); return result == null ? 
com.google.cloud.deploy.v1.Type.UNRECOGNIZED : result; } /** * * * <pre> * Type of this notification, e.g. for a Pub/Sub failure. * </pre> * * <code>.google.cloud.deploy.v1.Type type = 3;</code> * * @param value The type to set. * @return This builder for chaining. */ public Builder setType(com.google.cloud.deploy.v1.Type value) { if (value == null) { throw new NullPointerException(); } bitField0_ |= 0x00000008; type_ = value.getNumber(); onChanged(); return this; } /** * * * <pre> * Type of this notification, e.g. for a Pub/Sub failure. * </pre> * * <code>.google.cloud.deploy.v1.Type type = 3;</code> * * @return This builder for chaining. */ public Builder clearType() { bitField0_ = (bitField0_ & ~0x00000008); type_ = 0; onChanged(); return this; } @java.lang.Override public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:google.cloud.deploy.v1.CustomTargetTypeNotificationEvent) } // @@protoc_insertion_point(class_scope:google.cloud.deploy.v1.CustomTargetTypeNotificationEvent) private static final com.google.cloud.deploy.v1.CustomTargetTypeNotificationEvent DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new com.google.cloud.deploy.v1.CustomTargetTypeNotificationEvent(); } public static com.google.cloud.deploy.v1.CustomTargetTypeNotificationEvent getDefaultInstance() { return DEFAULT_INSTANCE; } private static final com.google.protobuf.Parser<CustomTargetTypeNotificationEvent> PARSER = new com.google.protobuf.AbstractParser<CustomTargetTypeNotificationEvent>() { @java.lang.Override public CustomTargetTypeNotificationEvent parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws 
com.google.protobuf.InvalidProtocolBufferException { Builder builder = newBuilder(); try { builder.mergeFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(builder.buildPartial()); } catch (com.google.protobuf.UninitializedMessageException e) { throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException(e) .setUnfinishedMessage(builder.buildPartial()); } return builder.buildPartial(); } }; public static com.google.protobuf.Parser<CustomTargetTypeNotificationEvent> parser() { return PARSER; } @java.lang.Override public com.google.protobuf.Parser<CustomTargetTypeNotificationEvent> getParserForType() { return PARSER; } @java.lang.Override public com.google.cloud.deploy.v1.CustomTargetTypeNotificationEvent getDefaultInstanceForType() { return DEFAULT_INSTANCE; } }
googleapis/google-cloud-java
36,118
java-dataproc/proto-google-cloud-dataproc-v1/src/main/java/com/google/cloud/dataproc/v1/ListSessionTemplatesRequest.java
/*
 * Copyright 2025 Google LLC
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/dataproc/v1/session_templates.proto

// Protobuf Java Version: 3.25.8
package com.google.cloud.dataproc.v1;

// NOTE(review): protoc-generated file ("DO NOT EDIT"). The comments added below are review
// annotations only — any real change belongs in session_templates.proto followed by
// regeneration, since hand edits (including these comments) are lost when the file is
// regenerated.

/**
 *
 *
 * <pre>
 * A request to list session templates in a project.
 * </pre>
 *
 * Protobuf type {@code google.cloud.dataproc.v1.ListSessionTemplatesRequest}
 */
public final class ListSessionTemplatesRequest extends com.google.protobuf.GeneratedMessageV3
    implements
    // @@protoc_insertion_point(message_implements:google.cloud.dataproc.v1.ListSessionTemplatesRequest)
    ListSessionTemplatesRequestOrBuilder {
  private static final long serialVersionUID = 0L;

  // Use ListSessionTemplatesRequest.newBuilder() to construct.
  private ListSessionTemplatesRequest(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
    super(builder);
  }

  // Default-instance constructor: proto3 string fields start as the empty string.
  private ListSessionTemplatesRequest() {
    parent_ = "";
    pageToken_ = "";
    filter_ = "";
  }

  @java.lang.Override
  @SuppressWarnings({"unused"})
  protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
    return new ListSessionTemplatesRequest();
  }

  public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
    return com.google.cloud.dataproc.v1.SessionTemplatesProto
        .internal_static_google_cloud_dataproc_v1_ListSessionTemplatesRequest_descriptor;
  }

  @java.lang.Override
  protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internalGetFieldAccessorTable() {
    return com.google.cloud.dataproc.v1.SessionTemplatesProto
        .internal_static_google_cloud_dataproc_v1_ListSessionTemplatesRequest_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            com.google.cloud.dataproc.v1.ListSessionTemplatesRequest.class,
            com.google.cloud.dataproc.v1.ListSessionTemplatesRequest.Builder.class);
  }

  public static final int PARENT_FIELD_NUMBER = 1;

  // Holds either a java.lang.String or a ByteString; lazily converted and cached by the
  // accessors below (standard generated-code pattern for string fields).
  @SuppressWarnings("serial")
  private volatile java.lang.Object parent_ = "";

  /**
   *
   *
   * <pre>
   * Required. The parent that owns this collection of session templates.
   * </pre>
   *
   * <code>
   * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
   * </code>
   *
   * @return The parent.
   */
  @java.lang.Override
  public java.lang.String getParent() {
    java.lang.Object ref = parent_;
    if (ref instanceof java.lang.String) {
      return (java.lang.String) ref;
    } else {
      com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
      java.lang.String s = bs.toStringUtf8();
      parent_ = s;
      return s;
    }
  }

  /**
   *
   *
   * <pre>
   * Required. The parent that owns this collection of session templates.
   * </pre>
   *
   * <code>
   * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
   * </code>
   *
   * @return The bytes for parent.
   */
  @java.lang.Override
  public com.google.protobuf.ByteString getParentBytes() {
    java.lang.Object ref = parent_;
    if (ref instanceof java.lang.String) {
      com.google.protobuf.ByteString b =
          com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
      parent_ = b;
      return b;
    } else {
      return (com.google.protobuf.ByteString) ref;
    }
  }

  public static final int PAGE_SIZE_FIELD_NUMBER = 2;
  private int pageSize_ = 0;

  /**
   *
   *
   * <pre>
   * Optional. The maximum number of sessions to return in each response.
   * The service may return fewer than this value.
   * </pre>
   *
   * <code>int32 page_size = 2 [(.google.api.field_behavior) = OPTIONAL];</code>
   *
   * @return The pageSize.
   */
  @java.lang.Override
  public int getPageSize() {
    return pageSize_;
  }

  public static final int PAGE_TOKEN_FIELD_NUMBER = 3;

  @SuppressWarnings("serial")
  private volatile java.lang.Object pageToken_ = "";

  /**
   *
   *
   * <pre>
   * Optional. A page token received from a previous `ListSessions` call.
   * Provide this token to retrieve the subsequent page.
   * </pre>
   *
   * <code>string page_token = 3 [(.google.api.field_behavior) = OPTIONAL];</code>
   *
   * @return The pageToken.
   */
  @java.lang.Override
  public java.lang.String getPageToken() {
    java.lang.Object ref = pageToken_;
    if (ref instanceof java.lang.String) {
      return (java.lang.String) ref;
    } else {
      com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
      java.lang.String s = bs.toStringUtf8();
      pageToken_ = s;
      return s;
    }
  }

  /**
   *
   *
   * <pre>
   * Optional. A page token received from a previous `ListSessions` call.
   * Provide this token to retrieve the subsequent page.
   * </pre>
   *
   * <code>string page_token = 3 [(.google.api.field_behavior) = OPTIONAL];</code>
   *
   * @return The bytes for pageToken.
   */
  @java.lang.Override
  public com.google.protobuf.ByteString getPageTokenBytes() {
    java.lang.Object ref = pageToken_;
    if (ref instanceof java.lang.String) {
      com.google.protobuf.ByteString b =
          com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
      pageToken_ = b;
      return b;
    } else {
      return (com.google.protobuf.ByteString) ref;
    }
  }

  public static final int FILTER_FIELD_NUMBER = 4;

  @SuppressWarnings("serial")
  private volatile java.lang.Object filter_ = "";

  /**
   *
   *
   * <pre>
   * Optional. A filter for the session templates to return in the response.
   * Filters are case sensitive and have the following syntax:
   *
   * [field = value] AND [field [= value]] ...
   * </pre>
   *
   * <code>string filter = 4 [(.google.api.field_behavior) = OPTIONAL];</code>
   *
   * @return The filter.
   */
  @java.lang.Override
  public java.lang.String getFilter() {
    java.lang.Object ref = filter_;
    if (ref instanceof java.lang.String) {
      return (java.lang.String) ref;
    } else {
      com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
      java.lang.String s = bs.toStringUtf8();
      filter_ = s;
      return s;
    }
  }

  /**
   *
   *
   * <pre>
   * Optional. A filter for the session templates to return in the response.
   * Filters are case sensitive and have the following syntax:
   *
   * [field = value] AND [field [= value]] ...
   * </pre>
   *
   * <code>string filter = 4 [(.google.api.field_behavior) = OPTIONAL];</code>
   *
   * @return The bytes for filter.
   */
  @java.lang.Override
  public com.google.protobuf.ByteString getFilterBytes() {
    java.lang.Object ref = filter_;
    if (ref instanceof java.lang.String) {
      com.google.protobuf.ByteString b =
          com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
      filter_ = b;
      return b;
    } else {
      return (com.google.protobuf.ByteString) ref;
    }
  }

  private byte memoizedIsInitialized = -1;

  @java.lang.Override
  public final boolean isInitialized() {
    byte isInitialized = memoizedIsInitialized;
    if (isInitialized == 1) return true;
    if (isInitialized == 0) return false;

    memoizedIsInitialized = 1;
    return true;
  }

  // Serialization: proto3 semantics — fields at their default value (empty string, 0) are
  // omitted from the wire; unknown fields captured at parse time are re-emitted.
  @java.lang.Override
  public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(parent_)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 1, parent_);
    }
    if (pageSize_ != 0) {
      output.writeInt32(2, pageSize_);
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(pageToken_)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 3, pageToken_);
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(filter_)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 4, filter_);
    }
    getUnknownFields().writeTo(output);
  }

  @java.lang.Override
  public int getSerializedSize() {
    int size = memoizedSize;
    if (size != -1) return size;

    size = 0;
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(parent_)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, parent_);
    }
    if (pageSize_ != 0) {
      size += com.google.protobuf.CodedOutputStream.computeInt32Size(2, pageSize_);
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(pageToken_)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(3, pageToken_);
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(filter_)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(4, filter_);
    }
    size += getUnknownFields().getSerializedSize();
    memoizedSize = size;
    return size;
  }

  @java.lang.Override
  public boolean equals(final java.lang.Object obj) {
    if (obj == this) {
      return true;
    }
    if (!(obj instanceof com.google.cloud.dataproc.v1.ListSessionTemplatesRequest)) {
      return super.equals(obj);
    }
    com.google.cloud.dataproc.v1.ListSessionTemplatesRequest other =
        (com.google.cloud.dataproc.v1.ListSessionTemplatesRequest) obj;

    if (!getParent().equals(other.getParent())) return false;
    if (getPageSize() != other.getPageSize()) return false;
    if (!getPageToken().equals(other.getPageToken())) return false;
    if (!getFilter().equals(other.getFilter())) return false;
    if (!getUnknownFields().equals(other.getUnknownFields())) return false;
    return true;
  }

  @java.lang.Override
  public int hashCode() {
    if (memoizedHashCode != 0) {
      return memoizedHashCode;
    }
    int hash = 41;
    hash = (19 * hash) + getDescriptor().hashCode();
    hash = (37 * hash) + PARENT_FIELD_NUMBER;
    hash = (53 * hash) + getParent().hashCode();
    hash = (37 * hash) + PAGE_SIZE_FIELD_NUMBER;
    hash = (53 * hash) + getPageSize();
    hash = (37 * hash) + PAGE_TOKEN_FIELD_NUMBER;
    hash = (53 * hash) + getPageToken().hashCode();
    hash = (37 * hash) + FILTER_FIELD_NUMBER;
    hash = (53 * hash) + getFilter().hashCode();
    hash = (29 * hash) + getUnknownFields().hashCode();
    memoizedHashCode = hash;
    return hash;
  }

  public static com.google.cloud.dataproc.v1.ListSessionTemplatesRequest parseFrom(
      java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.cloud.dataproc.v1.ListSessionTemplatesRequest parseFrom(
      java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.cloud.dataproc.v1.ListSessionTemplatesRequest parseFrom(
      com.google.protobuf.ByteString data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.cloud.dataproc.v1.ListSessionTemplatesRequest parseFrom(
      com.google.protobuf.ByteString data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.cloud.dataproc.v1.ListSessionTemplatesRequest parseFrom(byte[] data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.cloud.dataproc.v1.ListSessionTemplatesRequest parseFrom(
      byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.cloud.dataproc.v1.ListSessionTemplatesRequest parseFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }

  public static com.google.cloud.dataproc.v1.ListSessionTemplatesRequest parseFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }

  public static com.google.cloud.dataproc.v1.ListSessionTemplatesRequest parseDelimitedFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
  }

  public static com.google.cloud.dataproc.v1.ListSessionTemplatesRequest parseDelimitedFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
        PARSER, input, extensionRegistry);
  }

  public static com.google.cloud.dataproc.v1.ListSessionTemplatesRequest parseFrom(
      com.google.protobuf.CodedInputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }

  public static com.google.cloud.dataproc.v1.ListSessionTemplatesRequest parseFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }

  @java.lang.Override
  public Builder newBuilderForType() {
    return newBuilder();
  }

  public static Builder newBuilder() {
    return DEFAULT_INSTANCE.toBuilder();
  }

  public static Builder newBuilder(
      com.google.cloud.dataproc.v1.ListSessionTemplatesRequest prototype) {
    return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
  }

  @java.lang.Override
  public Builder toBuilder() {
    return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
  }

  @java.lang.Override
  protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
    Builder builder = new Builder(parent);
    return builder;
  }

  /**
   *
   *
   * <pre>
   * A request to list session templates in a project.
   * </pre>
   *
   * Protobuf type {@code google.cloud.dataproc.v1.ListSessionTemplatesRequest}
   */
  public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
      implements
      // @@protoc_insertion_point(builder_implements:google.cloud.dataproc.v1.ListSessionTemplatesRequest)
      com.google.cloud.dataproc.v1.ListSessionTemplatesRequestOrBuilder {
    public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
      return com.google.cloud.dataproc.v1.SessionTemplatesProto
          .internal_static_google_cloud_dataproc_v1_ListSessionTemplatesRequest_descriptor;
    }

    @java.lang.Override
    protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return com.google.cloud.dataproc.v1.SessionTemplatesProto
          .internal_static_google_cloud_dataproc_v1_ListSessionTemplatesRequest_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              com.google.cloud.dataproc.v1.ListSessionTemplatesRequest.class,
              com.google.cloud.dataproc.v1.ListSessionTemplatesRequest.Builder.class);
    }

    // Construct using com.google.cloud.dataproc.v1.ListSessionTemplatesRequest.newBuilder()
    private Builder() {}

    private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
      super(parent);
    }

    @java.lang.Override
    public Builder clear() {
      super.clear();
      bitField0_ = 0;
      parent_ = "";
      pageSize_ = 0;
      pageToken_ = "";
      filter_ = "";
      return this;
    }

    @java.lang.Override
    public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
      return com.google.cloud.dataproc.v1.SessionTemplatesProto
          .internal_static_google_cloud_dataproc_v1_ListSessionTemplatesRequest_descriptor;
    }

    @java.lang.Override
    public com.google.cloud.dataproc.v1.ListSessionTemplatesRequest getDefaultInstanceForType() {
      return com.google.cloud.dataproc.v1.ListSessionTemplatesRequest.getDefaultInstance();
    }

    @java.lang.Override
    public com.google.cloud.dataproc.v1.ListSessionTemplatesRequest build() {
      com.google.cloud.dataproc.v1.ListSessionTemplatesRequest result = buildPartial();
      if (!result.isInitialized()) {
        throw newUninitializedMessageException(result);
      }
      return result;
    }

    @java.lang.Override
    public com.google.cloud.dataproc.v1.ListSessionTemplatesRequest buildPartial() {
      com.google.cloud.dataproc.v1.ListSessionTemplatesRequest result =
          new com.google.cloud.dataproc.v1.ListSessionTemplatesRequest(this);
      if (bitField0_ != 0) {
        buildPartial0(result);
      }
      onBuilt();
      return result;
    }

    // Copies only the fields whose has-bits are set (0x01 parent, 0x02 pageSize,
    // 0x04 pageToken, 0x08 filter) into the freshly built message.
    private void buildPartial0(com.google.cloud.dataproc.v1.ListSessionTemplatesRequest result) {
      int from_bitField0_ = bitField0_;
      if (((from_bitField0_ & 0x00000001) != 0)) {
        result.parent_ = parent_;
      }
      if (((from_bitField0_ & 0x00000002) != 0)) {
        result.pageSize_ = pageSize_;
      }
      if (((from_bitField0_ & 0x00000004) != 0)) {
        result.pageToken_ = pageToken_;
      }
      if (((from_bitField0_ & 0x00000008) != 0)) {
        result.filter_ = filter_;
      }
    }

    @java.lang.Override
    public Builder clone() {
      return super.clone();
    }

    @java.lang.Override
    public Builder setField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.setField(field, value);
    }

    @java.lang.Override
    public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
      return super.clearField(field);
    }

    @java.lang.Override
    public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
      return super.clearOneof(oneof);
    }

    @java.lang.Override
    public Builder setRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
      return super.setRepeatedField(field, index, value);
    }

    @java.lang.Override
    public Builder addRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.addRepeatedField(field, value);
    }

    @java.lang.Override
    public Builder mergeFrom(com.google.protobuf.Message other) {
      if (other instanceof com.google.cloud.dataproc.v1.ListSessionTemplatesRequest) {
        return mergeFrom((com.google.cloud.dataproc.v1.ListSessionTemplatesRequest) other);
      } else {
        super.mergeFrom(other);
        return this;
      }
    }

    // Field-wise merge: non-default values in `other` overwrite this builder's values.
    public Builder mergeFrom(com.google.cloud.dataproc.v1.ListSessionTemplatesRequest other) {
      if (other == com.google.cloud.dataproc.v1.ListSessionTemplatesRequest.getDefaultInstance())
        return this;
      if (!other.getParent().isEmpty()) {
        parent_ = other.parent_;
        bitField0_ |= 0x00000001;
        onChanged();
      }
      if (other.getPageSize() != 0) {
        setPageSize(other.getPageSize());
      }
      if (!other.getPageToken().isEmpty()) {
        pageToken_ = other.pageToken_;
        bitField0_ |= 0x00000004;
        onChanged();
      }
      if (!other.getFilter().isEmpty()) {
        filter_ = other.filter_;
        bitField0_ |= 0x00000008;
        onChanged();
      }
      this.mergeUnknownFields(other.getUnknownFields());
      onChanged();
      return this;
    }

    @java.lang.Override
    public final boolean isInitialized() {
      return true;
    }

    // Wire-format parse loop keyed on field tags (10/16/26/34 = fields 1-4);
    // unrecognized tags are preserved via parseUnknownField.
    @java.lang.Override
    public Builder mergeFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      if (extensionRegistry == null) {
        throw new java.lang.NullPointerException();
      }
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              done = true;
              break;
            case 10:
              {
                parent_ = input.readStringRequireUtf8();
                bitField0_ |= 0x00000001;
                break;
              } // case 10
            case 16:
              {
                pageSize_ = input.readInt32();
                bitField0_ |= 0x00000002;
                break;
              } // case 16
            case 26:
              {
                pageToken_ = input.readStringRequireUtf8();
                bitField0_ |= 0x00000004;
                break;
              } // case 26
            case 34:
              {
                filter_ = input.readStringRequireUtf8();
                bitField0_ |= 0x00000008;
                break;
              } // case 34
            default:
              {
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
          } // switch (tag)
        } // while (!done)
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.unwrapIOException();
      } finally {
        onChanged();
      } // finally
      return this;
    }

    private int bitField0_;

    private java.lang.Object parent_ = "";

    /**
     *
     *
     * <pre>
     * Required. The parent that owns this collection of session templates.
     * </pre>
     *
     * <code>
     * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
     * </code>
     *
     * @return The parent.
     */
    public java.lang.String getParent() {
      java.lang.Object ref = parent_;
      if (!(ref instanceof java.lang.String)) {
        com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        parent_ = s;
        return s;
      } else {
        return (java.lang.String) ref;
      }
    }

    /**
     *
     *
     * <pre>
     * Required. The parent that owns this collection of session templates.
     * </pre>
     *
     * <code>
     * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
     * </code>
     *
     * @return The bytes for parent.
     */
    public com.google.protobuf.ByteString getParentBytes() {
      java.lang.Object ref = parent_;
      if (ref instanceof String) {
        com.google.protobuf.ByteString b =
            com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
        parent_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }

    /**
     *
     *
     * <pre>
     * Required. The parent that owns this collection of session templates.
     * </pre>
     *
     * <code>
     * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
     * </code>
     *
     * @param value The parent to set.
     * @return This builder for chaining.
     */
    public Builder setParent(java.lang.String value) {
      if (value == null) {
        throw new NullPointerException();
      }
      parent_ = value;
      bitField0_ |= 0x00000001;
      onChanged();
      return this;
    }

    /**
     *
     *
     * <pre>
     * Required. The parent that owns this collection of session templates.
     * </pre>
     *
     * <code>
     * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
     * </code>
     *
     * @return This builder for chaining.
     */
    public Builder clearParent() {
      parent_ = getDefaultInstance().getParent();
      bitField0_ = (bitField0_ & ~0x00000001);
      onChanged();
      return this;
    }

    /**
     *
     *
     * <pre>
     * Required. The parent that owns this collection of session templates.
     * </pre>
     *
     * <code>
     * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
     * </code>
     *
     * @param value The bytes for parent to set.
     * @return This builder for chaining.
     */
    public Builder setParentBytes(com.google.protobuf.ByteString value) {
      if (value == null) {
        throw new NullPointerException();
      }
      checkByteStringIsUtf8(value);
      parent_ = value;
      bitField0_ |= 0x00000001;
      onChanged();
      return this;
    }

    private int pageSize_;

    /**
     *
     *
     * <pre>
     * Optional. The maximum number of sessions to return in each response.
     * The service may return fewer than this value.
     * </pre>
     *
     * <code>int32 page_size = 2 [(.google.api.field_behavior) = OPTIONAL];</code>
     *
     * @return The pageSize.
     */
    @java.lang.Override
    public int getPageSize() {
      return pageSize_;
    }

    /**
     *
     *
     * <pre>
     * Optional. The maximum number of sessions to return in each response.
     * The service may return fewer than this value.
     * </pre>
     *
     * <code>int32 page_size = 2 [(.google.api.field_behavior) = OPTIONAL];</code>
     *
     * @param value The pageSize to set.
     * @return This builder for chaining.
     */
    public Builder setPageSize(int value) {

      pageSize_ = value;
      bitField0_ |= 0x00000002;
      onChanged();
      return this;
    }

    /**
     *
     *
     * <pre>
     * Optional. The maximum number of sessions to return in each response.
     * The service may return fewer than this value.
     * </pre>
     *
     * <code>int32 page_size = 2 [(.google.api.field_behavior) = OPTIONAL];</code>
     *
     * @return This builder for chaining.
     */
    public Builder clearPageSize() {
      bitField0_ = (bitField0_ & ~0x00000002);
      pageSize_ = 0;
      onChanged();
      return this;
    }

    private java.lang.Object pageToken_ = "";

    /**
     *
     *
     * <pre>
     * Optional. A page token received from a previous `ListSessions` call.
     * Provide this token to retrieve the subsequent page.
     * </pre>
     *
     * <code>string page_token = 3 [(.google.api.field_behavior) = OPTIONAL];</code>
     *
     * @return The pageToken.
     */
    public java.lang.String getPageToken() {
      java.lang.Object ref = pageToken_;
      if (!(ref instanceof java.lang.String)) {
        com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        pageToken_ = s;
        return s;
      } else {
        return (java.lang.String) ref;
      }
    }

    /**
     *
     *
     * <pre>
     * Optional. A page token received from a previous `ListSessions` call.
     * Provide this token to retrieve the subsequent page.
     * </pre>
     *
     * <code>string page_token = 3 [(.google.api.field_behavior) = OPTIONAL];</code>
     *
     * @return The bytes for pageToken.
     */
    public com.google.protobuf.ByteString getPageTokenBytes() {
      java.lang.Object ref = pageToken_;
      if (ref instanceof String) {
        com.google.protobuf.ByteString b =
            com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
        pageToken_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }

    /**
     *
     *
     * <pre>
     * Optional. A page token received from a previous `ListSessions` call.
     * Provide this token to retrieve the subsequent page.
     * </pre>
     *
     * <code>string page_token = 3 [(.google.api.field_behavior) = OPTIONAL];</code>
     *
     * @param value The pageToken to set.
     * @return This builder for chaining.
     */
    public Builder setPageToken(java.lang.String value) {
      if (value == null) {
        throw new NullPointerException();
      }
      pageToken_ = value;
      bitField0_ |= 0x00000004;
      onChanged();
      return this;
    }

    /**
     *
     *
     * <pre>
     * Optional. A page token received from a previous `ListSessions` call.
     * Provide this token to retrieve the subsequent page.
     * </pre>
     *
     * <code>string page_token = 3 [(.google.api.field_behavior) = OPTIONAL];</code>
     *
     * @return This builder for chaining.
     */
    public Builder clearPageToken() {
      pageToken_ = getDefaultInstance().getPageToken();
      bitField0_ = (bitField0_ & ~0x00000004);
      onChanged();
      return this;
    }

    /**
     *
     *
     * <pre>
     * Optional. A page token received from a previous `ListSessions` call.
     * Provide this token to retrieve the subsequent page.
     * </pre>
     *
     * <code>string page_token = 3 [(.google.api.field_behavior) = OPTIONAL];</code>
     *
     * @param value The bytes for pageToken to set.
     * @return This builder for chaining.
     */
    public Builder setPageTokenBytes(com.google.protobuf.ByteString value) {
      if (value == null) {
        throw new NullPointerException();
      }
      checkByteStringIsUtf8(value);
      pageToken_ = value;
      bitField0_ |= 0x00000004;
      onChanged();
      return this;
    }

    private java.lang.Object filter_ = "";

    /**
     *
     *
     * <pre>
     * Optional. A filter for the session templates to return in the response.
     * Filters are case sensitive and have the following syntax:
     *
     * [field = value] AND [field [= value]] ...
     * </pre>
     *
     * <code>string filter = 4 [(.google.api.field_behavior) = OPTIONAL];</code>
     *
     * @return The filter.
     */
    public java.lang.String getFilter() {
      java.lang.Object ref = filter_;
      if (!(ref instanceof java.lang.String)) {
        com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        filter_ = s;
        return s;
      } else {
        return (java.lang.String) ref;
      }
    }

    /**
     *
     *
     * <pre>
     * Optional. A filter for the session templates to return in the response.
     * Filters are case sensitive and have the following syntax:
     *
     * [field = value] AND [field [= value]] ...
     * </pre>
     *
     * <code>string filter = 4 [(.google.api.field_behavior) = OPTIONAL];</code>
     *
     * @return The bytes for filter.
     */
    public com.google.protobuf.ByteString getFilterBytes() {
      java.lang.Object ref = filter_;
      if (ref instanceof String) {
        com.google.protobuf.ByteString b =
            com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
        filter_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }

    /**
     *
     *
     * <pre>
     * Optional. A filter for the session templates to return in the response.
     * Filters are case sensitive and have the following syntax:
     *
     * [field = value] AND [field [= value]] ...
     * </pre>
     *
     * <code>string filter = 4 [(.google.api.field_behavior) = OPTIONAL];</code>
     *
     * @param value The filter to set.
     * @return This builder for chaining.
     */
    public Builder setFilter(java.lang.String value) {
      if (value == null) {
        throw new NullPointerException();
      }
      filter_ = value;
      bitField0_ |= 0x00000008;
      onChanged();
      return this;
    }

    /**
     *
     *
     * <pre>
     * Optional. A filter for the session templates to return in the response.
     * Filters are case sensitive and have the following syntax:
     *
     * [field = value] AND [field [= value]] ...
     * </pre>
     *
     * <code>string filter = 4 [(.google.api.field_behavior) = OPTIONAL];</code>
     *
     * @return This builder for chaining.
     */
    public Builder clearFilter() {
      filter_ = getDefaultInstance().getFilter();
      bitField0_ = (bitField0_ & ~0x00000008);
      onChanged();
      return this;
    }

    /**
     *
     *
     * <pre>
     * Optional. A filter for the session templates to return in the response.
     * Filters are case sensitive and have the following syntax:
     *
     * [field = value] AND [field [= value]] ...
     * </pre>
     *
     * <code>string filter = 4 [(.google.api.field_behavior) = OPTIONAL];</code>
     *
     * @param value The bytes for filter to set.
     * @return This builder for chaining.
     */
    public Builder setFilterBytes(com.google.protobuf.ByteString value) {
      if (value == null) {
        throw new NullPointerException();
      }
      checkByteStringIsUtf8(value);
      filter_ = value;
      bitField0_ |= 0x00000008;
      onChanged();
      return this;
    }

    @java.lang.Override
    public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.setUnknownFields(unknownFields);
    }

    @java.lang.Override
    public final Builder mergeUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.mergeUnknownFields(unknownFields);
    }

    // @@protoc_insertion_point(builder_scope:google.cloud.dataproc.v1.ListSessionTemplatesRequest)
  }

  // @@protoc_insertion_point(class_scope:google.cloud.dataproc.v1.ListSessionTemplatesRequest)
  private static final com.google.cloud.dataproc.v1.ListSessionTemplatesRequest DEFAULT_INSTANCE;

  static {
    DEFAULT_INSTANCE = new com.google.cloud.dataproc.v1.ListSessionTemplatesRequest();
  }

  public static com.google.cloud.dataproc.v1.ListSessionTemplatesRequest getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }

  // Parser delegates to the Builder's mergeFrom loop; partial messages are attached to
  // thrown InvalidProtocolBufferExceptions so callers can inspect what was read.
  private static final com.google.protobuf.Parser<ListSessionTemplatesRequest> PARSER =
      new com.google.protobuf.AbstractParser<ListSessionTemplatesRequest>() {
        @java.lang.Override
        public ListSessionTemplatesRequest parsePartialFrom(
            com.google.protobuf.CodedInputStream input,
            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
            throws com.google.protobuf.InvalidProtocolBufferException {
          Builder builder = newBuilder();
          try {
            builder.mergeFrom(input, extensionRegistry);
          } catch (com.google.protobuf.InvalidProtocolBufferException e) {
            throw e.setUnfinishedMessage(builder.buildPartial());
          } catch (com.google.protobuf.UninitializedMessageException e) {
            throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
          } catch (java.io.IOException e) {
            throw new com.google.protobuf.InvalidProtocolBufferException(e)
                .setUnfinishedMessage(builder.buildPartial());
          }
          return builder.buildPartial();
        }
      };

  public static com.google.protobuf.Parser<ListSessionTemplatesRequest> parser() {
    return PARSER;
  }

  @java.lang.Override
  public com.google.protobuf.Parser<ListSessionTemplatesRequest> getParserForType() {
    return PARSER;
  }

  @java.lang.Override
  public com.google.cloud.dataproc.v1.ListSessionTemplatesRequest getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }
}
oracle/graal
36,313
visualizer/IdealGraphVisualizer/Data/src/main/java/jdk/graal/compiler/graphio/GraphProtocol.java
/* * Copyright (c) 2011, 2023, Oracle and/or its affiliates. All rights reserved. * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER. * * This code is free software; you can redistribute it and/or modify it * under the terms of the GNU General Public License version 2 only, as * published by the Free Software Foundation. Oracle designates this * particular file as subject to the "Classpath" exception as provided * by Oracle in the LICENSE file that accompanied this code. * * This code is distributed in the hope that it will be useful, but WITHOUT * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License * version 2 for more details (a copy is included in the LICENSE file that * accompanied this code). * * You should have received a copy of the GNU General Public License version * 2 along with this work; if not, write to the Free Software Foundation, * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA. * * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA * or visit www.oracle.com if you need additional information or have any * questions. 
*/ package jdk.graal.compiler.graphio; import java.io.Closeable; import java.io.IOException; import java.net.URI; import java.net.URISyntaxException; import java.nio.ByteBuffer; import java.nio.channels.WritableByteChannel; import java.nio.charset.Charset; import java.util.Arrays; import java.util.Collection; import java.util.Iterator; import java.util.LinkedHashMap; import java.util.LinkedHashSet; import java.util.Map; import java.util.Objects; import java.util.Set; import java.util.WeakHashMap; abstract class GraphProtocol<Graph, Node, NodeClass, Edges, Block, ResolvedJavaMethod, ResolvedJavaField, Signature, NodeSourcePosition, Location> implements Closeable { private static final Charset UTF8 = Charset.forName("UTF-8"); private static final int CONSTANT_POOL_MAX_SIZE = 8000; private static final int BEGIN_GROUP = 0x00; private static final int BEGIN_GRAPH = 0x01; private static final int CLOSE_GROUP = 0x02; private static final int BEGIN_DOCUMENT = 0x03; private static final int POOL_NEW = 0x00; private static final int POOL_STRING = 0x01; private static final int POOL_ENUM = 0x02; private static final int POOL_CLASS = 0x03; private static final int POOL_METHOD = 0x04; private static final int POOL_NULL = 0x05; private static final int POOL_NODE_CLASS = 0x06; private static final int POOL_FIELD = 0x07; private static final int POOL_SIGNATURE = 0x08; private static final int POOL_NODE_SOURCE_POSITION = 0x09; private static final int POOL_NODE = 0x0a; private static final int PROPERTY_POOL = 0x00; private static final int PROPERTY_INT = 0x01; private static final int PROPERTY_LONG = 0x02; private static final int PROPERTY_DOUBLE = 0x03; private static final int PROPERTY_FLOAT = 0x04; private static final int PROPERTY_TRUE = 0x05; private static final int PROPERTY_FALSE = 0x06; private static final int PROPERTY_ARRAY = 0x07; private static final int PROPERTY_SUBGRAPH = 0x08; private static final int KLASS = 0x00; private static final int ENUM_KLASS = 0x01; private 
static final byte[] MAGIC_BYTES = {'B', 'I', 'G', 'V'}; private static final int MAJOR_VERSION = 8; private static final int MINOR_VERSION = 0; private final ConstantPool constantPool; private final ByteBuffer buffer; private final WritableByteChannel channel; private final boolean embedded; final int versionMajor; final int versionMinor; private boolean printing; GraphProtocol(WritableByteChannel channel, int major, int minor, boolean embedded) throws IOException { if (major > MAJOR_VERSION || (major == MAJOR_VERSION && minor > MINOR_VERSION)) { throw new IllegalArgumentException("Unrecognized version " + major + "." + minor); } this.versionMajor = major; this.versionMinor = minor; this.constantPool = new ConstantPool(); this.buffer = ByteBuffer.allocateDirect(256 * 1024); this.channel = channel; this.embedded = embedded; if (!embedded) { writeVersion(); flushEmbedded(); } } GraphProtocol(GraphProtocol<?, ?, ?, ?, ?, ?, ?, ?, ?, ?> parent) { this.versionMajor = parent.versionMajor; this.versionMinor = parent.versionMinor; this.constantPool = parent.constantPool; this.buffer = parent.buffer; this.channel = parent.channel; this.embedded = parent.embedded; } @SuppressWarnings("all") public final void print(Graph graph, Map<? extends Object, ? extends Object> properties, int id, String format, Object... args) throws IOException { printing = true; try { writeByte(BEGIN_GRAPH); if (versionMajor >= 3) { writeInt(id); writeString(format); writeInt(args.length); for (Object a : args) { writePropertyObject(graph, a); } } else { writePoolObject(formatTitle(graph, id, format, args)); } writeGraph(graph, properties); flushEmbedded(); flush(); } finally { printing = false; } } public final void startDocument(Map<? extends Object, ? extends Object> documentProperties) throws IOException { if (versionMajor < 7) { throw new IllegalStateException("Dump properties unsupported in format v." 
+ versionMajor); } printing = true; try { writeByte(BEGIN_DOCUMENT); writeProperties(null, documentProperties); } finally { printing = false; } } public final void beginGroup(Graph noGraph, String name, String shortName, ResolvedJavaMethod method, int bci, Map<? extends Object, ? extends Object> properties) throws IOException { printing = true; try { writeByte(BEGIN_GROUP); writePoolObject(name); writePoolObject(shortName); writePoolObject(method); writeInt(bci); writeProperties(noGraph, properties); flushEmbedded(); } finally { printing = false; } } public final void endGroup() throws IOException { printing = true; try { writeByte(CLOSE_GROUP); flushEmbedded(); } finally { printing = false; } } final int write(ByteBuffer src) throws IOException { if (printing) { throw new IllegalStateException("Trying to write during graph print."); } constantPool.reset(); return writeBytesRaw(src); } final boolean isOpen() { return channel.isOpen(); } @Override public final void close() { try { flush(); channel.close(); } catch (IOException ex) { throw new Error(ex); } } protected abstract Graph findGraph(Graph current, Object obj); protected abstract ResolvedJavaMethod findMethod(Object obj); /** * Attempts to recognize the provided object as a node. Used to encode it with * {@link #POOL_NODE} pool type. * * @param obj any object * @return <code>null</code> if it is not a node object, non-null otherwise */ protected abstract Node findNode(Object obj); /** * Determines whether the provided object is node class or not. * * @param obj object to check * @return {@code null} if {@code obj} does not represent a NodeClass otherwise the NodeClass * represented by {@code obj} */ protected abstract NodeClass findNodeClass(Object obj); /** * Returns the NodeClass for a given Node {@code obj}. * * @param obj instance of node * @return non-{@code null} instance of the node's class object */ protected abstract NodeClass findClassForNode(Node obj); /** * Find a Java class. 
The returned object must be acceptable by * {@link #findJavaTypeName(java.lang.Object)} and return valid name for the class. * * @param clazz node class object * @return object representing the class, for example {@link Class} */ protected abstract Object findJavaClass(NodeClass clazz); protected abstract Object findEnumClass(Object enumValue); protected abstract String findNameTemplate(NodeClass clazz); protected abstract Edges findClassEdges(NodeClass nodeClass, boolean dumpInputs); protected abstract int findNodeId(Node n); protected abstract boolean hasPredecessor(Node node); protected abstract int findNodesCount(Graph info); protected abstract Iterable<? extends Node> findNodes(Graph info); protected abstract void findNodeProperties(Node node, Map<String, Object> props, Graph info); protected abstract Collection<? extends Node> findBlockNodes(Graph info, Block block); protected abstract int findBlockId(Block sux); protected abstract Collection<? extends Block> findBlocks(Graph graph); protected abstract Collection<? extends Block> findBlockSuccessors(Block block); protected abstract String formatTitle(Graph graph, int id, String format, Object... args); protected abstract int findSize(Edges edges); protected abstract boolean isDirect(Edges edges, int i); protected abstract String findName(Edges edges, int i); protected abstract Object findType(Edges edges, int i); protected abstract Collection<? 
extends Node> findNodes(Graph graph, Node node, Edges edges, int i); protected abstract int findEnumOrdinal(Object obj); protected abstract String[] findEnumTypeValues(Object clazz); protected abstract String findJavaTypeName(Object obj); protected abstract byte[] findMethodCode(ResolvedJavaMethod method); protected abstract int findMethodModifiers(ResolvedJavaMethod method); protected abstract Signature findMethodSignature(ResolvedJavaMethod method); protected abstract String findMethodName(ResolvedJavaMethod method); protected abstract Object findMethodDeclaringClass(ResolvedJavaMethod method); protected abstract int findFieldModifiers(ResolvedJavaField field); protected abstract String findFieldTypeName(ResolvedJavaField field); protected abstract String findFieldName(ResolvedJavaField field); protected abstract Object findFieldDeclaringClass(ResolvedJavaField field); protected abstract ResolvedJavaField findJavaField(Object object); protected abstract Signature findSignature(Object object); protected abstract int findSignatureParameterCount(Signature signature); protected abstract String findSignatureParameterTypeName(Signature signature, int index); protected abstract String findSignatureReturnTypeName(Signature signature); protected abstract NodeSourcePosition findNodeSourcePosition(Object object); protected abstract ResolvedJavaMethod findNodeSourcePositionMethod(NodeSourcePosition pos); protected abstract NodeSourcePosition findNodeSourcePositionCaller(NodeSourcePosition pos); protected abstract int findNodeSourcePositionBCI(NodeSourcePosition pos); protected abstract Iterable<Location> findLocation(ResolvedJavaMethod method, int bci, NodeSourcePosition pos); protected abstract String findLocationFile(Location loc) throws IOException; protected abstract int findLocationLine(Location loc); protected abstract URI findLocationURI(Location loc) throws URISyntaxException; protected abstract String findLocationLanguage(Location loc); protected abstract int 
findLocationStart(Location loc); protected abstract int findLocationEnd(Location loc); private void writeVersion() throws IOException { writeBytesRaw(MAGIC_BYTES); writeByte(versionMajor); writeByte(versionMinor); } private void flushEmbedded() throws IOException { if (embedded) { flush(); constantPool.reset(); } } private void flush() throws IOException { buffer.flip(); /* * Try not to let interrupted threads abort the write. There's still a race here but an * interrupt that's been pending for a long time shouldn't stop this writing. */ boolean interrupted = Thread.interrupted(); try { channel.write(buffer); } finally { if (interrupted) { Thread.currentThread().interrupt(); } } buffer.compact(); } private void ensureAvailable(int i) throws IOException { assert buffer.capacity() >= i : "Can not make " + i + " bytes available, buffer is too small"; while (buffer.remaining() < i) { flush(); } } private void writeByte(int b) throws IOException { ensureAvailable(1); buffer.put((byte) b); } private void writeInt(int b) throws IOException { ensureAvailable(4); buffer.putInt(b); } private void writeLong(long b) throws IOException { ensureAvailable(8); buffer.putLong(b); } private void writeDouble(double b) throws IOException { ensureAvailable(8); buffer.putDouble(b); } private void writeFloat(float b) throws IOException { ensureAvailable(4); buffer.putFloat(b); } private void writeShort(char b) throws IOException { ensureAvailable(2); buffer.putChar(b); } private void writeString(String str) throws IOException { byte[] bytes = str.getBytes(UTF8); writeBytes(bytes); } private void writeBytes(byte[] b) throws IOException { if (b == null) { writeInt(-1); } else { writeInt(b.length); writeBytesRaw(b); } } private void writeBytesRaw(byte[] b) throws IOException { int bytesWritten = 0; while (bytesWritten < b.length) { int toWrite = Math.min(b.length - bytesWritten, buffer.capacity()); ensureAvailable(toWrite); buffer.put(b, bytesWritten, toWrite); bytesWritten += toWrite; } } 
private int writeBytesRaw(ByteBuffer b) throws IOException { int limit = b.limit(); int written = 0; while (b.position() < limit) { int toWrite = Math.min(limit - b.position(), buffer.capacity()); ensureAvailable(toWrite); b.limit(b.position() + toWrite); try { buffer.put(b); written += toWrite; } finally { b.limit(limit); } } return written; } private void writeInts(int[] b) throws IOException { if (b == null) { writeInt(-1); } else { writeInt(b.length); int sizeInBytes = b.length * 4; ensureAvailable(sizeInBytes); buffer.asIntBuffer().put(b); buffer.position(buffer.position() + sizeInBytes); } } private void writeDoubles(double[] b) throws IOException { if (b == null) { writeInt(-1); } else { writeInt(b.length); int sizeInBytes = b.length * 8; ensureAvailable(sizeInBytes); buffer.asDoubleBuffer().put(b); buffer.position(buffer.position() + sizeInBytes); } } private void writePoolObject(Object obj) throws IOException { Object object = obj; if (object == null) { writeByte(POOL_NULL); return; } Object[] found = new Object[1]; int type = findPoolType(object, found); Character id = constantPool.get(object, type); if (id == null) { addPoolEntry(object, type, found); } else { writeByte(type); writeShort(id.charValue()); } } private int findPoolType(Object obj, Object[] found) throws IOException { Object object = obj; if (object == null) { return POOL_NULL; } if (isFound(findJavaField(object), found)) { return POOL_FIELD; } else if (isFound(findSignature(object), found)) { return POOL_SIGNATURE; } else if (versionMajor >= 4 && isFound(findNodeSourcePosition(object), found)) { return POOL_NODE_SOURCE_POSITION; } else { final Node node = findNode(object); if (versionMajor == 4 && node != null) { object = classForNode(node); } if (isFound(findNodeClass(object), found)) { return POOL_NODE_CLASS; } else if (versionMajor >= 5 && isFound(node, found)) { return POOL_NODE; } else if (isFound(findMethod(object), found)) { return POOL_METHOD; } else if (object instanceof Enum<?>) { 
if (found != null) { found[0] = ((Enum<?>) object).ordinal(); } return POOL_ENUM; } else { int val = findEnumOrdinal(object); if (val >= 0) { if (found != null) { found[0] = val; } return POOL_ENUM; } else if (object instanceof Class<?>) { if (found != null) { found[0] = ((Class<?>) object).getName(); } return POOL_CLASS; } else if (isFound(findJavaTypeName(object), found)) { return POOL_CLASS; } else { return POOL_STRING; } } } } private void writeGraph(Graph graph, Map<? extends Object, ? extends Object> properties) throws IOException { writeProperties(graph, properties); writeNodes(graph); writeBlocks(findBlocks(graph), graph); } private void writeNodes(Graph info) throws IOException { Map<String, Object> props = new LinkedHashMap<>(); final int size = findNodesCount(info); writeInt(size); int cnt = 0; for (Node node : findNodes(info)) { NodeClass nodeClass = classForNode(node); findNodeProperties(node, props, info); writeInt(findNodeId(node)); writePoolObject(nodeClass); writeByte(hasPredecessor(node) ? 1 : 0); writeProperties(info, props); writeEdges(info, node, true); writeEdges(info, node, false); props.clear(); cnt++; } if (size != cnt) { throw new IOException("Expecting " + size + " nodes, but found " + cnt); } } private void writeEdges(Graph graph, Node node, boolean dumpInputs) throws IOException { NodeClass clazz = classForNode(node); Edges edges = findClassEdges(clazz, dumpInputs); int size = findSize(edges); for (int i = 0; i < size; i++) { Collection<? 
extends Node> list = findNodes(graph, node, edges, i); if (isDirect(edges, i)) { if (list != null && list.size() != 1) { throw new IOException("Edge " + i + " in " + edges + " is direct, but list isn't singleton: " + list); } Node n = null; if (list != null && !list.isEmpty()) { n = list.iterator().next(); } writeNodeRef(n); } else { if (list == null) { writeShort((char) 0); } else { int listSize = list.size(); if (listSize != ((char) listSize)) { throw new IOException("Too many nodes in list: " + list.size()); } writeShort((char) listSize); for (Node edge : list) { writeNodeRef(edge); } } } } } private NodeClass classForNode(Node node) throws IOException { NodeClass clazz = findClassForNode(node); if (clazz == null) { throw new IOException("No class for " + node); } return clazz; } private void writeNodeRef(Node node) throws IOException { writeInt(findNodeId(node)); } private void writeBlocks(Collection<? extends Block> blocks, Graph info) throws IOException { if (blocks != null) { for (Block block : blocks) { Collection<? extends Node> nodes = findBlockNodes(info, block); if (nodes == null) { writeInt(0); return; } } writeInt(blocks.size()); for (Block block : blocks) { Collection<? extends Node> nodes = findBlockNodes(info, block); writeInt(findBlockId(block)); writeInt(nodes.size()); for (Node node : nodes) { writeInt(findNodeId(node)); } final Collection<? extends Block> successors = findBlockSuccessors(block); writeInt(successors.size()); for (Block sux : successors) { writeInt(findBlockId(sux)); } } } else { writeInt(0); } } private void writeEdgesInfo(NodeClass nodeClass, boolean dumpInputs) throws IOException { Edges edges = findClassEdges(nodeClass, dumpInputs); int size = findSize(edges); writeShort((char) size); for (int i = 0; i < size; i++) { writeByte(isDirect(edges, i) ? 
0 : 1); writePoolObject(findName(edges, i)); if (dumpInputs) { writePoolObject(findType(edges, i)); } } } @SuppressWarnings("unchecked") private void addPoolEntry(Object obj, int type, Object[] found) throws IOException { Object object = obj; char index = constantPool.add(object, type); writeByte(POOL_NEW); writeShort(index); writeByte(type); switch (type) { case POOL_FIELD: { ResolvedJavaField field = (ResolvedJavaField) found[0]; Objects.requireNonNull(field); writePoolObject(findFieldDeclaringClass(field)); writePoolObject(findFieldName(field)); writePoolObject(findFieldTypeName(field)); writeInt(findFieldModifiers(field)); break; } case POOL_SIGNATURE: { Signature signature = (Signature) found[0]; int args = findSignatureParameterCount(signature); writeShort((char) args); for (int i = 0; i < args; i++) { writePoolObject(findSignatureParameterTypeName(signature, i)); } writePoolObject(findSignatureReturnTypeName(signature)); break; } case POOL_NODE_SOURCE_POSITION: { NodeSourcePosition pos = (NodeSourcePosition) found[0]; Objects.requireNonNull(pos); ResolvedJavaMethod method = findNodeSourcePositionMethod(pos); writePoolObject(method); final int bci = findNodeSourcePositionBCI(pos); writeInt(bci); Iterator<Location> ste = findLocation(method, bci, pos).iterator(); if (versionMajor >= 6) { while (ste.hasNext()) { Location loc = ste.next(); URI uri; try { uri = findLocationURI(loc); } catch (URISyntaxException ex) { throw new IOException(ex); } if (uri == null) { continue; } String l = findLocationLanguage(loc); if (l == null) { continue; } writePoolObject(uri.toString()); writeString(l); writeInt(findLocationLine(loc)); writeInt(findLocationStart(loc)); writeInt(findLocationEnd(loc)); } writePoolObject(null); } else { Location first = ste.hasNext() ? ste.next() : null; String fileName = first != null ? 
findLocationFile(first) : null; if (fileName != null) { writePoolObject(fileName); writeInt(findLocationLine(first)); } else { writePoolObject(null); } } writePoolObject(findNodeSourcePositionCaller(pos)); break; } case POOL_NODE: { Node node = (Node) found[0]; Objects.requireNonNull(node); writeInt(findNodeId(node)); writePoolObject(classForNode(node)); break; } case POOL_NODE_CLASS: { NodeClass nodeClass = (NodeClass) found[0]; final Object clazz = findJavaClass(nodeClass); if (versionMajor >= 3) { writePoolObject(clazz); writeString(findNameTemplate(nodeClass)); } else { writeString(((Class<?>) clazz).getSimpleName()); String nameTemplate = findNameTemplate(nodeClass); writeString(nameTemplate); } writeEdgesInfo(nodeClass, true); writeEdgesInfo(nodeClass, false); break; } case POOL_CLASS: { String typeName = (String) found[0]; Objects.requireNonNull(typeName); writeString(typeName); String[] enumValueNames = findEnumTypeValues(object); if (enumValueNames != null) { writeByte(ENUM_KLASS); writeInt(enumValueNames.length); for (String o : enumValueNames) { writePoolObject(o); } } else { writeByte(KLASS); } break; } case POOL_METHOD: { ResolvedJavaMethod method = (ResolvedJavaMethod) found[0]; Objects.requireNonNull(method); writePoolObject(findMethodDeclaringClass(method)); writePoolObject(findMethodName(method)); final Signature methodSignature = findMethodSignature(method); if (findSignature(methodSignature) == null) { throw new IOException("Should be recognized as signature: " + methodSignature + " for " + method); } writePoolObject(methodSignature); writeInt(findMethodModifiers(method)); writeBytes(findMethodCode(method)); break; } case POOL_ENUM: { int enumOrdinal = (int) found[0]; writePoolObject(findEnumClass(object)); writeInt(enumOrdinal); break; } case POOL_STRING: { writeString(object.toString()); break; } default: throw new IllegalStateException(); } } private void writePropertyObject(Graph graph, Object obj) throws IOException { if (obj instanceof 
Integer) { writeByte(PROPERTY_INT); writeInt(((Integer) obj).intValue()); } else if (obj instanceof Long) { writeByte(PROPERTY_LONG); writeLong(((Long) obj).longValue()); } else if (obj instanceof Double) { writeByte(PROPERTY_DOUBLE); writeDouble(((Double) obj).doubleValue()); } else if (obj instanceof Float) { writeByte(PROPERTY_FLOAT); writeFloat(((Float) obj).floatValue()); } else if (obj instanceof Boolean) { if (((Boolean) obj).booleanValue()) { writeByte(PROPERTY_TRUE); } else { writeByte(PROPERTY_FALSE); } } else if (obj != null && obj.getClass().isArray()) { Class<?> componentType = obj.getClass().getComponentType(); if (componentType.isPrimitive()) { if (componentType == Double.TYPE) { writeByte(PROPERTY_ARRAY); writeByte(PROPERTY_DOUBLE); writeDoubles((double[]) obj); } else if (componentType == Integer.TYPE) { writeByte(PROPERTY_ARRAY); writeByte(PROPERTY_INT); writeInts((int[]) obj); } else { writeByte(PROPERTY_POOL); writePoolObject(obj); } } else { writeByte(PROPERTY_ARRAY); writeByte(PROPERTY_POOL); Object[] array = (Object[]) obj; writeInt(array.length); for (Object o : array) { writePoolObject(o); } } } else { Graph g = findGraph(graph, obj); if (g == null) { writeByte(PROPERTY_POOL); writePoolObject(obj); } else { writeByte(PROPERTY_SUBGRAPH); writeGraph(g, null); } } } private void writeProperties(Graph graph, Map<? extends Object, ? extends Object> props) throws IOException { if (props == null) { writeShort((char) 0); return; } final int size = props.size(); // properties if (size >= Character.MAX_VALUE) { if (versionMajor > 7) { writeShort(Character.MAX_VALUE); writeInt(size); } else { throw new IllegalArgumentException("Property count is too big. Properties can contain only " + (Character.MAX_VALUE - 1) + " in version < 8."); } } else { writeShort((char) size); } int cnt = 0; for (Map.Entry<? extends Object, ? 
extends Object> entry : props.entrySet()) { String key = entry.getKey().toString(); writePoolObject(key); writePropertyObject(graph, entry.getValue()); cnt++; } if (size != cnt) { throw new IOException("Expecting " + size + " properties, but found only " + cnt); } } private static boolean isFound(Object obj, Object[] found) { if (obj == null) { return false; } if (found != null) { found[0] = obj; } return true; } private static Set<Class<?>> badToString; /** * This is a helper to identify objects that are encoded as POOL_STRING and have a poor * {@link Object#toString()} implementation where two objects that are * {@link Object#equals(Object)} have different String representations. Only the first mismatch * is reported since this is a systematic issue and reporting every failure would be too much * useless output. */ private static synchronized void reportBadToString(Object lookupKey, Object value) { if (badToString == null) { badToString = new LinkedHashSet<>(); } if (badToString.add(lookupKey.getClass())) { System.err.println("GraphProtocol: toString mismatch for " + lookupKey.getClass() + ": " + value + " != " + lookupKey.toString()); } } private static boolean checkToString(Object lookupKey, Object value) { if (!lookupKey.toString().equals(value)) { reportBadToString(lookupKey, value); } return true; } /** * This class maintains a limited pool of constants for use by the graph protocol. Once the * cache fills up the oldest slots are replaced with new values in a cyclic fashion. */ private static final class ConstantPool { private char nextId; /* * A mapping from an object to the pool entry that represents it. Normally the value is the * Character id of the entry but for {@link POOL_STRING} entries a second forwarding entry * might be created. A {@link POOL_STRING} can be looked up either by the original object or * by the toString representation of that object. To handle this case the original object is * inserted with the toString as the value. 
That string should then be looked up to get the * actual id. This is done to avoid excessive toString calls during encoding. */ private final WeakHashMap<Object, Object> map = new WeakHashMap<>(); private final Object[] keys = new Object[CONSTANT_POOL_MAX_SIZE]; ConstantPool() { } private static Object getLookupKey(Object key) { // Collections must be converted to a String early since they can be mutated after // being inserted into the map. return (key instanceof Collection) ? key.toString() : key; } Character get(Object initialKey, int type) { Object key = getLookupKey(initialKey); Object value = map.get(key); if (value instanceof String) { Character id = (Character) map.get(value); if (id != null && keys[id].equals(value)) { assert checkToString(key, value); return id; } value = null; } Character id = (Character) value; if (id != null && keys[id].equals(key)) { return id; } if (type == POOL_STRING && !(key instanceof String)) { // See if the String representation is already in the map String string = key.toString(); id = get(string, type); if (id != null) { // Add an entry that forwards from the object to the string. map.put(key, string); return id; } } return null; } char add(Object initialKey, int type) { char id = nextId++; if (nextId == CONSTANT_POOL_MAX_SIZE) { nextId = 0; } if (keys[id] != null) { map.remove(keys[id]); } Object key = getLookupKey(initialKey); if (type == POOL_STRING && !(key instanceof String)) { // Insert a forwarding entry from the original object to the string representation // and then directly insert the string with the pool id. String string = key.toString(); map.put(key, string); map.put(string, id); keys[id] = string; } else { map.put(key, id); keys[id] = key; } return id; } void reset() { map.clear(); Arrays.fill(keys, null); nextId = 0; } } }
googleapis/google-cloud-java
36,121
java-service-management/google-cloud-service-management/src/test/java/com/google/cloud/api/servicemanagement/v1/ServiceManagerClientHttpJsonTest.java
/* * Copyright 2025 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.google.cloud.api.servicemanagement.v1; import static com.google.cloud.api.servicemanagement.v1.ServiceManagerClient.ListServiceConfigsPagedResponse; import static com.google.cloud.api.servicemanagement.v1.ServiceManagerClient.ListServiceRolloutsPagedResponse; import static com.google.cloud.api.servicemanagement.v1.ServiceManagerClient.ListServicesPagedResponse; import com.google.api.Authentication; import com.google.api.Backend; import com.google.api.Billing; import com.google.api.Context; import com.google.api.Control; import com.google.api.Documentation; import com.google.api.Endpoint; import com.google.api.Http; import com.google.api.LogDescriptor; import com.google.api.Logging; import com.google.api.MetricDescriptor; import com.google.api.MonitoredResourceDescriptor; import com.google.api.Monitoring; import com.google.api.Publishing; import com.google.api.Quota; import com.google.api.Service; import com.google.api.SourceInfo; import com.google.api.SystemParameters; import com.google.api.Usage; import com.google.api.gax.core.NoCredentialsProvider; import com.google.api.gax.httpjson.GaxHttpJsonProperties; import com.google.api.gax.httpjson.testing.MockHttpService; import com.google.api.gax.rpc.ApiClientHeaderProvider; import com.google.api.gax.rpc.ApiException; import com.google.api.gax.rpc.ApiExceptionFactory; import com.google.api.gax.rpc.InvalidArgumentException; import 
com.google.api.gax.rpc.StatusCode; import com.google.api.gax.rpc.testing.FakeStatusCode; import com.google.api.servicemanagement.v1.ChangeReport; import com.google.api.servicemanagement.v1.ConfigSource; import com.google.api.servicemanagement.v1.Diagnostic; import com.google.api.servicemanagement.v1.GenerateConfigReportResponse; import com.google.api.servicemanagement.v1.GetServiceConfigRequest; import com.google.api.servicemanagement.v1.ListServiceConfigsResponse; import com.google.api.servicemanagement.v1.ListServiceRolloutsResponse; import com.google.api.servicemanagement.v1.ListServicesResponse; import com.google.api.servicemanagement.v1.ManagedService; import com.google.api.servicemanagement.v1.Rollout; import com.google.api.servicemanagement.v1.SubmitConfigSourceResponse; import com.google.api.servicemanagement.v1.UndeleteServiceResponse; import com.google.cloud.api.servicemanagement.v1.stub.HttpJsonServiceManagerStub; import com.google.common.collect.Lists; import com.google.iam.v1.AuditConfig; import com.google.iam.v1.Binding; import com.google.iam.v1.GetIamPolicyRequest; import com.google.iam.v1.GetPolicyOptions; import com.google.iam.v1.Policy; import com.google.iam.v1.SetIamPolicyRequest; import com.google.iam.v1.TestIamPermissionsRequest; import com.google.iam.v1.TestIamPermissionsResponse; import com.google.longrunning.Operation; import com.google.protobuf.Any; import com.google.protobuf.Api; import com.google.protobuf.ByteString; import com.google.protobuf.Empty; import com.google.protobuf.Enum; import com.google.protobuf.FieldMask; import com.google.protobuf.Timestamp; import com.google.protobuf.Type; import com.google.protobuf.UInt32Value; import java.io.IOException; import java.util.ArrayList; import java.util.Arrays; import java.util.List; import java.util.concurrent.ExecutionException; import javax.annotation.Generated; import org.junit.After; import org.junit.AfterClass; import org.junit.Assert; import org.junit.Before; import 
org.junit.BeforeClass; import org.junit.Ignore; import org.junit.Test; @Generated("by gapic-generator-java") public class ServiceManagerClientHttpJsonTest { private static MockHttpService mockService; private static ServiceManagerClient client; @BeforeClass public static void startStaticServer() throws IOException { mockService = new MockHttpService( HttpJsonServiceManagerStub.getMethodDescriptors(), ServiceManagerSettings.getDefaultEndpoint()); ServiceManagerSettings settings = ServiceManagerSettings.newHttpJsonBuilder() .setTransportChannelProvider( ServiceManagerSettings.defaultHttpJsonTransportProviderBuilder() .setHttpTransport(mockService) .build()) .setCredentialsProvider(NoCredentialsProvider.create()) .build(); client = ServiceManagerClient.create(settings); } @AfterClass public static void stopServer() { client.close(); } @Before public void setUp() {} @After public void tearDown() throws Exception { mockService.reset(); } @Test public void listServicesTest() throws Exception { ManagedService responsesElement = ManagedService.newBuilder().build(); ListServicesResponse expectedResponse = ListServicesResponse.newBuilder() .setNextPageToken("") .addAllServices(Arrays.asList(responsesElement)) .build(); mockService.addResponse(expectedResponse); String producerProjectId = "producerProjectId-1297373534"; String consumerId = "consumerId-166238287"; ListServicesPagedResponse pagedListResponse = client.listServices(producerProjectId, consumerId); List<ManagedService> resources = Lists.newArrayList(pagedListResponse.iterateAll()); Assert.assertEquals(1, resources.size()); Assert.assertEquals(expectedResponse.getServicesList().get(0), resources.get(0)); List<String> actualRequests = mockService.getRequestPaths(); Assert.assertEquals(1, actualRequests.size()); String apiClientHeaderKey = mockService .getRequestHeaders() .get(ApiClientHeaderProvider.getDefaultApiClientHeaderKey()) .iterator() .next(); Assert.assertTrue( 
GaxHttpJsonProperties.getDefaultApiClientHeaderPattern() .matcher(apiClientHeaderKey) .matches()); } @Test public void listServicesExceptionTest() throws Exception { ApiException exception = ApiExceptionFactory.createException( new Exception(), FakeStatusCode.of(StatusCode.Code.INVALID_ARGUMENT), false); mockService.addException(exception); try { String producerProjectId = "producerProjectId-1297373534"; String consumerId = "consumerId-166238287"; client.listServices(producerProjectId, consumerId); Assert.fail("No exception raised"); } catch (InvalidArgumentException e) { // Expected exception. } } @Test public void getServiceTest() throws Exception { ManagedService expectedResponse = ManagedService.newBuilder() .setServiceName("serviceName-1928572192") .setProducerProjectId("producerProjectId-1297373534") .build(); mockService.addResponse(expectedResponse); String serviceName = "serviceName-4234"; ManagedService actualResponse = client.getService(serviceName); Assert.assertEquals(expectedResponse, actualResponse); List<String> actualRequests = mockService.getRequestPaths(); Assert.assertEquals(1, actualRequests.size()); String apiClientHeaderKey = mockService .getRequestHeaders() .get(ApiClientHeaderProvider.getDefaultApiClientHeaderKey()) .iterator() .next(); Assert.assertTrue( GaxHttpJsonProperties.getDefaultApiClientHeaderPattern() .matcher(apiClientHeaderKey) .matches()); } @Test public void getServiceExceptionTest() throws Exception { ApiException exception = ApiExceptionFactory.createException( new Exception(), FakeStatusCode.of(StatusCode.Code.INVALID_ARGUMENT), false); mockService.addException(exception); try { String serviceName = "serviceName-4234"; client.getService(serviceName); Assert.fail("No exception raised"); } catch (InvalidArgumentException e) { // Expected exception. 
} } @Test public void createServiceTest() throws Exception { ManagedService expectedResponse = ManagedService.newBuilder() .setServiceName("serviceName-1928572192") .setProducerProjectId("producerProjectId-1297373534") .build(); Operation resultOperation = Operation.newBuilder() .setName("createServiceTest") .setDone(true) .setResponse(Any.pack(expectedResponse)) .build(); mockService.addResponse(resultOperation); ManagedService service = ManagedService.newBuilder().build(); ManagedService actualResponse = client.createServiceAsync(service).get(); Assert.assertEquals(expectedResponse, actualResponse); List<String> actualRequests = mockService.getRequestPaths(); Assert.assertEquals(1, actualRequests.size()); String apiClientHeaderKey = mockService .getRequestHeaders() .get(ApiClientHeaderProvider.getDefaultApiClientHeaderKey()) .iterator() .next(); Assert.assertTrue( GaxHttpJsonProperties.getDefaultApiClientHeaderPattern() .matcher(apiClientHeaderKey) .matches()); } @Test public void createServiceExceptionTest() throws Exception { ApiException exception = ApiExceptionFactory.createException( new Exception(), FakeStatusCode.of(StatusCode.Code.INVALID_ARGUMENT), false); mockService.addException(exception); try { ManagedService service = ManagedService.newBuilder().build(); client.createServiceAsync(service).get(); Assert.fail("No exception raised"); } catch (ExecutionException e) { } } @Test public void deleteServiceTest() throws Exception { Empty expectedResponse = Empty.newBuilder().build(); Operation resultOperation = Operation.newBuilder() .setName("deleteServiceTest") .setDone(true) .setResponse(Any.pack(expectedResponse)) .build(); mockService.addResponse(resultOperation); String serviceName = "serviceName-4234"; client.deleteServiceAsync(serviceName).get(); List<String> actualRequests = mockService.getRequestPaths(); Assert.assertEquals(1, actualRequests.size()); String apiClientHeaderKey = mockService .getRequestHeaders() 
.get(ApiClientHeaderProvider.getDefaultApiClientHeaderKey()) .iterator() .next(); Assert.assertTrue( GaxHttpJsonProperties.getDefaultApiClientHeaderPattern() .matcher(apiClientHeaderKey) .matches()); } @Test public void deleteServiceExceptionTest() throws Exception { ApiException exception = ApiExceptionFactory.createException( new Exception(), FakeStatusCode.of(StatusCode.Code.INVALID_ARGUMENT), false); mockService.addException(exception); try { String serviceName = "serviceName-4234"; client.deleteServiceAsync(serviceName).get(); Assert.fail("No exception raised"); } catch (ExecutionException e) { } } @Test public void undeleteServiceTest() throws Exception { UndeleteServiceResponse expectedResponse = UndeleteServiceResponse.newBuilder() .setService(ManagedService.newBuilder().build()) .build(); Operation resultOperation = Operation.newBuilder() .setName("undeleteServiceTest") .setDone(true) .setResponse(Any.pack(expectedResponse)) .build(); mockService.addResponse(resultOperation); String serviceName = "serviceName-4234"; UndeleteServiceResponse actualResponse = client.undeleteServiceAsync(serviceName).get(); Assert.assertEquals(expectedResponse, actualResponse); List<String> actualRequests = mockService.getRequestPaths(); Assert.assertEquals(1, actualRequests.size()); String apiClientHeaderKey = mockService .getRequestHeaders() .get(ApiClientHeaderProvider.getDefaultApiClientHeaderKey()) .iterator() .next(); Assert.assertTrue( GaxHttpJsonProperties.getDefaultApiClientHeaderPattern() .matcher(apiClientHeaderKey) .matches()); } @Test public void undeleteServiceExceptionTest() throws Exception { ApiException exception = ApiExceptionFactory.createException( new Exception(), FakeStatusCode.of(StatusCode.Code.INVALID_ARGUMENT), false); mockService.addException(exception); try { String serviceName = "serviceName-4234"; client.undeleteServiceAsync(serviceName).get(); Assert.fail("No exception raised"); } catch (ExecutionException e) { } } @Test public void 
listServiceConfigsTest() throws Exception { Service responsesElement = Service.newBuilder().build(); ListServiceConfigsResponse expectedResponse = ListServiceConfigsResponse.newBuilder() .setNextPageToken("") .addAllServiceConfigs(Arrays.asList(responsesElement)) .build(); mockService.addResponse(expectedResponse); String serviceName = "serviceName-4234"; ListServiceConfigsPagedResponse pagedListResponse = client.listServiceConfigs(serviceName); List<Service> resources = Lists.newArrayList(pagedListResponse.iterateAll()); Assert.assertEquals(1, resources.size()); Assert.assertEquals(expectedResponse.getServiceConfigsList().get(0), resources.get(0)); List<String> actualRequests = mockService.getRequestPaths(); Assert.assertEquals(1, actualRequests.size()); String apiClientHeaderKey = mockService .getRequestHeaders() .get(ApiClientHeaderProvider.getDefaultApiClientHeaderKey()) .iterator() .next(); Assert.assertTrue( GaxHttpJsonProperties.getDefaultApiClientHeaderPattern() .matcher(apiClientHeaderKey) .matches()); } @Test public void listServiceConfigsExceptionTest() throws Exception { ApiException exception = ApiExceptionFactory.createException( new Exception(), FakeStatusCode.of(StatusCode.Code.INVALID_ARGUMENT), false); mockService.addException(exception); try { String serviceName = "serviceName-4234"; client.listServiceConfigs(serviceName); Assert.fail("No exception raised"); } catch (InvalidArgumentException e) { // Expected exception. 
} } @Test public void getServiceConfigTest() throws Exception { Service expectedResponse = Service.newBuilder() .setName("name3373707") .setTitle("title110371416") .setProducerProjectId("producerProjectId-1297373534") .setId("id3355") .addAllApis(new ArrayList<Api>()) .addAllTypes(new ArrayList<Type>()) .addAllEnums(new ArrayList<Enum>()) .setDocumentation(Documentation.newBuilder().build()) .setBackend(Backend.newBuilder().build()) .setHttp(Http.newBuilder().build()) .setQuota(Quota.newBuilder().build()) .setAuthentication(Authentication.newBuilder().build()) .setContext(Context.newBuilder().build()) .setUsage(Usage.newBuilder().build()) .addAllEndpoints(new ArrayList<Endpoint>()) .setControl(Control.newBuilder().build()) .addAllLogs(new ArrayList<LogDescriptor>()) .addAllMetrics(new ArrayList<MetricDescriptor>()) .addAllMonitoredResources(new ArrayList<MonitoredResourceDescriptor>()) .setBilling(Billing.newBuilder().build()) .setLogging(Logging.newBuilder().build()) .setMonitoring(Monitoring.newBuilder().build()) .setSystemParameters(SystemParameters.newBuilder().build()) .setSourceInfo(SourceInfo.newBuilder().build()) .setPublishing(Publishing.newBuilder().build()) .setConfigVersion(UInt32Value.newBuilder().build()) .build(); mockService.addResponse(expectedResponse); String serviceName = "serviceName-4234"; String configId = "configId-1859"; GetServiceConfigRequest.ConfigView view = GetServiceConfigRequest.ConfigView.forNumber(0); Service actualResponse = client.getServiceConfig(serviceName, configId, view); Assert.assertEquals(expectedResponse, actualResponse); List<String> actualRequests = mockService.getRequestPaths(); Assert.assertEquals(1, actualRequests.size()); String apiClientHeaderKey = mockService .getRequestHeaders() .get(ApiClientHeaderProvider.getDefaultApiClientHeaderKey()) .iterator() .next(); Assert.assertTrue( GaxHttpJsonProperties.getDefaultApiClientHeaderPattern() .matcher(apiClientHeaderKey) .matches()); } @Test public void 
getServiceConfigExceptionTest() throws Exception { ApiException exception = ApiExceptionFactory.createException( new Exception(), FakeStatusCode.of(StatusCode.Code.INVALID_ARGUMENT), false); mockService.addException(exception); try { String serviceName = "serviceName-4234"; String configId = "configId-1859"; GetServiceConfigRequest.ConfigView view = GetServiceConfigRequest.ConfigView.forNumber(0); client.getServiceConfig(serviceName, configId, view); Assert.fail("No exception raised"); } catch (InvalidArgumentException e) { // Expected exception. } } @Test public void createServiceConfigTest() throws Exception { Service expectedResponse = Service.newBuilder() .setName("name3373707") .setTitle("title110371416") .setProducerProjectId("producerProjectId-1297373534") .setId("id3355") .addAllApis(new ArrayList<Api>()) .addAllTypes(new ArrayList<Type>()) .addAllEnums(new ArrayList<Enum>()) .setDocumentation(Documentation.newBuilder().build()) .setBackend(Backend.newBuilder().build()) .setHttp(Http.newBuilder().build()) .setQuota(Quota.newBuilder().build()) .setAuthentication(Authentication.newBuilder().build()) .setContext(Context.newBuilder().build()) .setUsage(Usage.newBuilder().build()) .addAllEndpoints(new ArrayList<Endpoint>()) .setControl(Control.newBuilder().build()) .addAllLogs(new ArrayList<LogDescriptor>()) .addAllMetrics(new ArrayList<MetricDescriptor>()) .addAllMonitoredResources(new ArrayList<MonitoredResourceDescriptor>()) .setBilling(Billing.newBuilder().build()) .setLogging(Logging.newBuilder().build()) .setMonitoring(Monitoring.newBuilder().build()) .setSystemParameters(SystemParameters.newBuilder().build()) .setSourceInfo(SourceInfo.newBuilder().build()) .setPublishing(Publishing.newBuilder().build()) .setConfigVersion(UInt32Value.newBuilder().build()) .build(); mockService.addResponse(expectedResponse); String serviceName = "serviceName-4234"; Service serviceConfig = Service.newBuilder().build(); Service actualResponse = 
client.createServiceConfig(serviceName, serviceConfig); Assert.assertEquals(expectedResponse, actualResponse); List<String> actualRequests = mockService.getRequestPaths(); Assert.assertEquals(1, actualRequests.size()); String apiClientHeaderKey = mockService .getRequestHeaders() .get(ApiClientHeaderProvider.getDefaultApiClientHeaderKey()) .iterator() .next(); Assert.assertTrue( GaxHttpJsonProperties.getDefaultApiClientHeaderPattern() .matcher(apiClientHeaderKey) .matches()); } @Test public void createServiceConfigExceptionTest() throws Exception { ApiException exception = ApiExceptionFactory.createException( new Exception(), FakeStatusCode.of(StatusCode.Code.INVALID_ARGUMENT), false); mockService.addException(exception); try { String serviceName = "serviceName-4234"; Service serviceConfig = Service.newBuilder().build(); client.createServiceConfig(serviceName, serviceConfig); Assert.fail("No exception raised"); } catch (InvalidArgumentException e) { // Expected exception. } } @Test public void submitConfigSourceTest() throws Exception { SubmitConfigSourceResponse expectedResponse = SubmitConfigSourceResponse.newBuilder() .setServiceConfig(Service.newBuilder().build()) .build(); Operation resultOperation = Operation.newBuilder() .setName("submitConfigSourceTest") .setDone(true) .setResponse(Any.pack(expectedResponse)) .build(); mockService.addResponse(resultOperation); String serviceName = "serviceName-4234"; ConfigSource configSource = ConfigSource.newBuilder().build(); boolean validateOnly = true; SubmitConfigSourceResponse actualResponse = client.submitConfigSourceAsync(serviceName, configSource, validateOnly).get(); Assert.assertEquals(expectedResponse, actualResponse); List<String> actualRequests = mockService.getRequestPaths(); Assert.assertEquals(1, actualRequests.size()); String apiClientHeaderKey = mockService .getRequestHeaders() .get(ApiClientHeaderProvider.getDefaultApiClientHeaderKey()) .iterator() .next(); Assert.assertTrue( 
GaxHttpJsonProperties.getDefaultApiClientHeaderPattern() .matcher(apiClientHeaderKey) .matches()); } @Test public void submitConfigSourceExceptionTest() throws Exception { ApiException exception = ApiExceptionFactory.createException( new Exception(), FakeStatusCode.of(StatusCode.Code.INVALID_ARGUMENT), false); mockService.addException(exception); try { String serviceName = "serviceName-4234"; ConfigSource configSource = ConfigSource.newBuilder().build(); boolean validateOnly = true; client.submitConfigSourceAsync(serviceName, configSource, validateOnly).get(); Assert.fail("No exception raised"); } catch (ExecutionException e) { } } @Test public void listServiceRolloutsTest() throws Exception { Rollout responsesElement = Rollout.newBuilder().build(); ListServiceRolloutsResponse expectedResponse = ListServiceRolloutsResponse.newBuilder() .setNextPageToken("") .addAllRollouts(Arrays.asList(responsesElement)) .build(); mockService.addResponse(expectedResponse); String serviceName = "serviceName-4234"; String filter = "filter-1274492040"; ListServiceRolloutsPagedResponse pagedListResponse = client.listServiceRollouts(serviceName, filter); List<Rollout> resources = Lists.newArrayList(pagedListResponse.iterateAll()); Assert.assertEquals(1, resources.size()); Assert.assertEquals(expectedResponse.getRolloutsList().get(0), resources.get(0)); List<String> actualRequests = mockService.getRequestPaths(); Assert.assertEquals(1, actualRequests.size()); String apiClientHeaderKey = mockService .getRequestHeaders() .get(ApiClientHeaderProvider.getDefaultApiClientHeaderKey()) .iterator() .next(); Assert.assertTrue( GaxHttpJsonProperties.getDefaultApiClientHeaderPattern() .matcher(apiClientHeaderKey) .matches()); } @Test public void listServiceRolloutsExceptionTest() throws Exception { ApiException exception = ApiExceptionFactory.createException( new Exception(), FakeStatusCode.of(StatusCode.Code.INVALID_ARGUMENT), false); mockService.addException(exception); try { String serviceName 
= "serviceName-4234"; String filter = "filter-1274492040"; client.listServiceRollouts(serviceName, filter); Assert.fail("No exception raised"); } catch (InvalidArgumentException e) { // Expected exception. } } @Test public void getServiceRolloutTest() throws Exception { Rollout expectedResponse = Rollout.newBuilder() .setRolloutId("rolloutId551248556") .setCreateTime(Timestamp.newBuilder().build()) .setCreatedBy("createdBy598371679") .setServiceName("serviceName-1928572192") .build(); mockService.addResponse(expectedResponse); String serviceName = "serviceName-4234"; String rolloutId = "rolloutId-3906"; Rollout actualResponse = client.getServiceRollout(serviceName, rolloutId); Assert.assertEquals(expectedResponse, actualResponse); List<String> actualRequests = mockService.getRequestPaths(); Assert.assertEquals(1, actualRequests.size()); String apiClientHeaderKey = mockService .getRequestHeaders() .get(ApiClientHeaderProvider.getDefaultApiClientHeaderKey()) .iterator() .next(); Assert.assertTrue( GaxHttpJsonProperties.getDefaultApiClientHeaderPattern() .matcher(apiClientHeaderKey) .matches()); } @Test public void getServiceRolloutExceptionTest() throws Exception { ApiException exception = ApiExceptionFactory.createException( new Exception(), FakeStatusCode.of(StatusCode.Code.INVALID_ARGUMENT), false); mockService.addException(exception); try { String serviceName = "serviceName-4234"; String rolloutId = "rolloutId-3906"; client.getServiceRollout(serviceName, rolloutId); Assert.fail("No exception raised"); } catch (InvalidArgumentException e) { // Expected exception. 
} } @Test public void createServiceRolloutTest() throws Exception { Rollout expectedResponse = Rollout.newBuilder() .setRolloutId("rolloutId551248556") .setCreateTime(Timestamp.newBuilder().build()) .setCreatedBy("createdBy598371679") .setServiceName("serviceName-1928572192") .build(); Operation resultOperation = Operation.newBuilder() .setName("createServiceRolloutTest") .setDone(true) .setResponse(Any.pack(expectedResponse)) .build(); mockService.addResponse(resultOperation); String serviceName = "serviceName-4234"; Rollout rollout = Rollout.newBuilder().build(); Rollout actualResponse = client.createServiceRolloutAsync(serviceName, rollout).get(); Assert.assertEquals(expectedResponse, actualResponse); List<String> actualRequests = mockService.getRequestPaths(); Assert.assertEquals(1, actualRequests.size()); String apiClientHeaderKey = mockService .getRequestHeaders() .get(ApiClientHeaderProvider.getDefaultApiClientHeaderKey()) .iterator() .next(); Assert.assertTrue( GaxHttpJsonProperties.getDefaultApiClientHeaderPattern() .matcher(apiClientHeaderKey) .matches()); } @Test public void createServiceRolloutExceptionTest() throws Exception { ApiException exception = ApiExceptionFactory.createException( new Exception(), FakeStatusCode.of(StatusCode.Code.INVALID_ARGUMENT), false); mockService.addException(exception); try { String serviceName = "serviceName-4234"; Rollout rollout = Rollout.newBuilder().build(); client.createServiceRolloutAsync(serviceName, rollout).get(); Assert.fail("No exception raised"); } catch (ExecutionException e) { } } @Test public void generateConfigReportTest() throws Exception { GenerateConfigReportResponse expectedResponse = GenerateConfigReportResponse.newBuilder() .setServiceName("serviceName-1928572192") .setId("id3355") .addAllChangeReports(new ArrayList<ChangeReport>()) .addAllDiagnostics(new ArrayList<Diagnostic>()) .build(); mockService.addResponse(expectedResponse); Any newConfig = Any.newBuilder().build(); Any oldConfig = 
Any.newBuilder().build(); GenerateConfigReportResponse actualResponse = client.generateConfigReport(newConfig, oldConfig); Assert.assertEquals(expectedResponse, actualResponse); List<String> actualRequests = mockService.getRequestPaths(); Assert.assertEquals(1, actualRequests.size()); String apiClientHeaderKey = mockService .getRequestHeaders() .get(ApiClientHeaderProvider.getDefaultApiClientHeaderKey()) .iterator() .next(); Assert.assertTrue( GaxHttpJsonProperties.getDefaultApiClientHeaderPattern() .matcher(apiClientHeaderKey) .matches()); } @Test public void generateConfigReportExceptionTest() throws Exception { ApiException exception = ApiExceptionFactory.createException( new Exception(), FakeStatusCode.of(StatusCode.Code.INVALID_ARGUMENT), false); mockService.addException(exception); try { Any newConfig = Any.newBuilder().build(); Any oldConfig = Any.newBuilder().build(); client.generateConfigReport(newConfig, oldConfig); Assert.fail("No exception raised"); } catch (InvalidArgumentException e) { // Expected exception. 
} } @Ignore("See: https://github.com/googleapis/sdk-platform-java/issues/1839") @Test public void setIamPolicyTest() throws Exception { Policy expectedResponse = Policy.newBuilder() .setVersion(351608024) .addAllBindings(new ArrayList<Binding>()) .addAllAuditConfigs(new ArrayList<AuditConfig>()) .setEtag(ByteString.EMPTY) .build(); mockService.addResponse(expectedResponse); SetIamPolicyRequest request = SetIamPolicyRequest.newBuilder() .setResource("SetIamPolicyRequest1223629066".toString()) .setPolicy(Policy.newBuilder().build()) .setUpdateMask(FieldMask.newBuilder().build()) .build(); Policy actualResponse = client.setIamPolicy(request); Assert.assertEquals(expectedResponse, actualResponse); List<String> actualRequests = mockService.getRequestPaths(); Assert.assertEquals(1, actualRequests.size()); String apiClientHeaderKey = mockService .getRequestHeaders() .get(ApiClientHeaderProvider.getDefaultApiClientHeaderKey()) .iterator() .next(); Assert.assertTrue( GaxHttpJsonProperties.getDefaultApiClientHeaderPattern() .matcher(apiClientHeaderKey) .matches()); } @Test public void setIamPolicyExceptionTest() throws Exception { ApiException exception = ApiExceptionFactory.createException( new Exception(), FakeStatusCode.of(StatusCode.Code.INVALID_ARGUMENT), false); mockService.addException(exception); try { SetIamPolicyRequest request = SetIamPolicyRequest.newBuilder() .setResource("SetIamPolicyRequest1223629066".toString()) .setPolicy(Policy.newBuilder().build()) .setUpdateMask(FieldMask.newBuilder().build()) .build(); client.setIamPolicy(request); Assert.fail("No exception raised"); } catch (InvalidArgumentException e) { // Expected exception. 
} } @Ignore("See: https://github.com/googleapis/sdk-platform-java/issues/1839") @Test public void getIamPolicyTest() throws Exception { Policy expectedResponse = Policy.newBuilder() .setVersion(351608024) .addAllBindings(new ArrayList<Binding>()) .addAllAuditConfigs(new ArrayList<AuditConfig>()) .setEtag(ByteString.EMPTY) .build(); mockService.addResponse(expectedResponse); GetIamPolicyRequest request = GetIamPolicyRequest.newBuilder() .setResource("GetIamPolicyRequest-1527610370".toString()) .setOptions(GetPolicyOptions.newBuilder().build()) .build(); Policy actualResponse = client.getIamPolicy(request); Assert.assertEquals(expectedResponse, actualResponse); List<String> actualRequests = mockService.getRequestPaths(); Assert.assertEquals(1, actualRequests.size()); String apiClientHeaderKey = mockService .getRequestHeaders() .get(ApiClientHeaderProvider.getDefaultApiClientHeaderKey()) .iterator() .next(); Assert.assertTrue( GaxHttpJsonProperties.getDefaultApiClientHeaderPattern() .matcher(apiClientHeaderKey) .matches()); } @Test public void getIamPolicyExceptionTest() throws Exception { ApiException exception = ApiExceptionFactory.createException( new Exception(), FakeStatusCode.of(StatusCode.Code.INVALID_ARGUMENT), false); mockService.addException(exception); try { GetIamPolicyRequest request = GetIamPolicyRequest.newBuilder() .setResource("GetIamPolicyRequest-1527610370".toString()) .setOptions(GetPolicyOptions.newBuilder().build()) .build(); client.getIamPolicy(request); Assert.fail("No exception raised"); } catch (InvalidArgumentException e) { // Expected exception. 
} } @Ignore("See: https://github.com/googleapis/sdk-platform-java/issues/1839") @Test public void testIamPermissionsTest() throws Exception { TestIamPermissionsResponse expectedResponse = TestIamPermissionsResponse.newBuilder().addAllPermissions(new ArrayList<String>()).build(); mockService.addResponse(expectedResponse); TestIamPermissionsRequest request = TestIamPermissionsRequest.newBuilder() .setResource("TestIamPermissionsRequest942398222".toString()) .addAllPermissions(new ArrayList<String>()) .build(); TestIamPermissionsResponse actualResponse = client.testIamPermissions(request); Assert.assertEquals(expectedResponse, actualResponse); List<String> actualRequests = mockService.getRequestPaths(); Assert.assertEquals(1, actualRequests.size()); String apiClientHeaderKey = mockService .getRequestHeaders() .get(ApiClientHeaderProvider.getDefaultApiClientHeaderKey()) .iterator() .next(); Assert.assertTrue( GaxHttpJsonProperties.getDefaultApiClientHeaderPattern() .matcher(apiClientHeaderKey) .matches()); } @Test public void testIamPermissionsExceptionTest() throws Exception { ApiException exception = ApiExceptionFactory.createException( new Exception(), FakeStatusCode.of(StatusCode.Code.INVALID_ARGUMENT), false); mockService.addException(exception); try { TestIamPermissionsRequest request = TestIamPermissionsRequest.newBuilder() .setResource("TestIamPermissionsRequest942398222".toString()) .addAllPermissions(new ArrayList<String>()) .build(); client.testIamPermissions(request); Assert.fail("No exception raised"); } catch (InvalidArgumentException e) { // Expected exception. } } }
googleapis/google-cloud-java
36,208
java-beyondcorp-appgateways/proto-google-cloud-beyondcorp-appgateways-v1/src/main/java/com/google/cloud/beyondcorp/appgateways/v1/DeleteAppGatewayRequest.java
/* * Copyright 2025 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ // Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/cloud/beyondcorp/appgateways/v1/app_gateways_service.proto // Protobuf Java Version: 3.25.8 package com.google.cloud.beyondcorp.appgateways.v1; /** * * * <pre> * Request message for BeyondCorp.DeleteAppGateway. * </pre> * * Protobuf type {@code google.cloud.beyondcorp.appgateways.v1.DeleteAppGatewayRequest} */ public final class DeleteAppGatewayRequest extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.cloud.beyondcorp.appgateways.v1.DeleteAppGatewayRequest) DeleteAppGatewayRequestOrBuilder { private static final long serialVersionUID = 0L; // Use DeleteAppGatewayRequest.newBuilder() to construct. 
private DeleteAppGatewayRequest(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private DeleteAppGatewayRequest() { name_ = ""; requestId_ = ""; } @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance(UnusedPrivateParameter unused) { return new DeleteAppGatewayRequest(); } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.beyondcorp.appgateways.v1.AppGatewaysServiceProto .internal_static_google_cloud_beyondcorp_appgateways_v1_DeleteAppGatewayRequest_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.beyondcorp.appgateways.v1.AppGatewaysServiceProto .internal_static_google_cloud_beyondcorp_appgateways_v1_DeleteAppGatewayRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.beyondcorp.appgateways.v1.DeleteAppGatewayRequest.class, com.google.cloud.beyondcorp.appgateways.v1.DeleteAppGatewayRequest.Builder.class); } public static final int NAME_FIELD_NUMBER = 1; @SuppressWarnings("serial") private volatile java.lang.Object name_ = ""; /** * * * <pre> * Required. BeyondCorp AppGateway name using the form: * `projects/{project_id}/locations/{location_id}/appGateways/{app_gateway_id}` * </pre> * * <code> * string name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @return The name. */ @java.lang.Override public java.lang.String getName() { java.lang.Object ref = name_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); name_ = s; return s; } } /** * * * <pre> * Required. 
BeyondCorp AppGateway name using the form: * `projects/{project_id}/locations/{location_id}/appGateways/{app_gateway_id}` * </pre> * * <code> * string name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @return The bytes for name. */ @java.lang.Override public com.google.protobuf.ByteString getNameBytes() { java.lang.Object ref = name_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); name_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } public static final int REQUEST_ID_FIELD_NUMBER = 2; @SuppressWarnings("serial") private volatile java.lang.Object requestId_ = ""; /** * * * <pre> * Optional. An optional request ID to identify requests. Specify a unique * request ID so that if you must retry your request, the server will know to * ignore the request if it has already been completed. The server will * guarantee that for at least 60 minutes after the first request. * * For example, consider a situation where you make an initial request and t * he request times out. If you make the request again with the same request * ID, the server can check if original operation with the same request ID * was received, and if so, will ignore the second request. This prevents * clients from accidentally creating duplicate commitments. * * The request ID must be a valid UUID with the exception that zero UUID is * not supported (00000000-0000-0000-0000-000000000000). * </pre> * * <code>string request_id = 2 [(.google.api.field_behavior) = OPTIONAL];</code> * * @return The requestId. 
*/ @java.lang.Override public java.lang.String getRequestId() { java.lang.Object ref = requestId_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); requestId_ = s; return s; } } /** * * * <pre> * Optional. An optional request ID to identify requests. Specify a unique * request ID so that if you must retry your request, the server will know to * ignore the request if it has already been completed. The server will * guarantee that for at least 60 minutes after the first request. * * For example, consider a situation where you make an initial request and t * he request times out. If you make the request again with the same request * ID, the server can check if original operation with the same request ID * was received, and if so, will ignore the second request. This prevents * clients from accidentally creating duplicate commitments. * * The request ID must be a valid UUID with the exception that zero UUID is * not supported (00000000-0000-0000-0000-000000000000). * </pre> * * <code>string request_id = 2 [(.google.api.field_behavior) = OPTIONAL];</code> * * @return The bytes for requestId. */ @java.lang.Override public com.google.protobuf.ByteString getRequestIdBytes() { java.lang.Object ref = requestId_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); requestId_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } public static final int VALIDATE_ONLY_FIELD_NUMBER = 3; private boolean validateOnly_ = false; /** * * * <pre> * Optional. If set, validates request by executing a dry-run which would not * alter the resource in any way. * </pre> * * <code>bool validate_only = 3 [(.google.api.field_behavior) = OPTIONAL];</code> * * @return The validateOnly. 
*/ @java.lang.Override public boolean getValidateOnly() { return validateOnly_; } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(name_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 1, name_); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(requestId_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 2, requestId_); } if (validateOnly_ != false) { output.writeBool(3, validateOnly_); } getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(name_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, name_); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(requestId_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, requestId_); } if (validateOnly_ != false) { size += com.google.protobuf.CodedOutputStream.computeBoolSize(3, validateOnly_); } size += getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof com.google.cloud.beyondcorp.appgateways.v1.DeleteAppGatewayRequest)) { return super.equals(obj); } com.google.cloud.beyondcorp.appgateways.v1.DeleteAppGatewayRequest other = (com.google.cloud.beyondcorp.appgateways.v1.DeleteAppGatewayRequest) obj; if (!getName().equals(other.getName())) return false; if (!getRequestId().equals(other.getRequestId())) return false; if (getValidateOnly() != 
other.getValidateOnly()) return false; if (!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); hash = (37 * hash) + NAME_FIELD_NUMBER; hash = (53 * hash) + getName().hashCode(); hash = (37 * hash) + REQUEST_ID_FIELD_NUMBER; hash = (53 * hash) + getRequestId().hashCode(); hash = (37 * hash) + VALIDATE_ONLY_FIELD_NUMBER; hash = (53 * hash) + com.google.protobuf.Internal.hashBoolean(getValidateOnly()); hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static com.google.cloud.beyondcorp.appgateways.v1.DeleteAppGatewayRequest parseFrom( java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.beyondcorp.appgateways.v1.DeleteAppGatewayRequest parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.beyondcorp.appgateways.v1.DeleteAppGatewayRequest parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.beyondcorp.appgateways.v1.DeleteAppGatewayRequest parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.beyondcorp.appgateways.v1.DeleteAppGatewayRequest parseFrom( byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.beyondcorp.appgateways.v1.DeleteAppGatewayRequest 
parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.beyondcorp.appgateways.v1.DeleteAppGatewayRequest parseFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.beyondcorp.appgateways.v1.DeleteAppGatewayRequest parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.beyondcorp.appgateways.v1.DeleteAppGatewayRequest parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); } public static com.google.cloud.beyondcorp.appgateways.v1.DeleteAppGatewayRequest parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.beyondcorp.appgateways.v1.DeleteAppGatewayRequest parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.beyondcorp.appgateways.v1.DeleteAppGatewayRequest parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public 
static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder( com.google.cloud.beyondcorp.appgateways.v1.DeleteAppGatewayRequest prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * * * <pre> * Request message for BeyondCorp.DeleteAppGateway. * </pre> * * Protobuf type {@code google.cloud.beyondcorp.appgateways.v1.DeleteAppGatewayRequest} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:google.cloud.beyondcorp.appgateways.v1.DeleteAppGatewayRequest) com.google.cloud.beyondcorp.appgateways.v1.DeleteAppGatewayRequestOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.beyondcorp.appgateways.v1.AppGatewaysServiceProto .internal_static_google_cloud_beyondcorp_appgateways_v1_DeleteAppGatewayRequest_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.beyondcorp.appgateways.v1.AppGatewaysServiceProto .internal_static_google_cloud_beyondcorp_appgateways_v1_DeleteAppGatewayRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.beyondcorp.appgateways.v1.DeleteAppGatewayRequest.class, com.google.cloud.beyondcorp.appgateways.v1.DeleteAppGatewayRequest.Builder.class); } // Construct using // com.google.cloud.beyondcorp.appgateways.v1.DeleteAppGatewayRequest.newBuilder() private Builder() {} private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); } @java.lang.Override public 
Builder clear() { super.clear(); bitField0_ = 0; name_ = ""; requestId_ = ""; validateOnly_ = false; return this; } @java.lang.Override public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return com.google.cloud.beyondcorp.appgateways.v1.AppGatewaysServiceProto .internal_static_google_cloud_beyondcorp_appgateways_v1_DeleteAppGatewayRequest_descriptor; } @java.lang.Override public com.google.cloud.beyondcorp.appgateways.v1.DeleteAppGatewayRequest getDefaultInstanceForType() { return com.google.cloud.beyondcorp.appgateways.v1.DeleteAppGatewayRequest .getDefaultInstance(); } @java.lang.Override public com.google.cloud.beyondcorp.appgateways.v1.DeleteAppGatewayRequest build() { com.google.cloud.beyondcorp.appgateways.v1.DeleteAppGatewayRequest result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public com.google.cloud.beyondcorp.appgateways.v1.DeleteAppGatewayRequest buildPartial() { com.google.cloud.beyondcorp.appgateways.v1.DeleteAppGatewayRequest result = new com.google.cloud.beyondcorp.appgateways.v1.DeleteAppGatewayRequest(this); if (bitField0_ != 0) { buildPartial0(result); } onBuilt(); return result; } private void buildPartial0( com.google.cloud.beyondcorp.appgateways.v1.DeleteAppGatewayRequest result) { int from_bitField0_ = bitField0_; if (((from_bitField0_ & 0x00000001) != 0)) { result.name_ = name_; } if (((from_bitField0_ & 0x00000002) != 0)) { result.requestId_ = requestId_; } if (((from_bitField0_ & 0x00000004) != 0)) { result.validateOnly_ = validateOnly_; } } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } 
@java.lang.Override public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof com.google.cloud.beyondcorp.appgateways.v1.DeleteAppGatewayRequest) { return mergeFrom( (com.google.cloud.beyondcorp.appgateways.v1.DeleteAppGatewayRequest) other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom( com.google.cloud.beyondcorp.appgateways.v1.DeleteAppGatewayRequest other) { if (other == com.google.cloud.beyondcorp.appgateways.v1.DeleteAppGatewayRequest .getDefaultInstance()) return this; if (!other.getName().isEmpty()) { name_ = other.name_; bitField0_ |= 0x00000001; onChanged(); } if (!other.getRequestId().isEmpty()) { requestId_ = other.requestId_; bitField0_ |= 0x00000002; onChanged(); } if (other.getValidateOnly() != false) { setValidateOnly(other.getValidateOnly()); } this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { name_ = input.readStringRequireUtf8(); bitField0_ |= 0x00000001; break; } // case 10 case 18: { requestId_ = 
input.readStringRequireUtf8(); bitField0_ |= 0x00000002; break; } // case 18 case 24: { validateOnly_ = input.readBool(); bitField0_ |= 0x00000004; break; } // case 24 default: { if (!super.parseUnknownField(input, extensionRegistry, tag)) { done = true; // was an endgroup tag } break; } // default: } // switch (tag) } // while (!done) } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.unwrapIOException(); } finally { onChanged(); } // finally return this; } private int bitField0_; private java.lang.Object name_ = ""; /** * * * <pre> * Required. BeyondCorp AppGateway name using the form: * `projects/{project_id}/locations/{location_id}/appGateways/{app_gateway_id}` * </pre> * * <code> * string name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @return The name. */ public java.lang.String getName() { java.lang.Object ref = name_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); name_ = s; return s; } else { return (java.lang.String) ref; } } /** * * * <pre> * Required. BeyondCorp AppGateway name using the form: * `projects/{project_id}/locations/{location_id}/appGateways/{app_gateway_id}` * </pre> * * <code> * string name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @return The bytes for name. */ public com.google.protobuf.ByteString getNameBytes() { java.lang.Object ref = name_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); name_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * * * <pre> * Required. 
BeyondCorp AppGateway name using the form: * `projects/{project_id}/locations/{location_id}/appGateways/{app_gateway_id}` * </pre> * * <code> * string name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @param value The name to set. * @return This builder for chaining. */ public Builder setName(java.lang.String value) { if (value == null) { throw new NullPointerException(); } name_ = value; bitField0_ |= 0x00000001; onChanged(); return this; } /** * * * <pre> * Required. BeyondCorp AppGateway name using the form: * `projects/{project_id}/locations/{location_id}/appGateways/{app_gateway_id}` * </pre> * * <code> * string name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @return This builder for chaining. */ public Builder clearName() { name_ = getDefaultInstance().getName(); bitField0_ = (bitField0_ & ~0x00000001); onChanged(); return this; } /** * * * <pre> * Required. BeyondCorp AppGateway name using the form: * `projects/{project_id}/locations/{location_id}/appGateways/{app_gateway_id}` * </pre> * * <code> * string name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @param value The bytes for name to set. * @return This builder for chaining. */ public Builder setNameBytes(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); name_ = value; bitField0_ |= 0x00000001; onChanged(); return this; } private java.lang.Object requestId_ = ""; /** * * * <pre> * Optional. An optional request ID to identify requests. Specify a unique * request ID so that if you must retry your request, the server will know to * ignore the request if it has already been completed. The server will * guarantee that for at least 60 minutes after the first request. 
* * For example, consider a situation where you make an initial request and t * he request times out. If you make the request again with the same request * ID, the server can check if original operation with the same request ID * was received, and if so, will ignore the second request. This prevents * clients from accidentally creating duplicate commitments. * * The request ID must be a valid UUID with the exception that zero UUID is * not supported (00000000-0000-0000-0000-000000000000). * </pre> * * <code>string request_id = 2 [(.google.api.field_behavior) = OPTIONAL];</code> * * @return The requestId. */ public java.lang.String getRequestId() { java.lang.Object ref = requestId_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); requestId_ = s; return s; } else { return (java.lang.String) ref; } } /** * * * <pre> * Optional. An optional request ID to identify requests. Specify a unique * request ID so that if you must retry your request, the server will know to * ignore the request if it has already been completed. The server will * guarantee that for at least 60 minutes after the first request. * * For example, consider a situation where you make an initial request and t * he request times out. If you make the request again with the same request * ID, the server can check if original operation with the same request ID * was received, and if so, will ignore the second request. This prevents * clients from accidentally creating duplicate commitments. * * The request ID must be a valid UUID with the exception that zero UUID is * not supported (00000000-0000-0000-0000-000000000000). * </pre> * * <code>string request_id = 2 [(.google.api.field_behavior) = OPTIONAL];</code> * * @return The bytes for requestId. 
*/ public com.google.protobuf.ByteString getRequestIdBytes() { java.lang.Object ref = requestId_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); requestId_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * * * <pre> * Optional. An optional request ID to identify requests. Specify a unique * request ID so that if you must retry your request, the server will know to * ignore the request if it has already been completed. The server will * guarantee that for at least 60 minutes after the first request. * * For example, consider a situation where you make an initial request and t * he request times out. If you make the request again with the same request * ID, the server can check if original operation with the same request ID * was received, and if so, will ignore the second request. This prevents * clients from accidentally creating duplicate commitments. * * The request ID must be a valid UUID with the exception that zero UUID is * not supported (00000000-0000-0000-0000-000000000000). * </pre> * * <code>string request_id = 2 [(.google.api.field_behavior) = OPTIONAL];</code> * * @param value The requestId to set. * @return This builder for chaining. */ public Builder setRequestId(java.lang.String value) { if (value == null) { throw new NullPointerException(); } requestId_ = value; bitField0_ |= 0x00000002; onChanged(); return this; } /** * * * <pre> * Optional. An optional request ID to identify requests. Specify a unique * request ID so that if you must retry your request, the server will know to * ignore the request if it has already been completed. The server will * guarantee that for at least 60 minutes after the first request. * * For example, consider a situation where you make an initial request and t * he request times out. 
If you make the request again with the same request * ID, the server can check if original operation with the same request ID * was received, and if so, will ignore the second request. This prevents * clients from accidentally creating duplicate commitments. * * The request ID must be a valid UUID with the exception that zero UUID is * not supported (00000000-0000-0000-0000-000000000000). * </pre> * * <code>string request_id = 2 [(.google.api.field_behavior) = OPTIONAL];</code> * * @return This builder for chaining. */ public Builder clearRequestId() { requestId_ = getDefaultInstance().getRequestId(); bitField0_ = (bitField0_ & ~0x00000002); onChanged(); return this; } /** * * * <pre> * Optional. An optional request ID to identify requests. Specify a unique * request ID so that if you must retry your request, the server will know to * ignore the request if it has already been completed. The server will * guarantee that for at least 60 minutes after the first request. * * For example, consider a situation where you make an initial request and t * he request times out. If you make the request again with the same request * ID, the server can check if original operation with the same request ID * was received, and if so, will ignore the second request. This prevents * clients from accidentally creating duplicate commitments. * * The request ID must be a valid UUID with the exception that zero UUID is * not supported (00000000-0000-0000-0000-000000000000). * </pre> * * <code>string request_id = 2 [(.google.api.field_behavior) = OPTIONAL];</code> * * @param value The bytes for requestId to set. * @return This builder for chaining. */ public Builder setRequestIdBytes(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); requestId_ = value; bitField0_ |= 0x00000002; onChanged(); return this; } private boolean validateOnly_; /** * * * <pre> * Optional. 
If set, validates request by executing a dry-run which would not * alter the resource in any way. * </pre> * * <code>bool validate_only = 3 [(.google.api.field_behavior) = OPTIONAL];</code> * * @return The validateOnly. */ @java.lang.Override public boolean getValidateOnly() { return validateOnly_; } /** * * * <pre> * Optional. If set, validates request by executing a dry-run which would not * alter the resource in any way. * </pre> * * <code>bool validate_only = 3 [(.google.api.field_behavior) = OPTIONAL];</code> * * @param value The validateOnly to set. * @return This builder for chaining. */ public Builder setValidateOnly(boolean value) { validateOnly_ = value; bitField0_ |= 0x00000004; onChanged(); return this; } /** * * * <pre> * Optional. If set, validates request by executing a dry-run which would not * alter the resource in any way. * </pre> * * <code>bool validate_only = 3 [(.google.api.field_behavior) = OPTIONAL];</code> * * @return This builder for chaining. */ public Builder clearValidateOnly() { bitField0_ = (bitField0_ & ~0x00000004); validateOnly_ = false; onChanged(); return this; } @java.lang.Override public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:google.cloud.beyondcorp.appgateways.v1.DeleteAppGatewayRequest) } // @@protoc_insertion_point(class_scope:google.cloud.beyondcorp.appgateways.v1.DeleteAppGatewayRequest) private static final com.google.cloud.beyondcorp.appgateways.v1.DeleteAppGatewayRequest DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new com.google.cloud.beyondcorp.appgateways.v1.DeleteAppGatewayRequest(); } public static com.google.cloud.beyondcorp.appgateways.v1.DeleteAppGatewayRequest getDefaultInstance() { return 
DEFAULT_INSTANCE; } // closes getDefaultInstance() — its declaration starts on the previous line

  // Shared singleton parser. parsePartialFrom delegates to the Builder's
  // tag-by-tag mergeFrom loop and, on failure, attaches the partially-built
  // message to the thrown InvalidProtocolBufferException so callers can see
  // what was decoded before the error.
  private static final com.google.protobuf.Parser<DeleteAppGatewayRequest> PARSER =
      new com.google.protobuf.AbstractParser<DeleteAppGatewayRequest>() {
        @java.lang.Override
        public DeleteAppGatewayRequest parsePartialFrom(
            com.google.protobuf.CodedInputStream input,
            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
            throws com.google.protobuf.InvalidProtocolBufferException {
          Builder builder = newBuilder();
          try {
            builder.mergeFrom(input, extensionRegistry);
          } catch (com.google.protobuf.InvalidProtocolBufferException e) {
            throw e.setUnfinishedMessage(builder.buildPartial());
          } catch (com.google.protobuf.UninitializedMessageException e) {
            throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
          } catch (java.io.IOException e) {
            // Wrap plain I/O failures so the parser exposes a single checked
            // exception type (InvalidProtocolBufferException).
            throw new com.google.protobuf.InvalidProtocolBufferException(e)
                .setUnfinishedMessage(builder.buildPartial());
          }
          return builder.buildPartial();
        }
      };

  // Static accessor for the shared parser instance.
  public static com.google.protobuf.Parser<DeleteAppGatewayRequest> parser() {
    return PARSER;
  }

  @java.lang.Override
  public com.google.protobuf.Parser<DeleteAppGatewayRequest> getParserForType() {
    return PARSER;
  }

  @java.lang.Override
  public com.google.cloud.beyondcorp.appgateways.v1.DeleteAppGatewayRequest
      getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }
} // end of class DeleteAppGatewayRequest
googleapis/google-cloud-java
36,065
java-shopping-merchant-accounts/proto-google-shopping-merchant-accounts-v1/src/main/java/com/google/shopping/merchant/accounts/v1/CustomerService.java
/* * Copyright 2025 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ // Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/shopping/merchant/accounts/v1/customerservice.proto // Protobuf Java Version: 3.25.8 package com.google.shopping.merchant.accounts.v1; /** * * * <pre> * Customer service information. * </pre> * * Protobuf type {@code google.shopping.merchant.accounts.v1.CustomerService} */ public final class CustomerService extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.shopping.merchant.accounts.v1.CustomerService) CustomerServiceOrBuilder { private static final long serialVersionUID = 0L; // Use CustomerService.newBuilder() to construct. 
private CustomerService(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private CustomerService() { uri_ = ""; email_ = ""; } @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance(UnusedPrivateParameter unused) { return new CustomerService(); } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.shopping.merchant.accounts.v1.CustomerServiceProto .internal_static_google_shopping_merchant_accounts_v1_CustomerService_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.shopping.merchant.accounts.v1.CustomerServiceProto .internal_static_google_shopping_merchant_accounts_v1_CustomerService_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.shopping.merchant.accounts.v1.CustomerService.class, com.google.shopping.merchant.accounts.v1.CustomerService.Builder.class); } private int bitField0_; public static final int URI_FIELD_NUMBER = 1; @SuppressWarnings("serial") private volatile java.lang.Object uri_ = ""; /** * * * <pre> * Optional. The URI where customer service may be found. * </pre> * * <code>optional string uri = 1 [(.google.api.field_behavior) = OPTIONAL];</code> * * @return Whether the uri field is set. */ @java.lang.Override public boolean hasUri() { return ((bitField0_ & 0x00000001) != 0); } /** * * * <pre> * Optional. The URI where customer service may be found. * </pre> * * <code>optional string uri = 1 [(.google.api.field_behavior) = OPTIONAL];</code> * * @return The uri. */ @java.lang.Override public java.lang.String getUri() { java.lang.Object ref = uri_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); uri_ = s; return s; } } /** * * * <pre> * Optional. 
The URI where customer service may be found. * </pre> * * <code>optional string uri = 1 [(.google.api.field_behavior) = OPTIONAL];</code> * * @return The bytes for uri. */ @java.lang.Override public com.google.protobuf.ByteString getUriBytes() { java.lang.Object ref = uri_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); uri_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } public static final int EMAIL_FIELD_NUMBER = 2; @SuppressWarnings("serial") private volatile java.lang.Object email_ = ""; /** * * * <pre> * Optional. The email address where customer service may be reached. * </pre> * * <code>optional string email = 2 [(.google.api.field_behavior) = OPTIONAL];</code> * * @return Whether the email field is set. */ @java.lang.Override public boolean hasEmail() { return ((bitField0_ & 0x00000002) != 0); } /** * * * <pre> * Optional. The email address where customer service may be reached. * </pre> * * <code>optional string email = 2 [(.google.api.field_behavior) = OPTIONAL];</code> * * @return The email. */ @java.lang.Override public java.lang.String getEmail() { java.lang.Object ref = email_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); email_ = s; return s; } } /** * * * <pre> * Optional. The email address where customer service may be reached. * </pre> * * <code>optional string email = 2 [(.google.api.field_behavior) = OPTIONAL];</code> * * @return The bytes for email. 
*/ @java.lang.Override public com.google.protobuf.ByteString getEmailBytes() { java.lang.Object ref = email_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); email_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } public static final int PHONE_FIELD_NUMBER = 3; private com.google.type.PhoneNumber phone_; /** * * * <pre> * Optional. The phone number where customer service may be called. * </pre> * * <code>optional .google.type.PhoneNumber phone = 3 [(.google.api.field_behavior) = OPTIONAL]; * </code> * * @return Whether the phone field is set. */ @java.lang.Override public boolean hasPhone() { return ((bitField0_ & 0x00000004) != 0); } /** * * * <pre> * Optional. The phone number where customer service may be called. * </pre> * * <code>optional .google.type.PhoneNumber phone = 3 [(.google.api.field_behavior) = OPTIONAL]; * </code> * * @return The phone. */ @java.lang.Override public com.google.type.PhoneNumber getPhone() { return phone_ == null ? com.google.type.PhoneNumber.getDefaultInstance() : phone_; } /** * * * <pre> * Optional. The phone number where customer service may be called. * </pre> * * <code>optional .google.type.PhoneNumber phone = 3 [(.google.api.field_behavior) = OPTIONAL]; * </code> */ @java.lang.Override public com.google.type.PhoneNumberOrBuilder getPhoneOrBuilder() { return phone_ == null ? 
com.google.type.PhoneNumber.getDefaultInstance() : phone_; } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { if (((bitField0_ & 0x00000001) != 0)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 1, uri_); } if (((bitField0_ & 0x00000002) != 0)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 2, email_); } if (((bitField0_ & 0x00000004) != 0)) { output.writeMessage(3, getPhone()); } getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) != 0)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, uri_); } if (((bitField0_ & 0x00000002) != 0)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, email_); } if (((bitField0_ & 0x00000004) != 0)) { size += com.google.protobuf.CodedOutputStream.computeMessageSize(3, getPhone()); } size += getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof com.google.shopping.merchant.accounts.v1.CustomerService)) { return super.equals(obj); } com.google.shopping.merchant.accounts.v1.CustomerService other = (com.google.shopping.merchant.accounts.v1.CustomerService) obj; if (hasUri() != other.hasUri()) return false; if (hasUri()) { if (!getUri().equals(other.getUri())) return false; } if (hasEmail() != other.hasEmail()) return false; if (hasEmail()) { if (!getEmail().equals(other.getEmail())) return false; } if (hasPhone() != other.hasPhone()) return false; if (hasPhone()) { if 
(!getPhone().equals(other.getPhone())) return false; } if (!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); if (hasUri()) { hash = (37 * hash) + URI_FIELD_NUMBER; hash = (53 * hash) + getUri().hashCode(); } if (hasEmail()) { hash = (37 * hash) + EMAIL_FIELD_NUMBER; hash = (53 * hash) + getEmail().hashCode(); } if (hasPhone()) { hash = (37 * hash) + PHONE_FIELD_NUMBER; hash = (53 * hash) + getPhone().hashCode(); } hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static com.google.shopping.merchant.accounts.v1.CustomerService parseFrom( java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.shopping.merchant.accounts.v1.CustomerService parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.shopping.merchant.accounts.v1.CustomerService parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.shopping.merchant.accounts.v1.CustomerService parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.shopping.merchant.accounts.v1.CustomerService parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.shopping.merchant.accounts.v1.CustomerService parseFrom( byte[] data, 
com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.shopping.merchant.accounts.v1.CustomerService parseFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.shopping.merchant.accounts.v1.CustomerService parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } public static com.google.shopping.merchant.accounts.v1.CustomerService parseDelimitedFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); } public static com.google.shopping.merchant.accounts.v1.CustomerService parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException( PARSER, input, extensionRegistry); } public static com.google.shopping.merchant.accounts.v1.CustomerService parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.shopping.merchant.accounts.v1.CustomerService parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static 
Builder newBuilder(
      com.google.shopping.merchant.accounts.v1.CustomerService prototype) {
    // Builder seeded from an existing message; the "public static" modifiers
    // of this declaration are on the previous line. mergeFrom copies the
    // prototype's set fields into the new builder.
    return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
  }

  @java.lang.Override
  public Builder toBuilder() {
    // Avoid an unnecessary mergeFrom when this is the (all-defaults) default instance.
    return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
  }

  @java.lang.Override
  protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
    Builder builder = new Builder(parent);
    return builder;
  }

  /**
   *
   *
   * <pre>
   * Customer service information.
   * </pre>
   *
   * Protobuf type {@code google.shopping.merchant.accounts.v1.CustomerService}
   */
  public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
      implements
      // @@protoc_insertion_point(builder_implements:google.shopping.merchant.accounts.v1.CustomerService)
      com.google.shopping.merchant.accounts.v1.CustomerServiceOrBuilder {
    public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
      return com.google.shopping.merchant.accounts.v1.CustomerServiceProto
          .internal_static_google_shopping_merchant_accounts_v1_CustomerService_descriptor;
    }

    @java.lang.Override
    protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return com.google.shopping.merchant.accounts.v1.CustomerServiceProto
          .internal_static_google_shopping_merchant_accounts_v1_CustomerService_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              com.google.shopping.merchant.accounts.v1.CustomerService.class,
              com.google.shopping.merchant.accounts.v1.CustomerService.Builder.class);
    }

    // Construct using com.google.shopping.merchant.accounts.v1.CustomerService.newBuilder()
    private Builder() {
      maybeForceBuilderInitialization();
    }

    private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
      super(parent);
      maybeForceBuilderInitialization();
    }

    private void maybeForceBuilderInitialization() {
      // Eagerly creates the nested-message field builder when the protobuf
      // runtime flag is set — NOTE(review): presumably a test/debug mode of
      // the protobuf runtime; confirm against GeneratedMessageV3 docs.
      if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) {
        getPhoneFieldBuilder();
      }
    }
@java.lang.Override public Builder clear() { super.clear(); bitField0_ = 0; uri_ = ""; email_ = ""; phone_ = null; if (phoneBuilder_ != null) { phoneBuilder_.dispose(); phoneBuilder_ = null; } return this; } @java.lang.Override public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return com.google.shopping.merchant.accounts.v1.CustomerServiceProto .internal_static_google_shopping_merchant_accounts_v1_CustomerService_descriptor; } @java.lang.Override public com.google.shopping.merchant.accounts.v1.CustomerService getDefaultInstanceForType() { return com.google.shopping.merchant.accounts.v1.CustomerService.getDefaultInstance(); } @java.lang.Override public com.google.shopping.merchant.accounts.v1.CustomerService build() { com.google.shopping.merchant.accounts.v1.CustomerService result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public com.google.shopping.merchant.accounts.v1.CustomerService buildPartial() { com.google.shopping.merchant.accounts.v1.CustomerService result = new com.google.shopping.merchant.accounts.v1.CustomerService(this); if (bitField0_ != 0) { buildPartial0(result); } onBuilt(); return result; } private void buildPartial0(com.google.shopping.merchant.accounts.v1.CustomerService result) { int from_bitField0_ = bitField0_; int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000001) != 0)) { result.uri_ = uri_; to_bitField0_ |= 0x00000001; } if (((from_bitField0_ & 0x00000002) != 0)) { result.email_ = email_; to_bitField0_ |= 0x00000002; } if (((from_bitField0_ & 0x00000004) != 0)) { result.phone_ = phoneBuilder_ == null ? 
phone_ : phoneBuilder_.build(); to_bitField0_ |= 0x00000004; } result.bitField0_ |= to_bitField0_; } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof com.google.shopping.merchant.accounts.v1.CustomerService) { return mergeFrom((com.google.shopping.merchant.accounts.v1.CustomerService) other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(com.google.shopping.merchant.accounts.v1.CustomerService other) { if (other == com.google.shopping.merchant.accounts.v1.CustomerService.getDefaultInstance()) return this; if (other.hasUri()) { uri_ = other.uri_; bitField0_ |= 0x00000001; onChanged(); } if (other.hasEmail()) { email_ = other.email_; bitField0_ |= 0x00000002; onChanged(); } if (other.hasPhone()) { mergePhone(other.getPhone()); } this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite 
extensionRegistry) throws java.io.IOException { if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { uri_ = input.readStringRequireUtf8(); bitField0_ |= 0x00000001; break; } // case 10 case 18: { email_ = input.readStringRequireUtf8(); bitField0_ |= 0x00000002; break; } // case 18 case 26: { input.readMessage(getPhoneFieldBuilder().getBuilder(), extensionRegistry); bitField0_ |= 0x00000004; break; } // case 26 default: { if (!super.parseUnknownField(input, extensionRegistry, tag)) { done = true; // was an endgroup tag } break; } // default: } // switch (tag) } // while (!done) } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.unwrapIOException(); } finally { onChanged(); } // finally return this; } private int bitField0_; private java.lang.Object uri_ = ""; /** * * * <pre> * Optional. The URI where customer service may be found. * </pre> * * <code>optional string uri = 1 [(.google.api.field_behavior) = OPTIONAL];</code> * * @return Whether the uri field is set. */ public boolean hasUri() { return ((bitField0_ & 0x00000001) != 0); } /** * * * <pre> * Optional. The URI where customer service may be found. * </pre> * * <code>optional string uri = 1 [(.google.api.field_behavior) = OPTIONAL];</code> * * @return The uri. */ public java.lang.String getUri() { java.lang.Object ref = uri_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); uri_ = s; return s; } else { return (java.lang.String) ref; } } /** * * * <pre> * Optional. The URI where customer service may be found. * </pre> * * <code>optional string uri = 1 [(.google.api.field_behavior) = OPTIONAL];</code> * * @return The bytes for uri. 
*/ public com.google.protobuf.ByteString getUriBytes() { java.lang.Object ref = uri_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); uri_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * * * <pre> * Optional. The URI where customer service may be found. * </pre> * * <code>optional string uri = 1 [(.google.api.field_behavior) = OPTIONAL];</code> * * @param value The uri to set. * @return This builder for chaining. */ public Builder setUri(java.lang.String value) { if (value == null) { throw new NullPointerException(); } uri_ = value; bitField0_ |= 0x00000001; onChanged(); return this; } /** * * * <pre> * Optional. The URI where customer service may be found. * </pre> * * <code>optional string uri = 1 [(.google.api.field_behavior) = OPTIONAL];</code> * * @return This builder for chaining. */ public Builder clearUri() { uri_ = getDefaultInstance().getUri(); bitField0_ = (bitField0_ & ~0x00000001); onChanged(); return this; } /** * * * <pre> * Optional. The URI where customer service may be found. * </pre> * * <code>optional string uri = 1 [(.google.api.field_behavior) = OPTIONAL];</code> * * @param value The bytes for uri to set. * @return This builder for chaining. */ public Builder setUriBytes(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); uri_ = value; bitField0_ |= 0x00000001; onChanged(); return this; } private java.lang.Object email_ = ""; /** * * * <pre> * Optional. The email address where customer service may be reached. * </pre> * * <code>optional string email = 2 [(.google.api.field_behavior) = OPTIONAL];</code> * * @return Whether the email field is set. */ public boolean hasEmail() { return ((bitField0_ & 0x00000002) != 0); } /** * * * <pre> * Optional. The email address where customer service may be reached. 
* </pre> * * <code>optional string email = 2 [(.google.api.field_behavior) = OPTIONAL];</code> * * @return The email. */ public java.lang.String getEmail() { java.lang.Object ref = email_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); email_ = s; return s; } else { return (java.lang.String) ref; } } /** * * * <pre> * Optional. The email address where customer service may be reached. * </pre> * * <code>optional string email = 2 [(.google.api.field_behavior) = OPTIONAL];</code> * * @return The bytes for email. */ public com.google.protobuf.ByteString getEmailBytes() { java.lang.Object ref = email_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); email_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * * * <pre> * Optional. The email address where customer service may be reached. * </pre> * * <code>optional string email = 2 [(.google.api.field_behavior) = OPTIONAL];</code> * * @param value The email to set. * @return This builder for chaining. */ public Builder setEmail(java.lang.String value) { if (value == null) { throw new NullPointerException(); } email_ = value; bitField0_ |= 0x00000002; onChanged(); return this; } /** * * * <pre> * Optional. The email address where customer service may be reached. * </pre> * * <code>optional string email = 2 [(.google.api.field_behavior) = OPTIONAL];</code> * * @return This builder for chaining. */ public Builder clearEmail() { email_ = getDefaultInstance().getEmail(); bitField0_ = (bitField0_ & ~0x00000002); onChanged(); return this; } /** * * * <pre> * Optional. The email address where customer service may be reached. * </pre> * * <code>optional string email = 2 [(.google.api.field_behavior) = OPTIONAL];</code> * * @param value The bytes for email to set. * @return This builder for chaining. 
*/ public Builder setEmailBytes(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); email_ = value; bitField0_ |= 0x00000002; onChanged(); return this; } private com.google.type.PhoneNumber phone_; private com.google.protobuf.SingleFieldBuilderV3< com.google.type.PhoneNumber, com.google.type.PhoneNumber.Builder, com.google.type.PhoneNumberOrBuilder> phoneBuilder_; /** * * * <pre> * Optional. The phone number where customer service may be called. * </pre> * * <code>optional .google.type.PhoneNumber phone = 3 [(.google.api.field_behavior) = OPTIONAL]; * </code> * * @return Whether the phone field is set. */ public boolean hasPhone() { return ((bitField0_ & 0x00000004) != 0); } /** * * * <pre> * Optional. The phone number where customer service may be called. * </pre> * * <code>optional .google.type.PhoneNumber phone = 3 [(.google.api.field_behavior) = OPTIONAL]; * </code> * * @return The phone. */ public com.google.type.PhoneNumber getPhone() { if (phoneBuilder_ == null) { return phone_ == null ? com.google.type.PhoneNumber.getDefaultInstance() : phone_; } else { return phoneBuilder_.getMessage(); } } /** * * * <pre> * Optional. The phone number where customer service may be called. * </pre> * * <code>optional .google.type.PhoneNumber phone = 3 [(.google.api.field_behavior) = OPTIONAL]; * </code> */ public Builder setPhone(com.google.type.PhoneNumber value) { if (phoneBuilder_ == null) { if (value == null) { throw new NullPointerException(); } phone_ = value; } else { phoneBuilder_.setMessage(value); } bitField0_ |= 0x00000004; onChanged(); return this; } /** * * * <pre> * Optional. The phone number where customer service may be called. 
* </pre> * * <code>optional .google.type.PhoneNumber phone = 3 [(.google.api.field_behavior) = OPTIONAL]; * </code> */ public Builder setPhone(com.google.type.PhoneNumber.Builder builderForValue) { if (phoneBuilder_ == null) { phone_ = builderForValue.build(); } else { phoneBuilder_.setMessage(builderForValue.build()); } bitField0_ |= 0x00000004; onChanged(); return this; } /** * * * <pre> * Optional. The phone number where customer service may be called. * </pre> * * <code>optional .google.type.PhoneNumber phone = 3 [(.google.api.field_behavior) = OPTIONAL]; * </code> */ public Builder mergePhone(com.google.type.PhoneNumber value) { if (phoneBuilder_ == null) { if (((bitField0_ & 0x00000004) != 0) && phone_ != null && phone_ != com.google.type.PhoneNumber.getDefaultInstance()) { getPhoneBuilder().mergeFrom(value); } else { phone_ = value; } } else { phoneBuilder_.mergeFrom(value); } if (phone_ != null) { bitField0_ |= 0x00000004; onChanged(); } return this; } /** * * * <pre> * Optional. The phone number where customer service may be called. * </pre> * * <code>optional .google.type.PhoneNumber phone = 3 [(.google.api.field_behavior) = OPTIONAL]; * </code> */ public Builder clearPhone() { bitField0_ = (bitField0_ & ~0x00000004); phone_ = null; if (phoneBuilder_ != null) { phoneBuilder_.dispose(); phoneBuilder_ = null; } onChanged(); return this; } /** * * * <pre> * Optional. The phone number where customer service may be called. * </pre> * * <code>optional .google.type.PhoneNumber phone = 3 [(.google.api.field_behavior) = OPTIONAL]; * </code> */ public com.google.type.PhoneNumber.Builder getPhoneBuilder() { bitField0_ |= 0x00000004; onChanged(); return getPhoneFieldBuilder().getBuilder(); } /** * * * <pre> * Optional. The phone number where customer service may be called. 
* </pre> * * <code>optional .google.type.PhoneNumber phone = 3 [(.google.api.field_behavior) = OPTIONAL]; * </code> */ public com.google.type.PhoneNumberOrBuilder getPhoneOrBuilder() { if (phoneBuilder_ != null) { return phoneBuilder_.getMessageOrBuilder(); } else { return phone_ == null ? com.google.type.PhoneNumber.getDefaultInstance() : phone_; } } /** * * * <pre> * Optional. The phone number where customer service may be called. * </pre> * * <code>optional .google.type.PhoneNumber phone = 3 [(.google.api.field_behavior) = OPTIONAL]; * </code> */ private com.google.protobuf.SingleFieldBuilderV3< com.google.type.PhoneNumber, com.google.type.PhoneNumber.Builder, com.google.type.PhoneNumberOrBuilder> getPhoneFieldBuilder() { if (phoneBuilder_ == null) { phoneBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< com.google.type.PhoneNumber, com.google.type.PhoneNumber.Builder, com.google.type.PhoneNumberOrBuilder>( getPhone(), getParentForChildren(), isClean()); phone_ = null; } return phoneBuilder_; } @java.lang.Override public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:google.shopping.merchant.accounts.v1.CustomerService) } // @@protoc_insertion_point(class_scope:google.shopping.merchant.accounts.v1.CustomerService) private static final com.google.shopping.merchant.accounts.v1.CustomerService DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new com.google.shopping.merchant.accounts.v1.CustomerService(); } public static com.google.shopping.merchant.accounts.v1.CustomerService getDefaultInstance() { return DEFAULT_INSTANCE; } private static final com.google.protobuf.Parser<CustomerService> PARSER = new com.google.protobuf.AbstractParser<CustomerService>() { 
      @java.lang.Override
      public CustomerService parsePartialFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        Builder builder = newBuilder();
        try {
          builder.mergeFrom(input, extensionRegistry);
        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
          // Attach the partially-built message so callers can inspect what parsed.
          throw e.setUnfinishedMessage(builder.buildPartial());
        } catch (com.google.protobuf.UninitializedMessageException e) {
          throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
        } catch (java.io.IOException e) {
          throw new com.google.protobuf.InvalidProtocolBufferException(e)
              .setUnfinishedMessage(builder.buildPartial());
        }
        return builder.buildPartial();
      }
    };

public static com.google.protobuf.Parser<CustomerService> parser() {
  return PARSER;
}

@java.lang.Override
public com.google.protobuf.Parser<CustomerService> getParserForType() {
  return PARSER;
}

@java.lang.Override
public com.google.shopping.merchant.accounts.v1.CustomerService getDefaultInstanceForType() {
  return DEFAULT_INSTANCE;
}
}
// =====================================================================================
// NOTE(review): dataset/concatenation artifact — the three bare values that were here
// ("apache/flink", "36392", "flink-yarn/src/main/java/org/apache/flink/yarn/Utils.java")
// are the repository id, byte size, and file path of the NEXT embedded file, not Java
// source. Everything above this marker is protoc-generated CustomerService code; the
// content below is org.apache.flink.yarn.Utils. These files should be split apart.
// =====================================================================================
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.flink.yarn; import org.apache.flink.annotation.VisibleForTesting; import org.apache.flink.configuration.ConfigConstants; import org.apache.flink.configuration.ConfigOption; import org.apache.flink.configuration.ConfigUtils; import org.apache.flink.configuration.CoreOptions; import org.apache.flink.runtime.clusterframework.ContaineredTaskManagerParameters; import org.apache.flink.runtime.clusterframework.TaskExecutorProcessSpec; import org.apache.flink.runtime.clusterframework.TaskExecutorProcessUtils; import org.apache.flink.runtime.util.HadoopUtils; import org.apache.flink.runtime.util.config.memory.ProcessMemoryUtils; import org.apache.flink.util.StringUtils; import org.apache.flink.util.function.FunctionWithException; import org.apache.flink.yarn.configuration.YarnConfigOptions; import org.apache.flink.yarn.configuration.YarnResourceManagerDriverConfiguration; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FileStatus; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; import org.apache.hadoop.io.DataOutputBuffer; import org.apache.hadoop.security.Credentials; import org.apache.hadoop.security.UserGroupInformation; import 
org.apache.hadoop.security.token.Token; import org.apache.hadoop.security.token.TokenIdentifier; import org.apache.hadoop.util.StringInterner; import org.apache.hadoop.yarn.api.ApplicationConstants; import org.apache.hadoop.yarn.api.ApplicationConstants.Environment; import org.apache.hadoop.yarn.api.records.ApplicationAccessType; import org.apache.hadoop.yarn.api.records.ContainerLaunchContext; import org.apache.hadoop.yarn.api.records.LocalResource; import org.apache.hadoop.yarn.api.records.LocalResourceType; import org.apache.hadoop.yarn.api.records.LocalResourceVisibility; import org.apache.hadoop.yarn.api.records.Resource; import org.apache.hadoop.yarn.api.records.URL; import org.apache.hadoop.yarn.conf.YarnConfiguration; import org.apache.hadoop.yarn.security.AMRMTokenIdentifier; import org.apache.hadoop.yarn.util.Records; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import java.io.File; import java.io.IOException; import java.nio.ByteBuffer; import java.util.ArrayList; import java.util.Arrays; import java.util.Collection; import java.util.Collections; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.Objects; import java.util.Optional; import static org.apache.flink.util.Preconditions.checkArgument; import static org.apache.flink.util.Preconditions.checkNotNull; import static org.apache.flink.yarn.YarnConfigKeys.ENV_FLINK_CLASSPATH; import static org.apache.flink.yarn.YarnConfigKeys.LOCAL_RESOURCE_DESCRIPTOR_SEPARATOR; import static org.apache.flink.yarn.configuration.YarnConfigOptions.YARN_CONTAINER_START_COMMAND_TEMPLATE; /** Utility class that provides helper methods to work with Apache Hadoop YARN. */ public final class Utils { private static final Logger LOG = LoggerFactory.getLogger(Utils.class); /** KRB5 file name populated in YARN container for secure IT run. */ public static final String KRB5_FILE_NAME = "krb5.conf"; /** Yarn site xml file name populated in YARN container for secure IT run. 
 */
public static final String YARN_SITE_FILE_NAME = "yarn-site.xml";

/** Constant representing a wildcard access control list. */
private static final String WILDCARD_ACL = "*";

/** The prefixes that Flink adds to the YARN config. */
private static final String[] FLINK_CONFIG_PREFIXES = {"flink.yarn."};

// Fully-qualified class names of YARN's fair schedulers; presumably compared against the
// configured RM scheduler elsewhere in this class — confirm at call sites (not visible here).
@VisibleForTesting
static final String YARN_RM_FAIR_SCHEDULER_CLAZZ =
        "org.apache.hadoop.yarn.server.resourcemanager.scheduler.fair.FairScheduler";

@VisibleForTesting
static final String YARN_RM_SLS_FAIR_SCHEDULER_CLAZZ =
        "org.apache.hadoop.yarn.sls.scheduler.SLSFairScheduler";

// YARN config keys for the memory allocation increment (new resource-types key and the
// legacy scheduler key), with the default applied when neither is configured.
@VisibleForTesting
static final String YARN_RM_INCREMENT_ALLOCATION_MB_KEY =
        "yarn.resource-types.memory-mb.increment-allocation";

@VisibleForTesting
static final String YARN_RM_INCREMENT_ALLOCATION_MB_LEGACY_KEY =
        "yarn.scheduler.increment-allocation-mb";

private static final int DEFAULT_YARN_RM_INCREMENT_ALLOCATION_MB = 1024;

// Same pairing for the vcore allocation increment.
@VisibleForTesting
static final String YARN_RM_INCREMENT_ALLOCATION_VCORES_KEY =
        "yarn.resource-types.vcores.increment-allocation";

@VisibleForTesting
static final String YARN_RM_INCREMENT_ALLOCATION_VCORES_LEGACY_KEY =
        "yarn.scheduler.increment-allocation-vcores";

// JVM flag that makes unknown -XX options non-fatal.
@VisibleForTesting
static final String IGNORE_UNRECOGNIZED_VM_OPTIONS = "-XX:+IgnoreUnrecognizedVMOptions";

private static final int DEFAULT_YARN_RM_INCREMENT_ALLOCATION_VCORES = 1;

/**
 * Populates the CLASSPATH entry of {@code appMasterEnv} with the Flink classpath already
 * stored under {@link YarnConfigKeys#ENV_FLINK_CLASSPATH}, followed by the YARN application
 * classpath entries from the Hadoop configuration (or YARN's defaults).
 *
 * @param conf the Hadoop/YARN configuration to read the application classpath from
 * @param appMasterEnv the environment map to extend in place
 */
public static void setupYarnClassPath(Configuration conf, Map<String, String> appMasterEnv) {
    addToEnvironment(
            appMasterEnv, Environment.CLASSPATH.name(), appMasterEnv.get(ENV_FLINK_CLASSPATH));
    String[] applicationClassPathEntries =
            conf.getStrings(
                    YarnConfiguration.YARN_APPLICATION_CLASSPATH,
                    YarnConfiguration.DEFAULT_YARN_APPLICATION_CLASSPATH);
    for (String c : applicationClassPathEntries) {
        addToEnvironment(appMasterEnv, Environment.CLASSPATH.name(), c.trim());
    }
}

/**
 * Deletes the YARN application files, e.g., Flink binaries, libraries, etc., from the remote
 * filesystem.
 *
 * @param applicationFilesDir The application files directory.
 */
public static void deleteApplicationFiles(final String applicationFilesDir) {
    if (!StringUtils.isNullOrWhitespaceOnly(applicationFilesDir)) {
        final org.apache.flink.core.fs.Path path =
                new org.apache.flink.core.fs.Path(applicationFilesDir);
        try {
            final org.apache.flink.core.fs.FileSystem fileSystem = path.getFileSystem();
            // Recursive delete; 'false' means the path could not be removed — log, don't throw.
            if (!fileSystem.delete(path, true)) {
                LOG.error(
                        "Deleting yarn application files under {} was unsuccessful.",
                        applicationFilesDir);
            }
        } catch (final IOException e) {
            // Best-effort cleanup: failures are logged, never propagated to the caller.
            LOG.error(
                    "Could not properly delete yarn application files directory {}.",
                    applicationFilesDir,
                    e);
        }
    } else {
        LOG.debug(
                "No yarn application files directory set. Therefore, cannot clean up the data.");
    }
}

/**
 * Creates a YARN resource for the remote object at the given location.
 *
 * @param remoteRsrcPath remote location of the resource
 * @param resourceSize size of the resource
 * @param resourceModificationTime last modification time of the resource
 * @param resourceVisibility visibility of the resource for the YARN localizer
 * @param resourceType type of the resource (file vs. archive)
 * @return YARN resource
 */
static LocalResource registerLocalResource(
        Path remoteRsrcPath,
        long resourceSize,
        long resourceModificationTime,
        LocalResourceVisibility resourceVisibility,
        LocalResourceType resourceType) {
    LocalResource localResource = Records.newRecord(LocalResource.class);
    localResource.setResource(URL.fromURI(remoteRsrcPath.toUri()));
    localResource.setSize(resourceSize);
    localResource.setTimestamp(resourceModificationTime);
    localResource.setType(resourceType);
    localResource.setVisibility(resourceVisibility);
    return localResource;
}

/**
 * Creates a YARN resource for the remote object at the given location.
 *
 * @param fs remote filesystem
 * @param remoteRsrcPath resource path to be registered
 * @return YARN resource
 */
private static LocalResource registerLocalResource(
        FileSystem fs, Path remoteRsrcPath, LocalResourceType resourceType) throws IOException {
    // Size and modification time come from a live stat of the remote file.
    FileStatus jarStat = fs.getFileStatus(remoteRsrcPath);
    return registerLocalResource(
            remoteRsrcPath,
            jarStat.getLen(),
            jarStat.getModificationTime(),
            LocalResourceVisibility.APPLICATION,
            resourceType);
}

/**
 * Copied method from org.apache.hadoop.yarn.util.Apps. It was broken by YARN-1824 (2.4.0) and
 * fixed for 2.4.1 by https://issues.apache.org/jira/browse/YARN-1931
 *
 * <p>NOTE(review): intentionally kept byte-identical to the upstream Hadoop method — do not
 * restyle. Appends {@code value} to {@code variable} using the platform path separator.
 */
public static void addToEnvironment(
        Map<String, String> environment, String variable, String value) {
    String val = environment.get(variable);
    if (val == null) {
        val = value;
    } else {
        val = val + File.pathSeparator + value;
    }
    environment.put(StringInterner.weakIntern(variable), StringInterner.weakIntern(val));
}

/**
 * Resolve keytab path either as absolute path or relative to working directory.
 *
 * @param workingDir current working directory
 * @param keytabPath configured keytab path.
 * @return resolved keytab path, or null if not found.
 */
public static String resolveKeytabPath(String workingDir, String keytabPath) {
    String keytab = null;
    if (keytabPath != null) {
        File f;
        // First attempt: treat the configured path as absolute (or CWD-relative).
        f = new File(keytabPath);
        if (f.exists()) {
            keytab = f.getAbsolutePath();
            LOG.info("Resolved keytab path: {}", keytab);
        } else {
            // try using relative paths, this is the case when the keytab was shipped
            // as a local resource
            f = new File(workingDir, keytabPath);
            if (f.exists()) {
                keytab = f.getAbsolutePath();
                LOG.info("Resolved keytab path: {}", keytab);
            } else {
                LOG.warn("Could not resolve keytab path with: {}", keytabPath);
                keytab = null;
            }
        }
    }
    return keytab;
}

/** Private constructor to prevent instantiation.
 */
private Utils() {
    // Utility class; must never be instantiated.
    throw new RuntimeException();
}

/**
 * Creates the launch context, which describes how to bring up a TaskExecutor / TaskManager
 * process in an allocated YARN container.
 *
 * <p>This code is extremely YARN specific and registers all the resources that the TaskExecutor
 * needs (such as JAR file, config file, ...) and all environment variables in a YARN container
 * launch context. The launch context then ensures that those resources will be copied into the
 * containers transient working directory.
 *
 * @param flinkConfig The Flink configuration object.
 * @param yarnConfig The YARN configuration object.
 * @param configuration The YarnResourceManagerDriver configurations.
 * @param tmParams The TaskExecutor container memory parameters.
 * @param taskManagerDynamicProperties The dynamic configurations to be updated for the
 *     TaskExecutors based on client uploaded Flink config.
 * @param workingDirectory The current application master container's working directory.
 * @param taskManagerMainClass The class with the main method.
 * @param log The logger.
 * @return The launch context for the TaskManager processes.
 * @throws Exception Thrown if the launch context could not be created, for example if the
 *     resources could not be copied.
 */
static ContainerLaunchContext createTaskExecutorContext(
        org.apache.flink.configuration.Configuration flinkConfig,
        YarnConfiguration yarnConfig,
        YarnResourceManagerDriverConfiguration configuration,
        ContaineredTaskManagerParameters tmParams,
        String taskManagerDynamicProperties,
        String workingDirectory,
        Class<?> taskManagerMainClass,
        Logger log)
        throws Exception {
    // get and validate all relevant variables
    String remoteFlinkJarPath =
            checkNotNull(
                    configuration.getFlinkDistJar(),
                    "Environment variable %s not set",
                    YarnConfigKeys.FLINK_DIST_JAR);
    String shipListString =
            checkNotNull(
                    configuration.getClientShipFiles(),
                    "Environment variable %s not set",
                    YarnConfigKeys.ENV_CLIENT_SHIP_FILES);
    // Security-related inputs; all optional (null when Kerberos is not in use).
    final String remoteKeytabPath = configuration.getRemoteKeytabPath();
    final String localKeytabPath = configuration.getLocalKeytabPath();
    final String keytabPrincipal = configuration.getKeytabPrinciple();
    final String remoteYarnConfPath = configuration.getYarnSiteXMLPath();
    final String remoteKrb5Path = configuration.getKrb5Path();
    if (log.isDebugEnabled()) {
        log.debug("TM:remote keytab path obtained {}", remoteKeytabPath);
        log.debug("TM:local keytab path obtained {}", localKeytabPath);
        log.debug("TM:keytab principal obtained {}", keytabPrincipal);
        log.debug("TM:remote yarn conf path obtained {}", remoteYarnConfPath);
        log.debug("TM:remote krb5 path obtained {}", remoteKrb5Path);
    }
    String classPathString =
            checkNotNull(
                    configuration.getFlinkClasspath(),
                    "Environment variable %s not set",
                    YarnConfigKeys.ENV_FLINK_CLASSPATH);
    // register keytab
    LocalResource keytabResource = null;
    if (remoteKeytabPath != null) {
        log.info(
                "TM:Adding keytab {} to the container local resource bucket", remoteKeytabPath);
        Path keytabPath = new Path(remoteKeytabPath);
        FileSystem fs = keytabPath.getFileSystem(yarnConfig);
        keytabResource = registerLocalResource(fs, keytabPath, LocalResourceType.FILE);
    }
    // To support Yarn Secure Integration Test Scenario
    LocalResource yarnConfResource = null;
    if (remoteYarnConfPath != null) {
        log.info(
                "TM:Adding remoteYarnConfPath {} to the container local resource bucket",
                remoteYarnConfPath);
        Path yarnConfPath = new Path(remoteYarnConfPath);
        FileSystem fs = yarnConfPath.getFileSystem(yarnConfig);
        yarnConfResource = registerLocalResource(fs, yarnConfPath, LocalResourceType.FILE);
    }
    // register krb5.conf
    LocalResource krb5ConfResource = null;
    boolean hasKrb5 = false;
    if (remoteKrb5Path != null) {
        log.info(
                "Adding remoteKrb5Path {} to the container local resource bucket",
                remoteKrb5Path);
        Path krb5ConfPath = new Path(remoteKrb5Path);
        FileSystem fs = krb5ConfPath.getFileSystem(yarnConfig);
        krb5ConfResource = registerLocalResource(fs, krb5ConfPath, LocalResourceType.FILE);
        hasKrb5 = true;
    }
    Map<String, LocalResource> taskManagerLocalResources = new HashMap<>();
    // register Flink Jar with remote HDFS
    final YarnLocalResourceDescriptor flinkDistLocalResourceDesc =
            YarnLocalResourceDescriptor.fromString(remoteFlinkJarPath);
    taskManagerLocalResources.put(
            flinkDistLocalResourceDesc.getResourceKey(),
            flinkDistLocalResourceDesc.toLocalResource());
    // To support Yarn Secure Integration Test Scenario
    if (yarnConfResource != null) {
        taskManagerLocalResources.put(YARN_SITE_FILE_NAME, yarnConfResource);
    }
    if (krb5ConfResource != null) {
        taskManagerLocalResources.put(KRB5_FILE_NAME, krb5ConfResource);
    }
    if (keytabResource != null) {
        taskManagerLocalResources.put(localKeytabPath, keytabResource);
    }
    // prepare additional files to be shipped
    decodeYarnLocalResourceDescriptorListFromString(shipListString)
            .forEach(
                    resourceDesc ->
                            taskManagerLocalResources.put(
                                    resourceDesc.getResourceKey(),
                                    resourceDesc.toLocalResource()));
    // now that all resources are prepared, we can create the launch context
    log.info("Creating container launch context for TaskManagers");
    boolean hasLogback = new File(workingDirectory, "logback.xml").exists();
    boolean hasLog4j = new File(workingDirectory, "log4j.properties").exists();
    String launchCommand =
            getTaskManagerShellCommand(
                    flinkConfig,
                    tmParams,
                    ".",
                    ApplicationConstants.LOG_DIR_EXPANSION_VAR,
                    hasLogback,
                    hasLog4j,
                    hasKrb5,
                    taskManagerMainClass,
                    taskManagerDynamicProperties);
    if (log.isDebugEnabled()) {
        log.debug("Starting TaskManagers with command: " + launchCommand);
    } else {
        log.info("Starting TaskManagers");
    }
    ContainerLaunchContext ctx = Records.newRecord(ContainerLaunchContext.class);
    ctx.setCommands(Collections.singletonList(launchCommand));
    ctx.setLocalResources(taskManagerLocalResources);
    Map<String, String> containerEnv = new HashMap<>();
    containerEnv.putAll(tmParams.taskManagerEnv());
    // add YARN classpath, etc to the container environment
    containerEnv.put(ENV_FLINK_CLASSPATH, classPathString);
    setupYarnClassPath(yarnConfig, containerEnv);
    containerEnv.put(
            YarnConfigKeys.ENV_HADOOP_USER_NAME,
            UserGroupInformation.getCurrentUser().getUserName());
    if (remoteKeytabPath != null && localKeytabPath != null && keytabPrincipal != null) {
        containerEnv.put(YarnConfigKeys.REMOTE_KEYTAB_PATH, remoteKeytabPath);
        containerEnv.put(YarnConfigKeys.LOCAL_KEYTAB_PATH, localKeytabPath);
        containerEnv.put(YarnConfigKeys.KEYTAB_PRINCIPAL, keytabPrincipal);
    } else if (localKeytabPath != null && keytabPrincipal != null) {
        containerEnv.put(YarnConfigKeys.LOCAL_KEYTAB_PATH, localKeytabPath);
        containerEnv.put(YarnConfigKeys.KEYTAB_PRINCIPAL, keytabPrincipal);
    }
    ctx.setEnvironment(containerEnv);
    setAclsFor(ctx, flinkConfig);
    // For TaskManager YARN container context, read the tokens from the jobmanager yarn
    // container local file.
    // NOTE: must read the tokens from the local file, not from the UGI context, because if UGI
    // is login
    // using Kerberos keytabs, there is no HDFS delegation token in the UGI context.
    final String fileLocation = System.getenv(UserGroupInformation.HADOOP_TOKEN_FILE_LOCATION);
    if (fileLocation != null) {
        log.debug("Adding security tokens to TaskExecutor's container launch context.");
        try (DataOutputBuffer dob = new DataOutputBuffer()) {
            Credentials cred =
                    Credentials.readTokenStorageFile(
                            new File(fileLocation),
                            HadoopUtils.getHadoopConfiguration(flinkConfig));
            // Filter out AMRMToken before setting the tokens to the TaskManager container
            // context.
            Credentials taskManagerCred = new Credentials();
            Collection<Token<? extends TokenIdentifier>> userTokens = cred.getAllTokens();
            for (Token<? extends TokenIdentifier> token : userTokens) {
                if (!token.getKind().equals(AMRMTokenIdentifier.KIND_NAME)) {
                    taskManagerCred.addToken(token.getService(), token);
                }
            }
            taskManagerCred.writeTokenStorageToStream(dob);
            ByteBuffer securityTokens = ByteBuffer.wrap(dob.getData(), 0, dob.getLength());
            ctx.setTokens(securityTokens);
        } catch (Throwable t) {
            // Deliberately best-effort: token propagation failure is logged, not fatal here.
            log.error("Failed to add Hadoop's security tokens.", t);
        }
    } else {
        log.info(
                "Could not set security tokens because Hadoop's token file location is unknown.");
    }
    return ctx;
}

/**
 * Generates the shell command to start a task manager.
 *
 * @param flinkConfig The Flink configuration.
 * @param tmParams Parameters for the task manager.
 * @param configDirectory The configuration directory for the config.yaml
 * @param logDirectory The log directory.
 * @param hasLogback Uses logback?
 * @param hasLog4j Uses log4j?
 * @param hasKrb5 Whether a krb5.conf was localized into the container.
 * @param mainClass The main class to start with.
 * @param mainArgs Additional arguments appended after the generated dynamic configuration.
 * @return A String containing the task manager startup command.
*/ public static String getTaskManagerShellCommand( org.apache.flink.configuration.Configuration flinkConfig, ContaineredTaskManagerParameters tmParams, String configDirectory, String logDirectory, boolean hasLogback, boolean hasLog4j, boolean hasKrb5, Class<?> mainClass, String mainArgs) { final Map<String, String> startCommandValues = new HashMap<>(); startCommandValues.put("java", "$JAVA_HOME/bin/java"); final TaskExecutorProcessSpec taskExecutorProcessSpec = tmParams.getTaskExecutorProcessSpec(); startCommandValues.put( "jvmmem", ProcessMemoryUtils.generateJvmParametersStr(taskExecutorProcessSpec)); List<ConfigOption<String>> jvmOptions = Arrays.asList( CoreOptions.FLINK_DEFAULT_JVM_OPTIONS, CoreOptions.FLINK_JVM_OPTIONS, CoreOptions.FLINK_DEFAULT_TM_JVM_OPTIONS, CoreOptions.FLINK_TM_JVM_OPTIONS); startCommandValues.put("jvmopts", generateJvmOptsString(flinkConfig, jvmOptions, hasKrb5)); String logging = ""; if (hasLogback || hasLog4j) { logging = "-Dlog.file=" + logDirectory + "/taskmanager.log"; if (hasLogback) { logging += " -Dlogback.configurationFile=file:" + configDirectory + "/logback.xml"; } if (hasLog4j) { logging += " -Dlog4j.configuration=file:" + configDirectory + "/log4j.properties"; logging += " -Dlog4j.configurationFile=file:" + configDirectory + "/log4j.properties"; } } startCommandValues.put("logging", logging); startCommandValues.put("class", mainClass.getName()); startCommandValues.put( "redirects", "1> " + logDirectory + "/taskmanager.out " + "2> " + logDirectory + "/taskmanager.err"); String argsStr = TaskExecutorProcessUtils.generateDynamicConfigsStr(taskExecutorProcessSpec) + " --configDir " + configDirectory; if (!mainArgs.isEmpty()) { argsStr += " " + mainArgs; } startCommandValues.put("args", argsStr); final String commandTemplate = flinkConfig.get(YARN_CONTAINER_START_COMMAND_TEMPLATE); String startCommand = getStartCommand(commandTemplate, startCommandValues); LOG.debug("TaskManager start command: " + startCommand); return 
startCommand; } /** * Replaces placeholders in the template start command with values from startCommandValues. * * <p>If the default template {@link * ConfigConstants#DEFAULT_YARN_CONTAINER_START_COMMAND_TEMPLATE} is used, the following keys * must be present in the map or the resulting command will still contain placeholders: * * <ul> * <li><tt>java</tt> = path to the Java executable * <li><tt>jvmmem</tt> = JVM memory limits and tweaks * <li><tt>jvmopts</tt> = misc options for the Java VM * <li><tt>logging</tt> = logging-related configuration settings * <li><tt>class</tt> = main class to execute * <li><tt>args</tt> = arguments for the main class * <li><tt>redirects</tt> = output redirects * </ul> * * @param template a template start command with placeholders * @param startCommandValues a replacement map <tt>placeholder -&gt; value</tt> * @return the start command with placeholders filled in */ public static String getStartCommand(String template, Map<String, String> startCommandValues) { for (Map.Entry<String, String> variable : startCommandValues.entrySet()) { template = template.replace("%" + variable.getKey() + "%", variable.getValue()) .replace(" ", " ") .trim(); } return template; } public static String generateJvmOptsString( org.apache.flink.configuration.Configuration conf, List<ConfigOption<String>> jvmOptions, boolean hasKrb5) { StringBuilder javaOptsSb = new StringBuilder(); for (ConfigOption<String> option : jvmOptions) { concatWithSpace(javaOptsSb, conf.get(option)); } concatWithSpace(javaOptsSb, IGNORE_UNRECOGNIZED_VM_OPTIONS); // krb5.conf file will be available as local resource in JM/TM container if (hasKrb5) { concatWithSpace(javaOptsSb, "-Djava.security.krb5.conf=krb5.conf"); } return javaOptsSb.toString().trim(); } static boolean isRemotePath(String path) throws IOException { org.apache.flink.core.fs.Path flinkPath = new org.apache.flink.core.fs.Path(path); return flinkPath.getFileSystem().isDistributedFS(); } private static 
List<YarnLocalResourceDescriptor> decodeYarnLocalResourceDescriptorListFromString(String resources) throws Exception { final List<YarnLocalResourceDescriptor> resourceDescriptors = new ArrayList<>(); for (String shipResourceDescStr : resources.split(LOCAL_RESOURCE_DESCRIPTOR_SEPARATOR)) { if (!shipResourceDescStr.isEmpty()) { resourceDescriptors.add( YarnLocalResourceDescriptor.fromString(shipResourceDescStr)); } } return resourceDescriptors; } @VisibleForTesting static Resource getUnitResource(YarnConfiguration yarnConfig) { final int unitMemMB, unitVcore; final String yarnRmSchedulerClazzName = yarnConfig.get(YarnConfiguration.RM_SCHEDULER); if (Objects.equals(yarnRmSchedulerClazzName, YARN_RM_FAIR_SCHEDULER_CLAZZ) || Objects.equals(yarnRmSchedulerClazzName, YARN_RM_SLS_FAIR_SCHEDULER_CLAZZ)) { String propMem = yarnConfig.get(YARN_RM_INCREMENT_ALLOCATION_MB_KEY); String propVcore = yarnConfig.get(YARN_RM_INCREMENT_ALLOCATION_VCORES_KEY); unitMemMB = propMem != null ? Integer.parseInt(propMem) : yarnConfig.getInt( YARN_RM_INCREMENT_ALLOCATION_MB_LEGACY_KEY, DEFAULT_YARN_RM_INCREMENT_ALLOCATION_MB); unitVcore = propVcore != null ? 
Integer.parseInt(propVcore) : yarnConfig.getInt( YARN_RM_INCREMENT_ALLOCATION_VCORES_LEGACY_KEY, DEFAULT_YARN_RM_INCREMENT_ALLOCATION_VCORES); } else { unitMemMB = yarnConfig.getInt( YarnConfiguration.RM_SCHEDULER_MINIMUM_ALLOCATION_MB, YarnConfiguration.DEFAULT_RM_SCHEDULER_MINIMUM_ALLOCATION_MB); unitVcore = yarnConfig.getInt( YarnConfiguration.RM_SCHEDULER_MINIMUM_ALLOCATION_VCORES, YarnConfiguration.DEFAULT_RM_SCHEDULER_MINIMUM_ALLOCATION_VCORES); } return Resource.newInstance(unitMemMB, unitVcore); } public static List<Path> getQualifiedRemoteProvidedLibDirs( org.apache.flink.configuration.Configuration configuration, YarnConfiguration yarnConfiguration) throws IOException { return getRemoteSharedLibPaths( configuration, pathStr -> { final Path path = new Path(pathStr); return path.getFileSystem(yarnConfiguration).makeQualified(path); }); } private static List<Path> getRemoteSharedLibPaths( org.apache.flink.configuration.Configuration configuration, FunctionWithException<String, Path, IOException> strToPathMapper) throws IOException { final List<Path> providedLibDirs = ConfigUtils.decodeListFromConfig( configuration, YarnConfigOptions.PROVIDED_LIB_DIRS, strToPathMapper); for (Path path : providedLibDirs) { if (!Utils.isRemotePath(path.toString())) { throw new IllegalArgumentException( "The \"" + YarnConfigOptions.PROVIDED_LIB_DIRS.key() + "\" should only contain" + " dirs accessible from all worker nodes, while the \"" + path + "\" is local."); } } return providedLibDirs; } public static boolean isUsrLibDirectory(final FileSystem fileSystem, final Path path) throws IOException { final FileStatus fileStatus = fileSystem.getFileStatus(path); // Use the Path obj from fileStatus to get rid of trailing slash return fileStatus.isDirectory() && ConfigConstants.DEFAULT_FLINK_USR_LIB_DIR.equals(fileStatus.getPath().getName()); } public static Optional<Path> getQualifiedRemoteProvidedUsrLib( org.apache.flink.configuration.Configuration configuration, YarnConfiguration 
yarnConfiguration) throws IOException, IllegalArgumentException { String usrlib = configuration.get(YarnConfigOptions.PROVIDED_USRLIB_DIR); if (usrlib == null) { return Optional.empty(); } final Path qualifiedUsrLibPath = FileSystem.get(yarnConfiguration).makeQualified(new Path(usrlib)); checkArgument( isRemotePath(qualifiedUsrLibPath.toString()), "The \"%s\" must point to a remote dir " + "which is accessible from all worker nodes.", YarnConfigOptions.PROVIDED_USRLIB_DIR.key()); checkArgument( isUsrLibDirectory(FileSystem.get(yarnConfiguration), qualifiedUsrLibPath), "The \"%s\" should be named with \"%s\".", YarnConfigOptions.PROVIDED_USRLIB_DIR.key(), ConfigConstants.DEFAULT_FLINK_USR_LIB_DIR); return Optional.of(qualifiedUsrLibPath); } public static YarnConfiguration getYarnAndHadoopConfiguration( org.apache.flink.configuration.Configuration flinkConfig) { final YarnConfiguration yarnConfig = getYarnConfiguration(flinkConfig); yarnConfig.addResource(HadoopUtils.getHadoopConfiguration(flinkConfig)); return yarnConfig; } /** * Add additional config entries from the flink config to the yarn config. * * @param flinkConfig The Flink configuration object. * @return The yarn configuration. */ public static YarnConfiguration getYarnConfiguration( org.apache.flink.configuration.Configuration flinkConfig) { final YarnConfiguration yarnConfig = new YarnConfiguration(); for (String key : flinkConfig.keySet()) { for (String prefix : FLINK_CONFIG_PREFIXES) { if (key.startsWith(prefix)) { String newKey = key.substring("flink.".length()); String value = flinkConfig.getString(key, null); yarnConfig.set(newKey, value); LOG.debug( "Adding Flink config entry for {} as {}={} to Yarn config", key, newKey, value); } } } return yarnConfig; } /** * Sets the application ACLs for the given ContainerLaunchContext based on the values specified * in the given Flink configuration. 
Only ApplicationAccessType.VIEW_APP and * ApplicationAccessType.MODIFY_APP ACLs are set, and only if they are configured in the Flink * configuration. If the viewAcls or modifyAcls string contains the WILDCARD_ACL constant, it * will replace the entire string with the WILDCARD_ACL. The resulting map is then set as the * application acls for the given container launch context. * * @param amContainer the ContainerLaunchContext to set the ACLs for. * @param flinkConfig the Flink configuration to read the ACL values from. */ public static void setAclsFor( ContainerLaunchContext amContainer, org.apache.flink.configuration.Configuration flinkConfig) { Map<ApplicationAccessType, String> acls = new HashMap<>(); final String viewAcls = flinkConfig.get(YarnConfigOptions.APPLICATION_VIEW_ACLS); final String modifyAcls = flinkConfig.get(YarnConfigOptions.APPLICATION_MODIFY_ACLS); validateAclString(viewAcls); validateAclString(modifyAcls); if (viewAcls != null && !viewAcls.isEmpty()) { acls.put(ApplicationAccessType.VIEW_APP, viewAcls); } if (modifyAcls != null && !modifyAcls.isEmpty()) { acls.put(ApplicationAccessType.MODIFY_APP, modifyAcls); } if (!acls.isEmpty()) { amContainer.setApplicationACLs(acls); } } /* Validates the ACL string to ensure that it is either null or the wildcard ACL. */ private static void validateAclString(String acl) { if (acl != null && acl.contains("*") && !acl.equals("*")) { throw new IllegalArgumentException( String.format( "Invalid wildcard ACL %s. The ACL wildcard does not support regex. The only valid wildcard ACL is '*'.", acl)); } } public static Path getPathFromLocalFile(File localFile) { return new Path(localFile.toURI()); } public static Path getPathFromLocalFilePathStr(String localPathStr) { return getPathFromLocalFile(new File(localPathStr)); } public static void concatWithSpace(StringBuilder sb, String value) { if (value == null || value.isEmpty()) { return; } sb.append(' '); sb.append(value); } }
googleapis/google-cloud-java
36,146
java-dataplex/proto-google-cloud-dataplex-v1/src/main/java/com/google/cloud/dataplex/v1/ListPartitionsResponse.java
/* * Copyright 2025 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ // Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/cloud/dataplex/v1/metadata.proto // Protobuf Java Version: 3.25.8 package com.google.cloud.dataplex.v1; /** * * * <pre> * List metadata partitions response. * </pre> * * Protobuf type {@code google.cloud.dataplex.v1.ListPartitionsResponse} */ public final class ListPartitionsResponse extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.cloud.dataplex.v1.ListPartitionsResponse) ListPartitionsResponseOrBuilder { private static final long serialVersionUID = 0L; // Use ListPartitionsResponse.newBuilder() to construct. 
private ListPartitionsResponse(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private ListPartitionsResponse() { partitions_ = java.util.Collections.emptyList(); nextPageToken_ = ""; } @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance(UnusedPrivateParameter unused) { return new ListPartitionsResponse(); } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.dataplex.v1.MetadataProto .internal_static_google_cloud_dataplex_v1_ListPartitionsResponse_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.dataplex.v1.MetadataProto .internal_static_google_cloud_dataplex_v1_ListPartitionsResponse_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.dataplex.v1.ListPartitionsResponse.class, com.google.cloud.dataplex.v1.ListPartitionsResponse.Builder.class); } public static final int PARTITIONS_FIELD_NUMBER = 1; @SuppressWarnings("serial") private java.util.List<com.google.cloud.dataplex.v1.Partition> partitions_; /** * * * <pre> * Partitions under the specified parent entity. * </pre> * * <code>repeated .google.cloud.dataplex.v1.Partition partitions = 1;</code> */ @java.lang.Override public java.util.List<com.google.cloud.dataplex.v1.Partition> getPartitionsList() { return partitions_; } /** * * * <pre> * Partitions under the specified parent entity. * </pre> * * <code>repeated .google.cloud.dataplex.v1.Partition partitions = 1;</code> */ @java.lang.Override public java.util.List<? extends com.google.cloud.dataplex.v1.PartitionOrBuilder> getPartitionsOrBuilderList() { return partitions_; } /** * * * <pre> * Partitions under the specified parent entity. 
* </pre> * * <code>repeated .google.cloud.dataplex.v1.Partition partitions = 1;</code> */ @java.lang.Override public int getPartitionsCount() { return partitions_.size(); } /** * * * <pre> * Partitions under the specified parent entity. * </pre> * * <code>repeated .google.cloud.dataplex.v1.Partition partitions = 1;</code> */ @java.lang.Override public com.google.cloud.dataplex.v1.Partition getPartitions(int index) { return partitions_.get(index); } /** * * * <pre> * Partitions under the specified parent entity. * </pre> * * <code>repeated .google.cloud.dataplex.v1.Partition partitions = 1;</code> */ @java.lang.Override public com.google.cloud.dataplex.v1.PartitionOrBuilder getPartitionsOrBuilder(int index) { return partitions_.get(index); } public static final int NEXT_PAGE_TOKEN_FIELD_NUMBER = 2; @SuppressWarnings("serial") private volatile java.lang.Object nextPageToken_ = ""; /** * * * <pre> * Token to retrieve the next page of results, or empty if there are no * remaining results in the list. * </pre> * * <code>string next_page_token = 2;</code> * * @return The nextPageToken. */ @java.lang.Override public java.lang.String getNextPageToken() { java.lang.Object ref = nextPageToken_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); nextPageToken_ = s; return s; } } /** * * * <pre> * Token to retrieve the next page of results, or empty if there are no * remaining results in the list. * </pre> * * <code>string next_page_token = 2;</code> * * @return The bytes for nextPageToken. 
*/ @java.lang.Override public com.google.protobuf.ByteString getNextPageTokenBytes() { java.lang.Object ref = nextPageToken_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); nextPageToken_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { for (int i = 0; i < partitions_.size(); i++) { output.writeMessage(1, partitions_.get(i)); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 2, nextPageToken_); } getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; for (int i = 0; i < partitions_.size(); i++) { size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, partitions_.get(i)); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, nextPageToken_); } size += getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof com.google.cloud.dataplex.v1.ListPartitionsResponse)) { return super.equals(obj); } com.google.cloud.dataplex.v1.ListPartitionsResponse other = (com.google.cloud.dataplex.v1.ListPartitionsResponse) obj; if (!getPartitionsList().equals(other.getPartitionsList())) return false; if (!getNextPageToken().equals(other.getNextPageToken())) return 
false; if (!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); if (getPartitionsCount() > 0) { hash = (37 * hash) + PARTITIONS_FIELD_NUMBER; hash = (53 * hash) + getPartitionsList().hashCode(); } hash = (37 * hash) + NEXT_PAGE_TOKEN_FIELD_NUMBER; hash = (53 * hash) + getNextPageToken().hashCode(); hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static com.google.cloud.dataplex.v1.ListPartitionsResponse parseFrom( java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.dataplex.v1.ListPartitionsResponse parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.dataplex.v1.ListPartitionsResponse parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.dataplex.v1.ListPartitionsResponse parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.dataplex.v1.ListPartitionsResponse parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.dataplex.v1.ListPartitionsResponse parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } 
public static com.google.cloud.dataplex.v1.ListPartitionsResponse parseFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.dataplex.v1.ListPartitionsResponse parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.dataplex.v1.ListPartitionsResponse parseDelimitedFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); } public static com.google.cloud.dataplex.v1.ListPartitionsResponse parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.dataplex.v1.ListPartitionsResponse parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.dataplex.v1.ListPartitionsResponse parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder(com.google.cloud.dataplex.v1.ListPartitionsResponse prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == 
DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * * * <pre> * List metadata partitions response. * </pre> * * Protobuf type {@code google.cloud.dataplex.v1.ListPartitionsResponse} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:google.cloud.dataplex.v1.ListPartitionsResponse) com.google.cloud.dataplex.v1.ListPartitionsResponseOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.dataplex.v1.MetadataProto .internal_static_google_cloud_dataplex_v1_ListPartitionsResponse_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.dataplex.v1.MetadataProto .internal_static_google_cloud_dataplex_v1_ListPartitionsResponse_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.dataplex.v1.ListPartitionsResponse.class, com.google.cloud.dataplex.v1.ListPartitionsResponse.Builder.class); } // Construct using com.google.cloud.dataplex.v1.ListPartitionsResponse.newBuilder() private Builder() {} private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); } @java.lang.Override public Builder clear() { super.clear(); bitField0_ = 0; if (partitionsBuilder_ == null) { partitions_ = java.util.Collections.emptyList(); } else { partitions_ = null; partitionsBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000001); nextPageToken_ = ""; return this; } @java.lang.Override public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return com.google.cloud.dataplex.v1.MetadataProto .internal_static_google_cloud_dataplex_v1_ListPartitionsResponse_descriptor; } 
// NOTE(review): generated protobuf code (file header says "DO NOT EDIT").
// Code below is byte-identical to the generator's output; only reviewer
// comments were added. Any behavioral change must be made by regenerating
// from google/cloud/dataplex/v1/*.proto, never by hand-editing this file.

    @java.lang.Override
    public com.google.cloud.dataplex.v1.ListPartitionsResponse getDefaultInstanceForType() {
      return com.google.cloud.dataplex.v1.ListPartitionsResponse.getDefaultInstance();
    }

    @java.lang.Override
    public com.google.cloud.dataplex.v1.ListPartitionsResponse build() {
      com.google.cloud.dataplex.v1.ListPartitionsResponse result = buildPartial();
      if (!result.isInitialized()) {
        throw newUninitializedMessageException(result);
      }
      return result;
    }

    @java.lang.Override
    public com.google.cloud.dataplex.v1.ListPartitionsResponse buildPartial() {
      com.google.cloud.dataplex.v1.ListPartitionsResponse result =
          new com.google.cloud.dataplex.v1.ListPartitionsResponse(this);
      buildPartialRepeatedFields(result);
      if (bitField0_ != 0) {
        buildPartial0(result);
      }
      onBuilt();
      return result;
    }

    // Transfers the repeated `partitions` field into the result; when no field
    // builder is in use the local list is frozen (made unmodifiable) first.
    private void buildPartialRepeatedFields(
        com.google.cloud.dataplex.v1.ListPartitionsResponse result) {
      if (partitionsBuilder_ == null) {
        if (((bitField0_ & 0x00000001) != 0)) {
          partitions_ = java.util.Collections.unmodifiableList(partitions_);
          bitField0_ = (bitField0_ & ~0x00000001);
        }
        result.partitions_ = partitions_;
      } else {
        result.partitions_ = partitionsBuilder_.build();
      }
    }

    // Copies scalar fields (next_page_token, bit 0x2) guarded by has-bits.
    private void buildPartial0(com.google.cloud.dataplex.v1.ListPartitionsResponse result) {
      int from_bitField0_ = bitField0_;
      if (((from_bitField0_ & 0x00000002) != 0)) {
        result.nextPageToken_ = nextPageToken_;
      }
    }

    @java.lang.Override
    public Builder clone() {
      return super.clone();
    }

    @java.lang.Override
    public Builder setField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.setField(field, value);
    }

    @java.lang.Override
    public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
      return super.clearField(field);
    }

    @java.lang.Override
    public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
      return super.clearOneof(oneof);
    }

    @java.lang.Override
    public Builder setRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field,
        int index,
        java.lang.Object value) {
      return super.setRepeatedField(field, index, value);
    }

    @java.lang.Override
    public Builder addRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.addRepeatedField(field, value);
    }

    @java.lang.Override
    public Builder mergeFrom(com.google.protobuf.Message other) {
      if (other instanceof com.google.cloud.dataplex.v1.ListPartitionsResponse) {
        return mergeFrom((com.google.cloud.dataplex.v1.ListPartitionsResponse) other);
      } else {
        super.mergeFrom(other);
        return this;
      }
    }

    // Field-wise merge from another message: repeated `partitions` are
    // appended (or the list is shared when this builder's list is empty);
    // a non-empty next_page_token overwrites; unknown fields are merged.
    public Builder mergeFrom(com.google.cloud.dataplex.v1.ListPartitionsResponse other) {
      if (other == com.google.cloud.dataplex.v1.ListPartitionsResponse.getDefaultInstance())
        return this;
      if (partitionsBuilder_ == null) {
        if (!other.partitions_.isEmpty()) {
          if (partitions_.isEmpty()) {
            partitions_ = other.partitions_;
            bitField0_ = (bitField0_ & ~0x00000001);
          } else {
            ensurePartitionsIsMutable();
            partitions_.addAll(other.partitions_);
          }
          onChanged();
        }
      } else {
        if (!other.partitions_.isEmpty()) {
          if (partitionsBuilder_.isEmpty()) {
            partitionsBuilder_.dispose();
            partitionsBuilder_ = null;
            partitions_ = other.partitions_;
            bitField0_ = (bitField0_ & ~0x00000001);
            partitionsBuilder_ =
                com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders
                    ? getPartitionsFieldBuilder()
                    : null;
          } else {
            partitionsBuilder_.addAllMessages(other.partitions_);
          }
        }
      }
      if (!other.getNextPageToken().isEmpty()) {
        nextPageToken_ = other.nextPageToken_;
        bitField0_ |= 0x00000002;
        onChanged();
      }
      this.mergeUnknownFields(other.getUnknownFields());
      onChanged();
      return this;
    }

    @java.lang.Override
    public final boolean isInitialized() {
      return true;
    }

    // Streaming wire-format parse into this builder. Tag 10 = partitions
    // (field 1, length-delimited), tag 18 = next_page_token (field 2);
    // anything else is preserved as an unknown field.
    @java.lang.Override
    public Builder mergeFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      if (extensionRegistry == null) {
        throw new java.lang.NullPointerException();
      }
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              done = true;
              break;
            case 10:
              {
                com.google.cloud.dataplex.v1.Partition m =
                    input.readMessage(
                        com.google.cloud.dataplex.v1.Partition.parser(), extensionRegistry);
                if (partitionsBuilder_ == null) {
                  ensurePartitionsIsMutable();
                  partitions_.add(m);
                } else {
                  partitionsBuilder_.addMessage(m);
                }
                break;
              } // case 10
            case 18:
              {
                nextPageToken_ = input.readStringRequireUtf8();
                bitField0_ |= 0x00000002;
                break;
              } // case 18
            default:
              {
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
          } // switch (tag)
        } // while (!done)
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.unwrapIOException();
      } finally {
        onChanged();
      } // finally
      return this;
    }

    private int bitField0_;

    private java.util.List<com.google.cloud.dataplex.v1.Partition> partitions_ =
        java.util.Collections.emptyList();

    // Copy-on-write guard: replaces the (possibly shared/unmodifiable) list
    // with a private ArrayList before the first local mutation.
    private void ensurePartitionsIsMutable() {
      if (!((bitField0_ & 0x00000001) != 0)) {
        partitions_ =
            new java.util.ArrayList<com.google.cloud.dataplex.v1.Partition>(partitions_);
        bitField0_ |= 0x00000001;
      }
    }

    private com.google.protobuf.RepeatedFieldBuilderV3<
            com.google.cloud.dataplex.v1.Partition,
            com.google.cloud.dataplex.v1.Partition.Builder,
            com.google.cloud.dataplex.v1.PartitionOrBuilder>
        partitionsBuilder_;

    /**
     * <pre>
     * Partitions under the specified parent entity.
     * </pre>
     *
     * <code>repeated .google.cloud.dataplex.v1.Partition partitions = 1;</code>
     */
    public java.util.List<com.google.cloud.dataplex.v1.Partition> getPartitionsList() {
      if (partitionsBuilder_ == null) {
        return java.util.Collections.unmodifiableList(partitions_);
      } else {
        return partitionsBuilder_.getMessageList();
      }
    }

    /**
     * <pre>
     * Partitions under the specified parent entity.
     * </pre>
     *
     * <code>repeated .google.cloud.dataplex.v1.Partition partitions = 1;</code>
     */
    public int getPartitionsCount() {
      if (partitionsBuilder_ == null) {
        return partitions_.size();
      } else {
        return partitionsBuilder_.getCount();
      }
    }

    /**
     * <pre>
     * Partitions under the specified parent entity.
     * </pre>
     *
     * <code>repeated .google.cloud.dataplex.v1.Partition partitions = 1;</code>
     */
    public com.google.cloud.dataplex.v1.Partition getPartitions(int index) {
      if (partitionsBuilder_ == null) {
        return partitions_.get(index);
      } else {
        return partitionsBuilder_.getMessage(index);
      }
    }

    /**
     * <pre>
     * Partitions under the specified parent entity.
     * </pre>
     *
     * <code>repeated .google.cloud.dataplex.v1.Partition partitions = 1;</code>
     */
    public Builder setPartitions(int index, com.google.cloud.dataplex.v1.Partition value) {
      if (partitionsBuilder_ == null) {
        if (value == null) {
          throw new NullPointerException();
        }
        ensurePartitionsIsMutable();
        partitions_.set(index, value);
        onChanged();
      } else {
        partitionsBuilder_.setMessage(index, value);
      }
      return this;
    }

    /**
     * <pre>
     * Partitions under the specified parent entity.
     * </pre>
     *
     * <code>repeated .google.cloud.dataplex.v1.Partition partitions = 1;</code>
     */
    public Builder setPartitions(
        int index, com.google.cloud.dataplex.v1.Partition.Builder builderForValue) {
      if (partitionsBuilder_ == null) {
        ensurePartitionsIsMutable();
        partitions_.set(index, builderForValue.build());
        onChanged();
      } else {
        partitionsBuilder_.setMessage(index, builderForValue.build());
      }
      return this;
    }

    /**
     * <pre>
     * Partitions under the specified parent entity.
     * </pre>
     *
     * <code>repeated .google.cloud.dataplex.v1.Partition partitions = 1;</code>
     */
    public Builder addPartitions(com.google.cloud.dataplex.v1.Partition value) {
      if (partitionsBuilder_ == null) {
        if (value == null) {
          throw new NullPointerException();
        }
        ensurePartitionsIsMutable();
        partitions_.add(value);
        onChanged();
      } else {
        partitionsBuilder_.addMessage(value);
      }
      return this;
    }

    /**
     * <pre>
     * Partitions under the specified parent entity.
     * </pre>
     *
     * <code>repeated .google.cloud.dataplex.v1.Partition partitions = 1;</code>
     */
    public Builder addPartitions(int index, com.google.cloud.dataplex.v1.Partition value) {
      if (partitionsBuilder_ == null) {
        if (value == null) {
          throw new NullPointerException();
        }
        ensurePartitionsIsMutable();
        partitions_.add(index, value);
        onChanged();
      } else {
        partitionsBuilder_.addMessage(index, value);
      }
      return this;
    }

    /**
     * <pre>
     * Partitions under the specified parent entity.
     * </pre>
     *
     * <code>repeated .google.cloud.dataplex.v1.Partition partitions = 1;</code>
     */
    public Builder addPartitions(com.google.cloud.dataplex.v1.Partition.Builder builderForValue) {
      if (partitionsBuilder_ == null) {
        ensurePartitionsIsMutable();
        partitions_.add(builderForValue.build());
        onChanged();
      } else {
        partitionsBuilder_.addMessage(builderForValue.build());
      }
      return this;
    }

    /**
     * <pre>
     * Partitions under the specified parent entity.
     * </pre>
     *
     * <code>repeated .google.cloud.dataplex.v1.Partition partitions = 1;</code>
     */
    public Builder addPartitions(
        int index, com.google.cloud.dataplex.v1.Partition.Builder builderForValue) {
      if (partitionsBuilder_ == null) {
        ensurePartitionsIsMutable();
        partitions_.add(index, builderForValue.build());
        onChanged();
      } else {
        partitionsBuilder_.addMessage(index, builderForValue.build());
      }
      return this;
    }

    /**
     * <pre>
     * Partitions under the specified parent entity.
     * </pre>
     *
     * <code>repeated .google.cloud.dataplex.v1.Partition partitions = 1;</code>
     */
    public Builder addAllPartitions(
        java.lang.Iterable<? extends com.google.cloud.dataplex.v1.Partition> values) {
      if (partitionsBuilder_ == null) {
        ensurePartitionsIsMutable();
        com.google.protobuf.AbstractMessageLite.Builder.addAll(values, partitions_);
        onChanged();
      } else {
        partitionsBuilder_.addAllMessages(values);
      }
      return this;
    }

    /**
     * <pre>
     * Partitions under the specified parent entity.
     * </pre>
     *
     * <code>repeated .google.cloud.dataplex.v1.Partition partitions = 1;</code>
     */
    public Builder clearPartitions() {
      if (partitionsBuilder_ == null) {
        partitions_ = java.util.Collections.emptyList();
        bitField0_ = (bitField0_ & ~0x00000001);
        onChanged();
      } else {
        partitionsBuilder_.clear();
      }
      return this;
    }

    /**
     * <pre>
     * Partitions under the specified parent entity.
     * </pre>
     *
     * <code>repeated .google.cloud.dataplex.v1.Partition partitions = 1;</code>
     */
    public Builder removePartitions(int index) {
      if (partitionsBuilder_ == null) {
        ensurePartitionsIsMutable();
        partitions_.remove(index);
        onChanged();
      } else {
        partitionsBuilder_.remove(index);
      }
      return this;
    }

    /**
     * <pre>
     * Partitions under the specified parent entity.
     * </pre>
     *
     * <code>repeated .google.cloud.dataplex.v1.Partition partitions = 1;</code>
     */
    public com.google.cloud.dataplex.v1.Partition.Builder getPartitionsBuilder(int index) {
      return getPartitionsFieldBuilder().getBuilder(index);
    }

    /**
     * <pre>
     * Partitions under the specified parent entity.
     * </pre>
     *
     * <code>repeated .google.cloud.dataplex.v1.Partition partitions = 1;</code>
     */
    public com.google.cloud.dataplex.v1.PartitionOrBuilder getPartitionsOrBuilder(int index) {
      if (partitionsBuilder_ == null) {
        return partitions_.get(index);
      } else {
        return partitionsBuilder_.getMessageOrBuilder(index);
      }
    }

    /**
     * <pre>
     * Partitions under the specified parent entity.
     * </pre>
     *
     * <code>repeated .google.cloud.dataplex.v1.Partition partitions = 1;</code>
     */
    public java.util.List<? extends com.google.cloud.dataplex.v1.PartitionOrBuilder>
        getPartitionsOrBuilderList() {
      if (partitionsBuilder_ != null) {
        return partitionsBuilder_.getMessageOrBuilderList();
      } else {
        return java.util.Collections.unmodifiableList(partitions_);
      }
    }

    /**
     * <pre>
     * Partitions under the specified parent entity.
     * </pre>
     *
     * <code>repeated .google.cloud.dataplex.v1.Partition partitions = 1;</code>
     */
    public com.google.cloud.dataplex.v1.Partition.Builder addPartitionsBuilder() {
      return getPartitionsFieldBuilder()
          .addBuilder(com.google.cloud.dataplex.v1.Partition.getDefaultInstance());
    }

    /**
     * <pre>
     * Partitions under the specified parent entity.
     * </pre>
     *
     * <code>repeated .google.cloud.dataplex.v1.Partition partitions = 1;</code>
     */
    public com.google.cloud.dataplex.v1.Partition.Builder addPartitionsBuilder(int index) {
      return getPartitionsFieldBuilder()
          .addBuilder(index, com.google.cloud.dataplex.v1.Partition.getDefaultInstance());
    }

    /**
     * <pre>
     * Partitions under the specified parent entity.
     * </pre>
     *
     * <code>repeated .google.cloud.dataplex.v1.Partition partitions = 1;</code>
     */
    public java.util.List<com.google.cloud.dataplex.v1.Partition.Builder>
        getPartitionsBuilderList() {
      return getPartitionsFieldBuilder().getBuilderList();
    }

    // Lazily switches from the plain list to a RepeatedFieldBuilderV3; after
    // this point partitions_ is null and the builder owns the data.
    private com.google.protobuf.RepeatedFieldBuilderV3<
            com.google.cloud.dataplex.v1.Partition,
            com.google.cloud.dataplex.v1.Partition.Builder,
            com.google.cloud.dataplex.v1.PartitionOrBuilder>
        getPartitionsFieldBuilder() {
      if (partitionsBuilder_ == null) {
        partitionsBuilder_ =
            new com.google.protobuf.RepeatedFieldBuilderV3<
                com.google.cloud.dataplex.v1.Partition,
                com.google.cloud.dataplex.v1.Partition.Builder,
                com.google.cloud.dataplex.v1.PartitionOrBuilder>(
                partitions_, ((bitField0_ & 0x00000001) != 0), getParentForChildren(), isClean());
        partitions_ = null;
      }
      return partitionsBuilder_;
    }

    private java.lang.Object nextPageToken_ = "";

    /**
     * <pre>
     * Token to retrieve the next page of results, or empty if there are no
     * remaining results in the list.
     * </pre>
     *
     * <code>string next_page_token = 2;</code>
     *
     * @return The nextPageToken.
     */
    public java.lang.String getNextPageToken() {
      java.lang.Object ref = nextPageToken_;
      if (!(ref instanceof java.lang.String)) {
        com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        nextPageToken_ = s;
        return s;
      } else {
        return (java.lang.String) ref;
      }
    }

    /**
     * <pre>
     * Token to retrieve the next page of results, or empty if there are no
     * remaining results in the list.
     * </pre>
     *
     * <code>string next_page_token = 2;</code>
     *
     * @return The bytes for nextPageToken.
     */
    public com.google.protobuf.ByteString getNextPageTokenBytes() {
      java.lang.Object ref = nextPageToken_;
      if (ref instanceof String) {
        com.google.protobuf.ByteString b =
            com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
        nextPageToken_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }

    /**
     * <pre>
     * Token to retrieve the next page of results, or empty if there are no
     * remaining results in the list.
     * </pre>
     *
     * <code>string next_page_token = 2;</code>
     *
     * @param value The nextPageToken to set.
     * @return This builder for chaining.
     */
    public Builder setNextPageToken(java.lang.String value) {
      if (value == null) {
        throw new NullPointerException();
      }
      nextPageToken_ = value;
      bitField0_ |= 0x00000002;
      onChanged();
      return this;
    }

    /**
     * <pre>
     * Token to retrieve the next page of results, or empty if there are no
     * remaining results in the list.
     * </pre>
     *
     * <code>string next_page_token = 2;</code>
     *
     * @return This builder for chaining.
     */
    public Builder clearNextPageToken() {
      nextPageToken_ = getDefaultInstance().getNextPageToken();
      bitField0_ = (bitField0_ & ~0x00000002);
      onChanged();
      return this;
    }

    /**
     * <pre>
     * Token to retrieve the next page of results, or empty if there are no
     * remaining results in the list.
     * </pre>
     *
     * <code>string next_page_token = 2;</code>
     *
     * @param value The bytes for nextPageToken to set.
     * @return This builder for chaining.
     */
    public Builder setNextPageTokenBytes(com.google.protobuf.ByteString value) {
      if (value == null) {
        throw new NullPointerException();
      }
      checkByteStringIsUtf8(value);
      nextPageToken_ = value;
      bitField0_ |= 0x00000002;
      onChanged();
      return this;
    }

    @java.lang.Override
    public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.setUnknownFields(unknownFields);
    }

    @java.lang.Override
    public final Builder mergeUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.mergeUnknownFields(unknownFields);
    }

    // @@protoc_insertion_point(builder_scope:google.cloud.dataplex.v1.ListPartitionsResponse)
  }

  // @@protoc_insertion_point(class_scope:google.cloud.dataplex.v1.ListPartitionsResponse)
  private static final com.google.cloud.dataplex.v1.ListPartitionsResponse DEFAULT_INSTANCE;

  static {
    DEFAULT_INSTANCE = new com.google.cloud.dataplex.v1.ListPartitionsResponse();
  }

  public static com.google.cloud.dataplex.v1.ListPartitionsResponse getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }

  // Parser used by parseFrom(...) entry points; parses into a fresh builder
  // and surfaces partial messages on failure via setUnfinishedMessage.
  private static final com.google.protobuf.Parser<ListPartitionsResponse> PARSER =
      new com.google.protobuf.AbstractParser<ListPartitionsResponse>() {
        @java.lang.Override
        public ListPartitionsResponse parsePartialFrom(
            com.google.protobuf.CodedInputStream input,
            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
            throws com.google.protobuf.InvalidProtocolBufferException {
          Builder builder = newBuilder();
          try {
            builder.mergeFrom(input, extensionRegistry);
          } catch (com.google.protobuf.InvalidProtocolBufferException e) {
            throw e.setUnfinishedMessage(builder.buildPartial());
          } catch (com.google.protobuf.UninitializedMessageException e) {
            throw e.asInvalidProtocolBufferException()
                .setUnfinishedMessage(builder.buildPartial());
          } catch (java.io.IOException e) {
            throw new com.google.protobuf.InvalidProtocolBufferException(e)
                .setUnfinishedMessage(builder.buildPartial());
          }
          return builder.buildPartial();
        }
      };

  public static com.google.protobuf.Parser<ListPartitionsResponse> parser() {
    return PARSER;
  }

  @java.lang.Override
  public com.google.protobuf.Parser<ListPartitionsResponse> getParserForType() {
    return PARSER;
  }

  @java.lang.Override
  public com.google.cloud.dataplex.v1.ListPartitionsResponse getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }
}
apache/ctakes
34,219
ctakes-drug-ner/src/main/java/org/apache/ctakes/drugner/fsm/machines/util/SuffixFrequencyFSM.java
/** * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.apache.ctakes.drugner.fsm.machines.util; import net.openai.util.fsm.AnyCondition; import net.openai.util.fsm.Condition; import net.openai.util.fsm.Machine; import net.openai.util.fsm.State; import org.apache.ctakes.core.fsm.condition.IntegerCondition; import org.apache.ctakes.core.fsm.condition.PunctuationValueCondition; import org.apache.ctakes.core.fsm.condition.TextValueCondition; import org.apache.ctakes.core.fsm.condition.WordSetCondition; import org.apache.ctakes.core.fsm.state.NamedState; import org.apache.ctakes.core.fsm.token.BaseToken; import org.apache.ctakes.drugner.fsm.elements.conditions.ContainsSetTextValueCondition; import org.apache.ctakes.drugner.fsm.output.util.SuffixFrequencyToken; import java.util.*; /** * Uses one or more finite state machines to detect frequency suffix data in the given * input of tokens. 
 * @author Mayo Clinic
 */
public class SuffixFrequencyFSM {
    // Word sets consumed by the machines' conditions. NOTE(review): raw
    // (pre-generics) collections throughout — they hold only Strings here,
    // but the element type is unchecked; modernizing to Set<String> would
    // change package-visible field types, so left as-is.
    // text fractions
    Set iv_textSuffixSet = new HashSet();
    Set iv_textPrefixSet = new HashSet();
    Set iv_frequencySet = new HashSet();
    Set iv_middleTermSet = new HashSet();
    // NOTE(review): iv_periodSet is never populated or read anywhere in this
    // chunk — looks dead; confirm against the rest of the file before removal.
    Set iv_periodSet = new HashSet();
    Set iv_hyphenatedSet = new HashSet();
    Set iv_singleWordSet = new HashSet();
    Set iv_specifiedWordSet = new HashSet();
    // contains the finite state machines
    private Set iv_machineSet = new HashSet();

    /**
     * Constructor. Populates the vocabulary sets used by the machines'
     * conditions, then builds and registers the detection machines.
     */
    public SuffixFrequencyFSM() {
        // Time-of-day / meal-anchor words ("take with breakfast", "at noon").
        iv_specifiedWordSet.add("noon");
        iv_specifiedWordSet.add("lunch");
        iv_specifiedWordSet.add("breakfast");
        iv_specifiedWordSet.add("dinner");
        iv_specifiedWordSet.add("morning");
        iv_specifiedWordSet.add("afternoon");
        iv_specifiedWordSet.add("evening");
        iv_specifiedWordSet.add("am");
        iv_specifiedWordSet.add("pm");
        iv_specifiedWordSet.add("specified");

        // Single-token frequency words and undotted Latin abbreviations.
        iv_singleWordSet.add("weekly");
        iv_singleWordSet.add("monthly");
        iv_singleWordSet.add("biweekly");
        iv_singleWordSet.add("daily");
        iv_singleWordSet.add("nightly");
        iv_singleWordSet.add("bid");
        iv_singleWordSet.add("od");
        iv_singleWordSet.add("qd");
        iv_singleWordSet.add("hs");
        iv_singleWordSet.add("prn");
        iv_singleWordSet.add("tid");
        iv_singleWordSet.add("q");

        // Time-unit suffixes (full words and abbreviations).
        iv_textSuffixSet.add("d");
        iv_textSuffixSet.add("y");
        iv_textSuffixSet.add("m");
        iv_textSuffixSet.add("mo");
        iv_textSuffixSet.add("yr");
        iv_textSuffixSet.add("day");
        iv_textSuffixSet.add("daily");
        iv_textSuffixSet.add("wk");
        iv_textSuffixSet.add("week");
        iv_textSuffixSet.add("weeks");
        iv_textSuffixSet.add("h");
        iv_textSuffixSet.add("hour");
        iv_textSuffixSet.add("hours");
        iv_textSuffixSet.add("min");
        iv_textSuffixSet.add("month");
        iv_textSuffixSet.add("months");
        iv_textSuffixSet.add("year");

        iv_textPrefixSet.add("every");

        // Frequency count words ("once", "two", ...).
        iv_frequencySet.add("once");
        iv_frequencySet.add("twice");
        iv_frequencySet.add("one");
        iv_frequencySet.add("two");
        iv_frequencySet.add("three");
        iv_frequencySet.add("four");
        iv_frequencySet.add("five");
        iv_frequencySet.add("six");
        iv_frequencySet.add("seven");
        iv_frequencySet.add("eight");
        iv_frequencySet.add("nine");

        // Connective words allowed between frequency parts ("times a day").
        iv_middleTermSet.add("a");
        iv_middleTermSet.add("an");
        iv_middleTermSet.add("as");
        iv_middleTermSet.add("in");
        iv_middleTermSet.add("the");
        iv_middleTermSet.add("each");
        iv_middleTermSet.add("times");
        iv_middleTermSet.add("time");
        iv_middleTermSet.add("per");
        iv_middleTermSet.add("every");
        iv_middleTermSet.add("at");

        // Pre-hyphenated frequency phrases. NOTE(review): "once-daily" and
        // "twice-daily" are added twice below (harmless in a Set, but noise).
        iv_hyphenatedSet.add("once-a-day");
        iv_hyphenatedSet.add("once-a-week");
        iv_hyphenatedSet.add("twice-a-day");
        iv_hyphenatedSet.add("twice-a-week");
        iv_hyphenatedSet.add("once-daily");
        iv_hyphenatedSet.add("twice-daily");
        iv_hyphenatedSet.add("one-a-day");
        iv_hyphenatedSet.add("two-a-day");
        iv_hyphenatedSet.add("three-a-day");
        iv_hyphenatedSet.add("four-a-day");
        iv_hyphenatedSet.add("five-a-day");
        iv_hyphenatedSet.add("six-a-day");
        iv_hyphenatedSet.add("seven-a-day");
        iv_hyphenatedSet.add("eight-a-day");
        iv_hyphenatedSet.add("nine-a-day");
        iv_hyphenatedSet.add("once-weekly");
        iv_hyphenatedSet.add("twice-weekly");
        iv_hyphenatedSet.add("one-a-week");
        iv_hyphenatedSet.add("two-a-week");
        iv_hyphenatedSet.add("three-a-week");
        iv_hyphenatedSet.add("four-a-week");
        iv_hyphenatedSet.add("five-a-week");
        iv_hyphenatedSet.add("six-a-week");
        iv_hyphenatedSet.add("seven-a-week");
        iv_hyphenatedSet.add("eight-a-week");
        iv_hyphenatedSet.add("nine-a-week");
        iv_hyphenatedSet.add("once-monthly");
        iv_hyphenatedSet.add("twice-monthly");
        iv_hyphenatedSet.add("one-a-month");
        iv_hyphenatedSet.add("two-a-month");
        iv_hyphenatedSet.add("three-a-month");
        iv_hyphenatedSet.add("four-a-month");
        iv_hyphenatedSet.add("five-a-month");
        iv_hyphenatedSet.add("six-a-month");
        iv_hyphenatedSet.add("seven-a-month");
        iv_hyphenatedSet.add("eight-a-month");
        iv_hyphenatedSet.add("nine-a-month");
        iv_hyphenatedSet.add("once-hourly");
        iv_hyphenatedSet.add("twice-hourly");
        iv_hyphenatedSet.add("one-an-hour");
        iv_hyphenatedSet.add("two-an-hour");
        iv_hyphenatedSet.add("three-an-hour");
        iv_hyphenatedSet.add("four-an-hour");
        iv_hyphenatedSet.add("five-an-hour");
        iv_hyphenatedSet.add("six-an-hour");
        iv_hyphenatedSet.add("seven-an-hour");
        iv_hyphenatedSet.add("eight-an-hour");
        iv_hyphenatedSet.add("nine-an-hour");
        iv_hyphenatedSet.add("once-nightly");
        iv_hyphenatedSet.add("as-needed");
        iv_hyphenatedSet.add("twice-nightly");
        iv_hyphenatedSet.add("once-every-day");
        iv_hyphenatedSet.add("once-daily");
        iv_hyphenatedSet.add("twice-daily");
        iv_hyphenatedSet.add("one-time-a-day");
        iv_hyphenatedSet.add("two-times-a-day");
        iv_hyphenatedSet.add("three-times-a-day");
        iv_hyphenatedSet.add("four-times-a-day");
        iv_hyphenatedSet.add("five-times-a-day");
        iv_hyphenatedSet.add("six-times-a-day");
        iv_hyphenatedSet.add("seven-times-a-day");
        iv_hyphenatedSet.add("eight-times-a-day");
        iv_hyphenatedSet.add("nine-times-a-day");
        iv_hyphenatedSet.add("once-every-week");
        iv_hyphenatedSet.add("twice-every-day");
        iv_hyphenatedSet.add("one-time-a-week");
        iv_hyphenatedSet.add("two-times-a-week");
        iv_hyphenatedSet.add("three-times-a-week");
        iv_hyphenatedSet.add("four-times-a-week");
        iv_hyphenatedSet.add("five-times-a-week");
        iv_hyphenatedSet.add("six-times-a-week");
        iv_hyphenatedSet.add("seven-times-a-week");
        iv_hyphenatedSet.add("eight-times-a-week");
        iv_hyphenatedSet.add("nine-times-a-week");
        iv_hyphenatedSet.add("once-every-hour");
        iv_hyphenatedSet.add("twice-every-hour");
        iv_hyphenatedSet.add("one-time-a-month");
        iv_hyphenatedSet.add("two-times-a-month");
        iv_hyphenatedSet.add("three-times-a-month");
        iv_hyphenatedSet.add("four-times-a-month");
        iv_hyphenatedSet.add("five-times-a-month");
        iv_hyphenatedSet.add("six-times-a-month");
        iv_hyphenatedSet.add("seven-times-a-month");
        iv_hyphenatedSet.add("eight-times-a-month");
        iv_hyphenatedSet.add("nine-times-a-month");
        iv_hyphenatedSet.add("one-time-each-hour");
        iv_hyphenatedSet.add("two-times-each-hour");
        iv_hyphenatedSet.add("three-times-each-hour");
        iv_hyphenatedSet.add("four-times-each-hour");
        iv_hyphenatedSet.add("five-times-each-hour");
        iv_hyphenatedSet.add("six-times-each-hour");
        iv_hyphenatedSet.add("seven-times-each-hour");
        iv_hyphenatedSet.add("eight-times-each-hour");
        iv_hyphenatedSet.add("nine-times-each-hour");

        iv_machineSet.add(getLatin3AbbreviationMachine());
        iv_machineSet.add(getLatin2AbbreviationMachine());
        iv_machineSet.add(getFrequencyMachine());
    }

    /**
     * Builds a machine that detects three-letter dotted Latin frequency
     * abbreviations, e.g. "q.i.d.", "q.h.s.", "q.a.d.", "q.o.d.", "q.w.k.",
     * "b.i.d.", "p.r.n.", "t.i.d." (judging from the transitions below; the
     * previous javadoc's "40mg/d" examples were a copy-paste from a dosage
     * machine and did not match this machine's behavior).
     *
     * @return the constructed machine, rooted at its START state
     */
    private Machine getLatin3AbbreviationMachine() {
        State startState = new NamedState("START");
        State endState = new NamedState("END");
        endState.setEndStateFlag(true);

        Machine m = new Machine(startState);

        // First-letter states: q/b/p/t begin an abbreviation.
        State leftAbbreviateQState = new NamedState("LEFT_Q");
        State leftAbbreviateBState = new NamedState("LEFT_B");
        State leftAbbreviatePState = new NamedState("LEFT_P");
        State leftAbbreviateTState = new NamedState("LEFT_T");

        // Second-letter states, keyed by first->second letter.
        State middleAbbreviateQtoAState = new NamedState("MID_Q2A");
        State middleAbbreviateQtoDState = new NamedState("MID_Q2D");
        State middleAbbreviateQtoHState = new NamedState("MID_Q2H");
        State middleAbbreviateQtoIState = new NamedState("MID_Q2I");
        State middleAbbreviateQtoMState = new NamedState("MID_Q2M");
        State middleAbbreviateQtoOState = new NamedState("MID_Q2O");
        State middleAbbreviateQtoWState = new NamedState("MID_Q2W");
        State middleAbbreviateQtoPState = new NamedState("MID_Q2P");
        State middleAbbreviatePtoRState = new NamedState("MID_P2R");
        State middleAbbreviateTtoIState = new NamedState("MID_T2I");
        State middleAbbreviateBtoIState = new NamedState("MID_B2I");

        // Third-letter states: full abbreviation seen, awaiting final dot.
        State rightAbbreviateQIDState = new NamedState("RIGHT_QID");
        State rightAbbreviateQADState = new NamedState("RIGHT_QAD");
        State rightAbbreviateQDSState = new NamedState("RIGHT_QDS");
        State rightAbbreviateQHSState = new NamedState("RIGHT_QHS");
        State rightAbbreviateQWKState = new NamedState("RIGHT_QWK");
        State rightAbbreviateQODState = new NamedState("RIGHT_QOD");
        State rightAbbreviateQAMState = new NamedState("RIGHT_QAM");
        State rightAbbreviateQPMState = new NamedState("RIGHT_QPM");
        State rightAbbreviateQMTState = new NamedState("RIGHT_QMT");
        State rightAbbreviateBIDState = new NamedState("RIGHT_BID");
        State rightAbbreviatePRNState = new NamedState("RIGHT_PRN");
        State rightAbbreviateTIDState = new NamedState("RIGHT_TID");

        // Dot states after the first letter.
        State firstDotQState = new NamedState("FIRSTDOTQ");
        State firstDotBState = new NamedState("FIRSTDOTB");
        State firstDotPState = new NamedState("FIRSTDOTP");
        State firstDotTState = new NamedState("FIRSTDOTT");

        // Dot states after the second letter.
        State secondDotQtoAState = new NamedState("SECONDDOTQ2A");
        State secondDotQtoDState = new NamedState("SECONDDOTQ2D");
        State secondDotQtoHState = new NamedState("SECONDDOTQ2H");
        State secondDotQtoIState = new NamedState("SECONDDOTQ2I");
        State secondDotQtoMState = new NamedState("SECONDDOTQ2M");
        State secondDotQtoOState = new NamedState("SECONDDOTQ2O");
        State secondDotQtoWState = new NamedState("SECONDDOTQ2W");
        State secondDotQtoPState = new NamedState("SECONDDOTQ2P");
        State secondDotBtoIState = new NamedState("SECONDDOTB2I");
        State secondDotPtoRState = new NamedState("SECONDDOTP2R");
        State secondDotTtoIState = new NamedState("SECONDDOTT2I");

        // One '.' condition instance per edge (conditions may be stateful).
        Condition firstDotConditionQ = new PunctuationValueCondition('.');
        Condition firstDotConditionB = new PunctuationValueCondition('.');
        Condition firstDotConditionP = new PunctuationValueCondition('.');
        Condition firstDotConditionT = new PunctuationValueCondition('.');
        Condition secondDotConditionQH = new PunctuationValueCondition('.');
        Condition secondDotConditionQI = new PunctuationValueCondition('.');
        Condition secondDotConditionQA = new PunctuationValueCondition('.');
        Condition secondDotConditionQD = new PunctuationValueCondition('.');
        Condition secondDotConditionQM = new PunctuationValueCondition('.');
        Condition secondDotConditionQO = new PunctuationValueCondition('.');
        Condition secondDotConditionQW = new PunctuationValueCondition('.');
        Condition secondDotConditionQP = new PunctuationValueCondition('.');
        Condition secondDotConditionBI = new PunctuationValueCondition('.');
        Condition secondDotConditionPR = new PunctuationValueCondition('.');
        Condition secondDotConditionTI = new PunctuationValueCondition('.');
        Condition thirdDotConditionQHS = new PunctuationValueCondition('.');
        Condition thirdDotConditionQAD = new PunctuationValueCondition('.');
        Condition thirdDotConditionQID = new PunctuationValueCondition('.');
        Condition thirdDotConditionQDS = new PunctuationValueCondition('.');
        Condition thirdDotConditionQMT = new PunctuationValueCondition('.');
        Condition thirdDotConditionQOD = new PunctuationValueCondition('.');
        Condition thirdDotConditionQWK = new PunctuationValueCondition('.');
        Condition thirdDotConditionQAM = new PunctuationValueCondition('.');
        Condition thirdDotConditionQPM = new PunctuationValueCondition('.');
        Condition thirdDotConditionBID = new PunctuationValueCondition('.');
        Condition thirdDotConditionPRN = new PunctuationValueCondition('.');
        Condition thirdDotConditionTID = new PunctuationValueCondition('.');

        // First letter (case-insensitive); any other token stays at START.
        startState.addTransition(new TextValueCondition("q", true), leftAbbreviateQState);
        startState.addTransition(new TextValueCondition("b", true), leftAbbreviateBState);
        startState.addTransition(new TextValueCondition("p", true), leftAbbreviatePState);
        startState.addTransition(new TextValueCondition("t", true), leftAbbreviateTState);
        startState.addTransition(new AnyCondition(), startState);

        leftAbbreviateQState.addTransition(firstDotConditionQ, firstDotQState);
        leftAbbreviateQState.addTransition(new AnyCondition(), startState);

        firstDotQState.addTransition(new TextValueCondition("a", true), middleAbbreviateQtoAState);
        firstDotQState.addTransition(new TextValueCondition("d", true), middleAbbreviateQtoDState);
        firstDotQState.addTransition(new TextValueCondition("h", true), middleAbbreviateQtoHState);
        firstDotQState.addTransition(new TextValueCondition("i", true), middleAbbreviateQtoIState);
        firstDotQState.addTransition(new TextValueCondition("m", true), middleAbbreviateQtoMState);
        firstDotQState.addTransition(new TextValueCondition("o", true), middleAbbreviateQtoOState);
        firstDotQState.addTransition(new TextValueCondition("w", true), middleAbbreviateQtoWState);
        firstDotQState.addTransition(new TextValueCondition("p", true), middleAbbreviateQtoPState);
        firstDotQState.addTransition(new AnyCondition(), startState);

        middleAbbreviateQtoAState.addTransition(secondDotConditionQA, secondDotQtoAState);
        middleAbbreviateQtoAState.addTransition(new AnyCondition(), startState);
        middleAbbreviateQtoDState.addTransition(secondDotConditionQD, secondDotQtoDState);
        middleAbbreviateQtoDState.addTransition(new AnyCondition(), startState);
        middleAbbreviateQtoHState.addTransition(secondDotConditionQH, secondDotQtoHState);
        middleAbbreviateQtoHState.addTransition(new AnyCondition(), startState);
        middleAbbreviateQtoIState.addTransition(secondDotConditionQI, secondDotQtoIState);
        middleAbbreviateQtoIState.addTransition(new AnyCondition(), startState);
        middleAbbreviateQtoMState.addTransition(secondDotConditionQM, secondDotQtoMState);
        middleAbbreviateQtoMState.addTransition(new AnyCondition(), startState);
        middleAbbreviateQtoOState.addTransition(secondDotConditionQO, secondDotQtoOState);
        middleAbbreviateQtoOState.addTransition(new AnyCondition(), startState);
        middleAbbreviateQtoWState.addTransition(secondDotConditionQW, secondDotQtoWState);
        middleAbbreviateQtoWState.addTransition(new AnyCondition(), startState);
        middleAbbreviateQtoPState.addTransition(secondDotConditionQP, secondDotQtoPState);
        middleAbbreviateQtoPState.addTransition(new AnyCondition(), startState);

        // Third letter after the second dot.
        secondDotQtoAState.addTransition(new TextValueCondition("d", true), rightAbbreviateQADState);
        secondDotQtoAState.addTransition(new AnyCondition(), startState);
        secondDotQtoDState.addTransition(new TextValueCondition("s", true), rightAbbreviateQDSState);
        secondDotQtoDState.addTransition(new AnyCondition(), startState);
        secondDotQtoHState.addTransition(new TextValueCondition("s", true), rightAbbreviateQHSState);
        secondDotQtoHState.addTransition(new AnyCondition(), startState);
        secondDotQtoIState.addTransition(new TextValueCondition("d", true), rightAbbreviateQIDState);
        secondDotQtoIState.addTransition(new AnyCondition(), startState);
        secondDotQtoMState.addTransition(new TextValueCondition("t", true), rightAbbreviateQMTState);
        secondDotQtoMState.addTransition(new AnyCondition(), startState);
        secondDotQtoOState.addTransition(new TextValueCondition("d", true), rightAbbreviateQODState);
        secondDotQtoOState.addTransition(new AnyCondition(), startState);
        secondDotQtoWState.addTransition(new TextValueCondition("k", true), rightAbbreviateQWKState);
        secondDotQtoWState.addTransition(new AnyCondition(), startState);
        // NOTE(review): the "m" -> RIGHT_QAM transition below is registered on
        // SECONDDOTQ2A *after* that state already received an AnyCondition
        // fallback above. If Machine evaluates transitions in insertion order,
        // "q.a.m." can never reach RIGHT_QAM — confirm the transition-ordering
        // semantics of net.openai.util.fsm.Machine.
        secondDotQtoAState.addTransition(new TextValueCondition("m", true), rightAbbreviateQAMState);
        secondDotQtoAState.addTransition(new AnyCondition(), startState);
        secondDotQtoPState.addTransition(new TextValueCondition("m", true), rightAbbreviateQPMState);
        secondDotQtoPState.addTransition(new AnyCondition(), startState);
        // NOTE(review): this routes "b.i.d" (no trailing dot) directly to END;
        // a second "d" edge to RIGHT_BID (for the fully dotted "b.i.d.") is
        // added to this same state further down — verify the duplicate "d"
        // edges behave as intended.
        secondDotBtoIState.addTransition(new TextValueCondition("d", true), endState);
        secondDotBtoIState.addTransition(new AnyCondition(), startState);

        // Final dot completes the abbreviation.
        rightAbbreviateQADState.addTransition(thirdDotConditionQAD, endState);
        rightAbbreviateQADState.addTransition(new AnyCondition(), startState);
        rightAbbreviateQDSState.addTransition(thirdDotConditionQDS, endState);
        rightAbbreviateQDSState.addTransition(new AnyCondition(), startState);
        rightAbbreviateQHSState.addTransition(thirdDotConditionQHS, endState);
        rightAbbreviateQHSState.addTransition(new AnyCondition(), startState);
        rightAbbreviateQIDState.addTransition(thirdDotConditionQID, endState);
        rightAbbreviateQIDState.addTransition(new AnyCondition(), startState);
        rightAbbreviateQMTState.addTransition(thirdDotConditionQMT, endState);
        rightAbbreviateQMTState.addTransition(new AnyCondition(), startState);
        rightAbbreviateQODState.addTransition(thirdDotConditionQOD, endState);
        rightAbbreviateQODState.addTransition(new AnyCondition(), startState);
        rightAbbreviateQWKState.addTransition(thirdDotConditionQWK, endState);
        rightAbbreviateQWKState.addTransition(new AnyCondition(), startState);
        rightAbbreviateQAMState.addTransition(thirdDotConditionQAM, endState);
        rightAbbreviateQAMState.addTransition(new AnyCondition(), startState);
        rightAbbreviateQPMState.addTransition(thirdDotConditionQPM, endState);
        rightAbbreviateQPMState.addTransition(new AnyCondition(), startState);

        // b.i.d. branch.
        leftAbbreviateBState.addTransition(firstDotConditionB, firstDotBState);
        leftAbbreviateBState.addTransition(new AnyCondition(), startState);
        firstDotBState.addTransition(new TextValueCondition("i", true), middleAbbreviateBtoIState);
        firstDotBState.addTransition(new AnyCondition(), startState);
        middleAbbreviateBtoIState.addTransition(secondDotConditionBI, secondDotBtoIState);
        middleAbbreviateBtoIState.addTransition(new AnyCondition(), startState);
        secondDotBtoIState.addTransition(new TextValueCondition("d", true), rightAbbreviateBIDState);
        secondDotBtoIState.addTransition(new AnyCondition(), startState);
        // NOTE(review): unlike every sibling RIGHT_* state above, RIGHT_BID,
        // RIGHT_PRN and RIGHT_TID (below) get no AnyCondition fallback to
        // START — verify this asymmetry is intentional and cannot strand the
        // machine.
        rightAbbreviateBIDState.addTransition(thirdDotConditionBID, endState);

        // p.r.n. and t.i.d. branches.
        leftAbbreviatePState.addTransition(firstDotConditionP, firstDotPState);
        leftAbbreviatePState.addTransition(new AnyCondition(), startState);
        leftAbbreviateTState.addTransition(firstDotConditionT, firstDotTState);
        leftAbbreviateTState.addTransition(new AnyCondition(), startState);
        firstDotPState.addTransition(new TextValueCondition("r", true), middleAbbreviatePtoRState);
        firstDotPState.addTransition(new AnyCondition(), startState);
        firstDotTState.addTransition(new TextValueCondition("i", true), middleAbbreviateTtoIState);
        firstDotTState.addTransition(new AnyCondition(), startState);
        middleAbbreviatePtoRState.addTransition(secondDotConditionPR, secondDotPtoRState);
        middleAbbreviatePtoRState.addTransition(new AnyCondition(), startState);
        middleAbbreviateTtoIState.addTransition(secondDotConditionTI, secondDotTtoIState);
        middleAbbreviateTtoIState.addTransition(new AnyCondition(), startState);
        secondDotPtoRState.addTransition(new TextValueCondition("n", true), rightAbbreviatePRNState);
        secondDotPtoRState.addTransition(new AnyCondition(), startState);
        secondDotTtoIState.addTransition(new TextValueCondition("d", true), rightAbbreviateTIDState);
        secondDotTtoIState.addTransition(new AnyCondition(), startState);
        rightAbbreviatePRNState.addTransition(thirdDotConditionPRN, endState);
        rightAbbreviateTIDState.addTransition(thirdDotConditionTID, endState);

        endState.addTransition(new AnyCondition(), startState);

        return m;
    }

    /**
     * Builds a machine for two-letter dotted Latin frequency abbreviations —
     * judging from the states declared below: "q.d.", "q.h.", "a.m.", "o.d.",
     * "h.s.", "p.m." (the previous javadoc's "40mg/d" examples were a
     * copy-paste from a dosage machine and did not match).
     *
     * @return the constructed machine, rooted at its START state
     */
    private Machine getLatin2AbbreviationMachine() {
        State startState = new NamedState("START");
        State endState = new NamedState("END");
        endState.setEndStateFlag(true);

        Machine m = new Machine(startState);

        State leftAbbreviateQState = new NamedState("LEFT_Q");
        State leftAbbreviateAState = new NamedState("LEFT_A");
        State leftAbbreviateOState = new NamedState("LEFT_O");
        State leftAbbreviateHState = new NamedState("LEFT_H");
        State leftAbbreviatePState = new NamedState("LEFT_P");

        State rightAbbreviateQDState = new NamedState("RIGHT_QD");
        State rightAbbreviateQHState = new NamedState("RIGHT_QH");
        State rightAbbreviateAMState = new NamedState("RIGHT_AM");
        State rightAbbreviateODState = new NamedState("RIGHT_OD");
        State rightAbbreviateHSState = new NamedState("RIGHT_HS");
        State rightAbbreviatePMState = new NamedState("RIGHT_PM");

        State firstDotQState = new NamedState("FIRSTDOTQ");
        State firstDotAState = new NamedState("FIRSTDOTA");
        State firstDotOState = new NamedState("FIRSTDOTO");
        State firstDotHState = new NamedState("FIRSTDOTH");
        State firstDotPState = new NamedState("FIRSTDOTP");

        Condition firstQDDotCondition = new PunctuationValueCondition('.');
        Condition
secondQDDotCondition = new PunctuationValueCondition('.'); Condition firstODDotCondition = new PunctuationValueCondition('.'); Condition secondQHDotCondition = new PunctuationValueCondition('.'); Condition secondODDotCondition = new PunctuationValueCondition('.'); Condition firstAMDotCondition = new PunctuationValueCondition('.'); Condition firstPMDotCondition = new PunctuationValueCondition('.'); Condition secondAMDotCondition = new PunctuationValueCondition('.'); Condition secondPMDotCondition = new PunctuationValueCondition('.'); Condition firstHSDotCondition = new PunctuationValueCondition('.'); Condition secondHSDotCondition = new PunctuationValueCondition('.'); Condition soloCondition = new WordSetCondition(iv_singleWordSet, true); startState.addTransition(new TextValueCondition("q", true), leftAbbreviateQState); startState.addTransition(new TextValueCondition("a", true), leftAbbreviateAState); startState.addTransition(new TextValueCondition("o", true), leftAbbreviateOState); startState.addTransition(new TextValueCondition("h", true), leftAbbreviateHState); startState.addTransition(new TextValueCondition("p", true), leftAbbreviatePState); startState.addTransition(new AnyCondition(), startState); leftAbbreviateQState.addTransition(firstQDDotCondition, firstDotQState); leftAbbreviateQState.addTransition(new AnyCondition(), startState); leftAbbreviateAState.addTransition(firstAMDotCondition, firstDotAState); leftAbbreviateAState.addTransition(new AnyCondition(), startState); leftAbbreviateOState.addTransition(firstODDotCondition, firstDotOState); leftAbbreviateOState.addTransition(new AnyCondition(), startState); leftAbbreviateHState.addTransition(firstHSDotCondition, firstDotHState); leftAbbreviateHState.addTransition(new AnyCondition(), startState); leftAbbreviatePState.addTransition(firstPMDotCondition, firstDotPState); leftAbbreviatePState.addTransition(new AnyCondition(), startState); firstDotQState.addTransition(soloCondition, endState); 
firstDotQState.addTransition(new TextValueCondition("d", true), rightAbbreviateQDState); firstDotQState.addTransition(new TextValueCondition("h", true), rightAbbreviateQHState); firstDotQState.addTransition(new AnyCondition(), startState); firstDotAState.addTransition(new TextValueCondition("m", true), rightAbbreviateAMState); firstDotAState.addTransition(new AnyCondition(), startState); firstDotOState.addTransition(new TextValueCondition("d", true), rightAbbreviateODState); firstDotOState.addTransition(new AnyCondition(), startState); firstDotHState.addTransition(new TextValueCondition("s", true), rightAbbreviateHSState); firstDotHState.addTransition(new AnyCondition(), startState); firstDotPState.addTransition(new TextValueCondition("m", true), rightAbbreviatePMState); firstDotPState.addTransition(new AnyCondition(), startState); rightAbbreviateQHState.addTransition(secondQHDotCondition, endState); rightAbbreviateQHState.addTransition(new AnyCondition(), startState); rightAbbreviateAMState.addTransition(secondAMDotCondition, endState); rightAbbreviateAMState.addTransition(new AnyCondition(), startState); rightAbbreviateODState.addTransition(secondODDotCondition, endState); rightAbbreviateODState.addTransition(new AnyCondition(), startState); rightAbbreviateQDState.addTransition(secondQDDotCondition, endState); rightAbbreviateQDState.addTransition(new AnyCondition(), startState); rightAbbreviatePMState.addTransition(secondPMDotCondition, endState); rightAbbreviatePMState.addTransition(new AnyCondition(), startState); rightAbbreviateHSState.addTransition(secondHSDotCondition, endState); rightAbbreviateHSState.addTransition(new AnyCondition(), startState); endState.addTransition(new AnyCondition(), startState); return m; } /** * Gets a finite state machine that detects the following * ('once', 'twice', # or text#) a day/week/month/year: * <ol> * <li>once a day</li> * <li>three times a day</li> * <li>once-a-day</li> * </ol> * @return */ private Machine 
getFrequencyMachine() { State startState = new NamedState("START"); State endState = new NamedState("END"); endState.setEndStateFlag(true); Machine m = new Machine(startState); State leftAbbreviateState = new NamedState("LEFT_FREQ"); State lastTextState = new NamedState("RIGHT_FREQ"); State middleATextState = new NamedState("MID_TEXT"); State firstDashState = new NamedState("FIRSTDASH"); State secondDashState = new NamedState("SECONDDASH"); Condition integerCondition = new IntegerCondition(); Condition firstDashCondition = new PunctuationValueCondition('-'); Condition secondDashCondition = new PunctuationValueCondition('-'); Condition numericStartCondition = new WordSetCondition(iv_frequencySet, false); Condition hyphenatedCondition = new WordSetCondition(iv_hyphenatedSet, false); Condition firstMiddleTextCondition = new WordSetCondition( iv_middleTermSet, true); Condition secondMiddleTextCondition = new WordSetCondition( iv_middleTermSet, true); Condition thirdMiddleTextCondition = new WordSetCondition( iv_middleTermSet, true); Condition fourthMiddleTextCondition = new WordSetCondition( iv_middleTermSet, true); Condition lastTextCondition = new WordSetCondition(iv_textSuffixSet, false); Condition firstTextCondition = new WordSetCondition(iv_textPrefixSet, false); Condition soloCondition = new WordSetCondition(iv_singleWordSet, true); Condition specificWordCondition = new WordSetCondition( iv_specifiedWordSet, false); Condition containsSoloTermCondition = new ContainsSetTextValueCondition( iv_singleWordSet, true); startState.addTransition(numericStartCondition, leftAbbreviateState); startState.addTransition(firstTextCondition, leftAbbreviateState); startState.addTransition(new TextValueCondition("a", true), leftAbbreviateState); startState.addTransition(integerCondition, leftAbbreviateState); startState.addTransition(hyphenatedCondition, endState); startState.addTransition(containsSoloTermCondition, endState); startState.addTransition(soloCondition, endState); 
startState.addTransition(new AnyCondition(), startState); leftAbbreviateState.addTransition(firstMiddleTextCondition, middleATextState); leftAbbreviateState.addTransition(firstDashCondition, firstDashState); leftAbbreviateState.addTransition(soloCondition, endState); leftAbbreviateState.addTransition(specificWordCondition, endState); leftAbbreviateState.addTransition(hyphenatedCondition, endState); leftAbbreviateState.addTransition(new AnyCondition(), startState); firstDashState .addTransition(thirdMiddleTextCondition, middleATextState); firstDashState.addTransition(new AnyCondition(), startState); middleATextState .addTransition(secondMiddleTextCondition, lastTextState); middleATextState.addTransition(secondDashCondition, secondDashState); middleATextState.addTransition(lastTextCondition, endState); middleATextState.addTransition(new AnyCondition(), startState); secondDashState.addTransition(fourthMiddleTextCondition, lastTextState); secondDashState.addTransition(lastTextCondition, endState); secondDashState.addTransition(new AnyCondition(), startState); lastTextState.addTransition(lastTextCondition, endState); lastTextState.addTransition(new AnyCondition(), startState); endState.addTransition(new AnyCondition(), startState); return m; } /** * Executes the finite state machines. * @param tokens * @return Set SuffixFrequencyToken objects. 
* @throws Exception */ public Set execute(List tokens, Set overrideSet) throws Exception { Set rangeSet = new HashSet(); // maps a fsm to a token start index // key = fsm , value = token start index Map tokenStartMap = new HashMap(); Iterator overrideTokenItr = overrideSet.iterator(); // key = start offset, value = override BaseToken object Map overrideTokenMap = new HashMap(); while (overrideTokenItr.hasNext()) { BaseToken t = (BaseToken) overrideTokenItr.next(); Integer key = t.getStartOffset(); overrideTokenMap.put(key, t); } boolean overrideOn = false; int overrideEndOffset = -1; for (int i = 0; i < tokens.size(); i++) { BaseToken token = (BaseToken) tokens.get(i); Integer key = token.getStartOffset(); if (overrideOn) { if (token.getStartOffset() >= overrideEndOffset) { overrideOn = false; overrideEndOffset = -1; } else { // step to next iteration of for loop continue; } } else { if (overrideTokenMap.containsKey(key)) { // override one or more tokens until the override // token is complete token = (BaseToken) overrideTokenMap.get(key); overrideOn = true; overrideEndOffset = token.getEndOffset(); } } Iterator machineItr = iv_machineSet.iterator(); while (machineItr.hasNext()) { Machine fsm = (Machine) machineItr.next(); fsm.input(token); State currentState = fsm.getCurrentState(); if (currentState.getStartStateFlag()) { tokenStartMap.put(fsm, i); } if (currentState.getEndStateFlag()) { Object o = tokenStartMap.get(fsm); int tokenStartIndex; if (o == null) { // By default, all machines start with // token zero. 
tokenStartIndex = 0; } else { tokenStartIndex = ((Integer) o); // skip ahead over single token we don't want tokenStartIndex++; } BaseToken startToken = (BaseToken) tokens .get(tokenStartIndex); BaseToken endToken = token; SuffixFrequencyToken segmentToken = new SuffixFrequencyToken( startToken.getStartOffset(), endToken .getEndOffset()); rangeSet.add(segmentToken); fsm.reset(); } } } // cleanup tokenStartMap.clear(); // reset machines Iterator itr = iv_machineSet.iterator(); while (itr.hasNext()) { Machine fsm = (Machine) itr.next(); fsm.reset(); } return rangeSet; } /** * Executes the finite state machines. * @param tokens * @return Set of FractionToken objects. * @throws Exception */ public Set execute(List tokens) throws Exception { Set fractionSet = new HashSet(); // maps a fsm to a token start index // key = fsm , value = token start index Map tokenStartMap = new HashMap(); for (int i = 0; i < tokens.size(); i++) { BaseToken token = (BaseToken) tokens.get(i); Iterator machineItr = iv_machineSet.iterator(); while (machineItr.hasNext()) { Machine fsm = (Machine) machineItr.next(); fsm.input(token); State currentState = fsm.getCurrentState(); if (currentState.getStartStateFlag()) { tokenStartMap.put(fsm, i); } if (currentState.getEndStateFlag()) { Object o = tokenStartMap.get(fsm); int tokenStartIndex; if (o == null) { // By default, all machines start with // token zero. tokenStartIndex = 0; } else { tokenStartIndex = ((Integer) o); // skip ahead over single token we don't want tokenStartIndex++; } BaseToken startToken = (BaseToken) tokens .get(tokenStartIndex); BaseToken endToken = token; SuffixFrequencyToken fractionToken = new SuffixFrequencyToken( startToken.getStartOffset(), endToken .getEndOffset()); fractionSet.add(fractionToken); fsm.reset(); } } } // cleanup tokenStartMap.clear(); // reset machines Iterator itr = iv_machineSet.iterator(); while (itr.hasNext()) { Machine fsm = (Machine) itr.next(); fsm.reset(); } return fractionSet; } }
apache/geode
36,011
geode-wan/src/distributedTest/java/org/apache/geode/internal/cache/wan/misc/PDXNewWanDUnitTest.java
/* * Licensed to the Apache Software Foundation (ASF) under one or more contributor license * agreements. See the NOTICE file distributed with this work for additional information regarding * copyright ownership. The ASF licenses this file to You under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance with the License. You may obtain a * copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software distributed under the License * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express * or implied. See the License for the specific language governing permissions and limitations under * the License. */ package org.apache.geode.internal.cache.wan.misc; import static org.apache.geode.cache.Region.SEPARATOR; import static org.apache.geode.test.awaitility.GeodeAwaitility.await; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertTrue; import static org.junit.Assert.fail; import java.util.concurrent.CountDownLatch; import java.util.concurrent.ExecutionException; import java.util.concurrent.TimeUnit; import java.util.concurrent.TimeoutException; import org.junit.Ignore; import org.junit.Test; import org.junit.experimental.categories.Category; import org.apache.geode.cache.Region; import org.apache.geode.cache.partition.PartitionRegionHelper; import org.apache.geode.distributed.DistributedMember; import org.apache.geode.distributed.internal.ClusterDistributionManager; import org.apache.geode.distributed.internal.DistributionMessage; import org.apache.geode.distributed.internal.DistributionMessageObserver; import org.apache.geode.internal.cache.UpdateOperation; import org.apache.geode.internal.cache.wan.WANTestBase; import org.apache.geode.pdx.PdxReader; import org.apache.geode.pdx.PdxSerializable; import org.apache.geode.pdx.PdxWriter; import 
org.apache.geode.pdx.internal.PeerTypeRegistration; import org.apache.geode.test.dunit.AsyncInvocation; import org.apache.geode.test.dunit.IgnoredException; import org.apache.geode.test.dunit.Wait; import org.apache.geode.test.junit.categories.WanTest; @Category({WanTest.class}) public class PDXNewWanDUnitTest extends WANTestBase { private static final long serialVersionUID = 1L; public static final String KEY_0 = "Key_0"; public PDXNewWanDUnitTest() { super(); } /** * Test 1> Site 1 : 1 locator, 1 member 2> Site 2 : 1 locator, 1 member 3> DR is defined on member * 1 on site1 4> Serial GatewaySender is defined on member 1 on site1 5> Same DR is defined on * site2 member 1 6> Put is done with value which is PDXSerializable 7> Validate whether other * sites member receive this put operation. */ @Test public void testWANPDX_RR_SerialSender() { Integer lnPort = vm0.invoke(() -> WANTestBase.createFirstLocatorWithDSId(1)); Integer nyPort = vm1.invoke(() -> WANTestBase.createFirstRemoteLocator(2, lnPort)); createCacheInVMs(nyPort, vm2); vm2.invoke(WANTestBase::createReceiver); createCacheInVMs(lnPort, vm3); vm3.invoke(() -> WANTestBase.createSender("ln", 2, false, 100, 10, false, false, null, true)); vm2.invoke( () -> WANTestBase.createReplicatedRegion(getTestMethodName() + "_RR", null, isOffHeap())); vm3.invoke(() -> WANTestBase.startSender("ln")); vm3.invoke( () -> WANTestBase.createReplicatedRegion(getTestMethodName() + "_RR", "ln", isOffHeap())); vm3.invoke(() -> WANTestBase.doPutsPDXSerializable(getTestMethodName() + "_RR", 1)); vm2.invoke(() -> WANTestBase.validateRegionSize_PDX(getTestMethodName() + "_RR", 1)); } /** * Test 1> Site 1 : 1 locator, 1 member 2> Site 2 : 1 locator, 1 member 3> DR is defined on member * 1 on site1 4> Serial GatewaySender is defined on member 1 on site1 5> Same DR is defined on * site2 member 1 6> Put is done with value which is PDXSerializable 7> Validate whether other * sites member receive this put operation. 
8> Bounce site 1 and delete all of it's data 9> Make * sure that site 1 get the the PDX types along with entries and can deserialize entries. */ @Test public void testWANPDX_RemoveRemoteData() { Integer lnPort = vm0.invoke(() -> WANTestBase.createFirstLocatorWithDSId(1)); Integer nyPort = vm1.invoke(() -> WANTestBase.createFirstRemoteLocator(2, lnPort)); vm2.invoke(() -> WANTestBase.createReceiver_PDX(nyPort)); vm3.invoke(() -> WANTestBase.createCache_PDX(lnPort)); vm3.invoke(() -> WANTestBase.createSender("ln", 2, false, 100, 10, false, false, null, true)); vm2.invoke( () -> WANTestBase.createReplicatedRegion(getTestMethodName() + "_RR", null, isOffHeap())); vm3.invoke(() -> WANTestBase.startSender("ln")); vm3.invoke( () -> WANTestBase.createReplicatedRegion(getTestMethodName() + "_RR", "ln", isOffHeap())); vm3.invoke(() -> WANTestBase.doPutsPDXSerializable(getTestMethodName() + "_RR", 1)); vm2.invoke(() -> WANTestBase.validateRegionSize_PDX(getTestMethodName() + "_RR", 1)); // bounce vm2 vm2.invoke(WANTestBase::closeCache); vm2.invoke(WANTestBase::deletePDXDir); vm2.invoke(() -> WANTestBase.createReceiver_PDX(nyPort)); vm2.invoke( () -> WANTestBase.createReplicatedRegion(getTestMethodName() + "_RR", null, isOffHeap())); vm3.invoke(() -> WANTestBase.doPutsPDXSerializable(getTestMethodName() + "_RR", 2)); vm2.invoke(() -> WANTestBase.validateRegionSize_PDX(getTestMethodName() + "_RR", 2)); } @Test public void testWANPDX_CacheWriterCheck() { Integer lnPort = vm0.invoke(() -> WANTestBase.createFirstLocatorWithDSId(1)); Integer nyPort = vm1.invoke(() -> WANTestBase.createFirstRemoteLocator(2, lnPort)); vm2.invoke(() -> setSystemProperty("gemfire.disk.recoverValues", "false")); vm2.invoke(() -> WANTestBase.createReceiver_PDX(nyPort)); vm3.invoke(() -> WANTestBase.createCache_PDX(lnPort)); vm3.invoke(() -> WANTestBase.createSender("ln", 2, false, 100, 10, false, false, null, true)); vm2.invoke( () -> WANTestBase.createReplicatedRegion(getTestMethodName() + "_RR", null, 
isOffHeap())); vm3.invoke(() -> WANTestBase.startSender("ln")); vm3.invoke( () -> WANTestBase.createReplicatedRegion(getTestMethodName() + "_RR", "ln", isOffHeap())); vm3.invoke(() -> WANTestBase.doPutsPDXSerializable(getTestMethodName() + "_RR", 1)); vm2.invoke(() -> WANTestBase.validateRegionSize_PDX(getTestMethodName() + "_RR", 1)); // Close VM2 cache vm2.invoke(WANTestBase::closeCache); // do some puts on VM3 and create extra pdx id vm3.invoke(() -> WANTestBase.doPutsPDXSerializable2(getTestMethodName() + "_RR", 2)); // start cache in vm2 again, now it should receive pdx id from vm3 vm2.invoke(() -> WANTestBase.createReceiver_PDX(nyPort)); vm2.invoke( () -> WANTestBase.createReplicatedRegion(getTestMethodName() + "_RR", null, isOffHeap())); try { Wait.pause(10000); // Define a different type from vm3 vm3.invoke(() -> WANTestBase.doPutsPDXSerializable2(getTestMethodName() + "_RR", 2)); // Give the updates some time to make it over the WAN Wait.pause(10000); vm2.invoke(() -> WANTestBase.validateRegionSizeOnly_PDX(getTestMethodName() + "_RR", 2)); vm3.invoke(WANTestBase::closeCache); vm2.invoke(WANTestBase::closeCache); } finally { vm2.invoke(() -> setSystemProperty("gemfire.disk.recoverValues", "true")); } } private void setSystemProperty(String key, String value) { System.setProperty(key, value); } /** * Test 1> Site 1 : 1 locator, 1 member 2> Site 2 : 1 locator, 1 member 3> DR is defined on member * 1 on site1 4> Serial GatewaySender is defined on member 1 on site1 5> Same DR is defined on * site2 member 1 6> Put is done with value which is PDXSerializable 7> Validate whether other * sites member receive this put operation. 8> Bounce site 1 and delete all of it's data 9> Make * some conflicting PDX registries in site 1 before the reconnect 10> Make sure we flag a warning * about the conflicting updates. 
*/ @Test public void testWANPDX_ConflictingData() { Integer lnPort = vm0.invoke(() -> WANTestBase.createFirstLocatorWithDSId(1)); Integer nyPort = vm1.invoke(() -> WANTestBase.createFirstRemoteLocator(2, lnPort)); vm2.invoke(() -> WANTestBase.createReceiver_PDX(nyPort)); vm3.invoke(() -> WANTestBase.createCache_PDX(lnPort)); vm3.invoke(() -> WANTestBase.createSender("ln", 2, false, 100, 10, false, false, null, true)); vm2.invoke( () -> WANTestBase.createReplicatedRegion(getTestMethodName() + "_RR", null, isOffHeap())); vm3.invoke(() -> WANTestBase.startSender("ln")); vm3.invoke( () -> WANTestBase.createReplicatedRegion(getTestMethodName() + "_RR", "ln", isOffHeap())); vm3.invoke(() -> WANTestBase.doPutsPDXSerializable(getTestMethodName() + "_RR", 1)); vm2.invoke(() -> WANTestBase.validateRegionSize_PDX(getTestMethodName() + "_RR", 1)); // bounce vm3 vm3.invoke(WANTestBase::closeCache); IgnoredException ex1 = IgnoredException.addIgnoredException("Trying to add a PDXType with the same id"); IgnoredException ex2 = IgnoredException.addIgnoredException("CacheWriterException"); IgnoredException ex3 = IgnoredException.addIgnoredException("does match the existing PDX type"); IgnoredException ex4 = IgnoredException.addIgnoredException("ServerOperationException"); IgnoredException ex5 = IgnoredException.addIgnoredException("Stopping the processor"); try { // blow away vm3's PDX data vm3.invoke(WANTestBase::deletePDXDir); vm3.invoke(() -> WANTestBase.createCache_PDX(lnPort)); vm3.invoke(() -> WANTestBase.createSender("ln", 2, false, 100, 10, false, false, null, true)); vm3.invoke(() -> WANTestBase.startSender("ln")); vm3.invoke( () -> WANTestBase.createReplicatedRegion(getTestMethodName() + "_RR", "ln", isOffHeap())); // Define a different type from vm3 vm3.invoke(() -> WANTestBase.doPutsPDXSerializable2(getTestMethodName() + "_RR", 2)); // Give the updates some time to make it over the WAN Wait.pause(10000); vm2.invoke(() -> 
WANTestBase.validateRegionSizeOnly_PDX(getTestMethodName() + "_RR", 2)); vm3.invoke(WANTestBase::closeCache); } finally { ex1.remove(); ex2.remove(); ex3.remove(); ex4.remove(); ex5.remove(); } } /** * Test 1> Site 1 : 1 locator, 1 member 2> Site 2 : 1 locator, 1 member 3> Site 3 : 1 locator, 1 * member 3> DR is defined on member 1 on site1 4> Serial GatewaySender is defined on member 1 on * site1 5> Same DR is defined on site2 member 1 6> Put is done with value which is * PDXSerializable 7> Validate whether other sites member receive this put operation. */ @Test public void testWANPDX_RR_SerialSender3Sites() { Integer lnPort = vm0.invoke(() -> WANTestBase.createFirstLocatorWithDSId(1)); Integer nyPort = vm1.invoke(() -> WANTestBase.createFirstRemoteLocator(2, lnPort)); Integer tkPort = vm2.invoke(() -> WANTestBase.createFirstRemoteLocator(3, lnPort)); createCacheInVMs(lnPort, vm3); createCacheInVMs(nyPort, vm4); createCacheInVMs(tkPort, vm5); vm3.invoke(WANTestBase::createReceiver); vm4.invoke(WANTestBase::createReceiver); vm5.invoke(WANTestBase::createReceiver); // Create all of our gateway senders vm3.invoke(() -> WANTestBase.createSender("ny", 2, false, 100, 10, false, false, null, true)); vm3.invoke(() -> WANTestBase.createSender("tk", 3, false, 100, 10, false, false, null, true)); vm4.invoke(() -> WANTestBase.createSender("ln", 1, false, 100, 10, false, false, null, true)); vm4.invoke(() -> WANTestBase.createSender("tk", 3, false, 100, 10, false, false, null, true)); vm5.invoke(() -> WANTestBase.createSender("ln", 1, false, 100, 10, false, false, null, true)); vm5.invoke(() -> WANTestBase.createSender("ny", 2, false, 100, 10, false, false, null, true)); vm3.invoke(() -> WANTestBase.createReplicatedRegion(getTestMethodName() + "_RR", "ny,tk", isOffHeap())); vm4.invoke(() -> WANTestBase.createReplicatedRegion(getTestMethodName() + "_RR", "ln,tk", isOffHeap())); vm5.invoke(() -> WANTestBase.createReplicatedRegion(getTestMethodName() + "_RR", "ln,ny", 
isOffHeap())); // Start all of the senders vm3.invoke(() -> WANTestBase.startSender("ny")); vm3.invoke(() -> WANTestBase.startSender("tk")); vm4.invoke(() -> WANTestBase.startSender("ln")); vm4.invoke(() -> WANTestBase.startSender("tk")); vm5.invoke(() -> WANTestBase.startSender("ln")); vm5.invoke(() -> WANTestBase.startSender("ny")); // Pause ln to ny. This means the PDX type will not be dispatched // to ny from ln vm3.invoke(() -> WANTestBase.pauseSender("ny")); Wait.pause(5000); // Do some puts that define a PDX type in ln vm3.invoke(() -> WANTestBase.doPutsPDXSerializable(getTestMethodName() + "_RR", 1)); // Make sure that tk received the update vm5.invoke(() -> WANTestBase.validateRegionSize_PDX(getTestMethodName() + "_RR", 1)); // Make ny didn't receive the update because the sender is paused vm4.invoke(() -> WANTestBase.validateRegionSize_PDX(getTestMethodName() + "_RR", 0)); // Now, do a put from tk. This serialized object will be distributed // to ny from tk, using the type defined by ln. 
vm5.invoke(() -> WANTestBase.doPutsPDXSerializable(getTestMethodName() + "_RR", 2)); // Verify the ny can read the object vm4.invoke(() -> WANTestBase.validateRegionSize_PDX(getTestMethodName() + "_RR", 2)); // Wait for vm3 to receive the update (prevents a broken pipe suspect string) vm3.invoke(() -> WANTestBase.validateRegionSize_PDX(getTestMethodName() + "_RR", 2)); } @Test public void testWANPDX_RR_SerialSender_StartedLater() { Integer lnPort = vm0.invoke(() -> WANTestBase.createFirstLocatorWithDSId(1)); Integer nyPort = vm1.invoke(() -> WANTestBase.createFirstRemoteLocator(2, lnPort)); createCacheInVMs(nyPort, vm2); vm2.invoke(WANTestBase::createReceiver); createCacheInVMs(lnPort, vm3); vm3.invoke(() -> WANTestBase.createSender("ln", 2, false, 100, 10, false, false, null, true)); vm2.invoke( () -> WANTestBase.createReplicatedRegion(getTestMethodName() + "_RR", null, isOffHeap())); vm3.invoke( () -> WANTestBase.createReplicatedRegion(getTestMethodName() + "_RR", "ln", isOffHeap())); vm3.invoke(() -> WANTestBase.doPutsPDXSerializable(getTestMethodName() + "_RR", 10)); vm3.invoke(() -> WANTestBase.startSender("ln")); vm3.invoke(() -> WANTestBase.doPutsPDXSerializable(getTestMethodName() + "_RR", 40)); vm2.invoke(() -> WANTestBase.validateRegionSize_PDX(getTestMethodName() + "_RR", 40)); } /** * Test 1> Site 1 : 1 locator, 1 member 2> Site 2 : 1 locator, 1 member 3> PR is defined on member * 1 on site1 4> Serial GatewaySender is defined on member 1 on site1 5> Same PR is defined on * site2 member 1 6> Put is done with value which is PDXSerializable 7> Validate whether other * sites member receive this put operation. 
*/ @Test public void testWANPDX_PR_SerialSender() { Integer lnPort = vm0.invoke(() -> WANTestBase.createFirstLocatorWithDSId(1)); Integer nyPort = vm1.invoke(() -> WANTestBase.createFirstRemoteLocator(2, lnPort)); createCacheInVMs(nyPort, vm2); vm2.invoke(() -> WANTestBase.createPartitionedRegion(getTestMethodName() + "_PR", null, 0, 2, isOffHeap())); vm2.invoke(WANTestBase::createReceiver); createCacheInVMs(lnPort, vm3); vm3.invoke(() -> WANTestBase.createSender("ln", 2, false, 100, 10, false, false, null, true)); vm3.invoke(() -> WANTestBase.startSender("ln")); vm3.invoke(() -> WANTestBase.createPartitionedRegion(getTestMethodName() + "_PR", "ln", 0, 2, isOffHeap())); vm3.invoke(() -> WANTestBase.doPutsPDXSerializable(getTestMethodName() + "_PR", 1)); vm2.invoke(() -> WANTestBase.validateRegionSize_PDX(getTestMethodName() + "_PR", 1)); } @Test public void testWANPDX_PR_SerialSender_StartedLater() { Integer lnPort = vm0.invoke(() -> WANTestBase.createFirstLocatorWithDSId(1)); Integer nyPort = vm1.invoke(() -> WANTestBase.createFirstRemoteLocator(2, lnPort)); vm2.invoke(() -> WANTestBase.createReceiver_PDX(nyPort)); vm3.invoke(() -> WANTestBase.createCache_PDX(lnPort)); vm3.invoke(() -> WANTestBase.createSender("ln", 2, false, 100, 10, false, false, null, true)); vm2.invoke(() -> WANTestBase.createPartitionedRegion(getTestMethodName() + "_PR", null, 0, 2, isOffHeap())); vm3.invoke(() -> WANTestBase.createPartitionedRegion(getTestMethodName() + "_PR", "ln", 0, 2, isOffHeap())); vm3.invoke(() -> WANTestBase.doPutsPDXSerializable(getTestMethodName() + "_PR", 20)); vm3.invoke(() -> WANTestBase.startSender("ln")); vm3.invoke(() -> WANTestBase.doPutsPDXSerializable(getTestMethodName() + "_PR", 40)); vm2.invoke(() -> WANTestBase.validateRegionSize_PDX(getTestMethodName() + "_PR", 40)); } /** * Test 1> Site 1 : 1 locator, 2 member 2> Site 2 : 1 locator, 2 member 3> PR is defined on member * 1, 2 on site1 4> Serial GatewaySender is defined on member 1 on site1 5> Same PR is 
defined on * site2 member 1, 2 6> Put is done with value which is PDXSerializable 7> Validate whether other * sites member receive this put operation. */ @Test public void testWANPDX_PR_MultipleVM_SerialSender() { Integer lnPort = vm0.invoke(() -> WANTestBase.createFirstLocatorWithDSId(1)); Integer nyPort = vm1.invoke(() -> WANTestBase.createFirstRemoteLocator(2, lnPort)); createCacheInVMs(nyPort, vm2); vm2.invoke(() -> WANTestBase.createPartitionedRegion(getTestMethodName() + "_PR", null, 1, 5, isOffHeap())); vm2.invoke(WANTestBase::createReceiver); createCacheInVMs(lnPort, vm3, vm4); vm3.invoke(() -> WANTestBase.createSender("ln", 2, false, 100, 10, false, false, null, true)); vm3.invoke(() -> WANTestBase.startSender("ln")); vm3.invoke(() -> WANTestBase.createPartitionedRegion(getTestMethodName() + "_PR", "ln", 1, 5, isOffHeap())); vm4.invoke(() -> WANTestBase.createPartitionedRegion(getTestMethodName() + "_PR", "ln", 1, 5, isOffHeap())); vm3.invoke(() -> WANTestBase.doPutsPDXSerializable(getTestMethodName() + "_PR", 10)); vm2.invoke(() -> WANTestBase.validateRegionSize_PDX(getTestMethodName() + "_PR", 10)); } @Test public void testWANPDX_PR_MultipleVM_SerialSender_StartedLater() { Integer lnPort = vm0.invoke(() -> WANTestBase.createFirstLocatorWithDSId(1)); Integer nyPort = vm1.invoke(() -> WANTestBase.createFirstRemoteLocator(2, lnPort)); vm2.invoke(() -> WANTestBase.createReceiver_PDX(nyPort)); vm3.invoke(() -> WANTestBase.createCache_PDX(lnPort)); vm4.invoke(() -> WANTestBase.createCache_PDX(lnPort)); vm3.invoke(() -> WANTestBase.createSender("ln", 2, false, 100, 10, false, false, null, true)); vm2.invoke(() -> WANTestBase.createPartitionedRegion(getTestMethodName() + "_PR", null, 1, 5, isOffHeap())); vm3.invoke(() -> WANTestBase.createPartitionedRegion(getTestMethodName() + "_PR", "ln", 1, 5, isOffHeap())); vm4.invoke(() -> WANTestBase.createPartitionedRegion(getTestMethodName() + "_PR", "ln", 1, 5, isOffHeap())); vm3.invoke(() -> 
WANTestBase.doPutsPDXSerializable(getTestMethodName() + "_PR", 10)); vm3.invoke(() -> WANTestBase.startSender("ln")); vm4.invoke(() -> WANTestBase.doPutsPDXSerializable(getTestMethodName() + "_PR", 40)); vm2.invoke(() -> WANTestBase.validateRegionSize_PDX(getTestMethodName() + "_PR", 40)); } /** * Test 1> Site 1 : 1 locator, 1 member 2> Site 2 : 1 locator, 1 member 3> PR is defined on member * 1 on site1 4> Parallel GatewaySender is defined on member 1 on site1 5> Same PR is defined on * site2 member 1 6> Put is done with value which is PDXSerializable 7> Validate whether other * sites member receive this put operation. */ @Test public void testWANPDX_PR_ParallelSender() { Integer lnPort = vm0.invoke(() -> WANTestBase.createFirstLocatorWithDSId(1)); Integer nyPort = vm1.invoke(() -> WANTestBase.createFirstRemoteLocator(2, lnPort)); createCacheInVMs(nyPort, vm2); vm2.invoke(() -> WANTestBase.createPartitionedRegion(getTestMethodName() + "_PR", null, 0, 1, isOffHeap())); vm2.invoke(WANTestBase::createReceiver); vm3.invoke(() -> WANTestBase.createCache(lnPort)); vm3.invoke(() -> WANTestBase.createPartitionedRegion(getTestMethodName() + "_PR", "ln", 0, 1, isOffHeap())); vm3.invoke(() -> WANTestBase.createSender("ln", 2, true, 100, 10, false, false, null, true)); vm3.invoke(() -> WANTestBase.startSender("ln")); vm3.invoke(() -> WANTestBase.waitForSenderRunningState("ln")); vm3.invoke(() -> WANTestBase.doPutsPDXSerializable(getTestMethodName() + "_PR", 1)); vm2.invoke(() -> WANTestBase.validateRegionSize_PDX(getTestMethodName() + "_PR", 1)); } @Test public void testWANPDX_PR_ParallelSender_WithDelayedTypeRegistry() throws InterruptedException, ExecutionException { Integer lnPort = vm0.invoke(() -> WANTestBase.createFirstLocatorWithDSId(1)); Integer nyPort = vm1.invoke(() -> WANTestBase.createFirstRemoteLocator(2, lnPort)); // Create the receiver side of the WAN gateway. 
Only vm2 will be a receiver, vm3 is // just a peer createCacheInVMs(nyPort, vm2, vm3); vm2.invoke(WANTestBase::createReceiver); vm2.invoke(() -> WANTestBase.createPartitionedRegion(getTestMethodName() + "_PR", null, 0, 4, isOffHeap())); vm3.invoke(() -> WANTestBase.createPartitionedRegion(getTestMethodName() + "_PR", null, 0, 4, isOffHeap())); AsyncInvocation<Void> deserializationFuture; try { // Delay processing of sending type registry update from vm2 vm2.invoke(() -> { DistributionMessageObserver.setInstance(new BlockingPdxTypeUpdateObserver()); }); // Create the sender side of the WAN connection. 2 VMs, with paused senders vm4.invoke(() -> WANTestBase.createCache(lnPort)); vm5.invoke(() -> WANTestBase.createCache(lnPort)); vm4.invoke(() -> WANTestBase.createSender("ln", 2, true, 100, 10, false, false, null, false)); vm5.invoke(() -> WANTestBase.createSender("ln", 2, true, 100, 10, false, false, null, false)); // Create the partitioned region in vm4 vm4.invoke(() -> WANTestBase.createPartitionedRegion(getTestMethodName() + "_PR", "ln", 0, 4, isOffHeap())); vm5.invoke(() -> WANTestBase.createPartitionedRegion(getTestMethodName() + "_PR", "ln", 0, 4, isOffHeap())); vm5.invoke(() -> { Region region = cache.getRegion(getTestMethodName() + "_PR"); PartitionRegionHelper.assignBucketsToPartitions(region); }); vm4.invoke(() -> WANTestBase.pauseSender("ln")); vm5.invoke(() -> WANTestBase.pauseSender("ln")); // Do some puts to fill up our queues vm4.invoke(() -> WANTestBase.doPuts(getTestMethodName() + "_PR", 20)); vm4.invoke(() -> { final Region r = cache.getRegion(SEPARATOR + getTestMethodName() + "_PR"); PdxValue result = (PdxValue) r.put(KEY_0, new PdxValue(0)); }); // Force VM4 to be the primary vm4.invoke(() -> { final Region region = cache.getRegion(SEPARATOR + getTestMethodName() + "_PR"); DistributedMember primary = PartitionRegionHelper.getPrimaryMemberForKey(region, KEY_0); // If we are not the primary DistributedMember localMember = 
cache.getDistributedSystem().getDistributedMember(); if (!primary.equals(localMember)) { PartitionRegionHelper.moveBucketByKey(region, primary, localMember, KEY_0); } }); vm5.invoke(() -> WANTestBase.resumeSender("ln")); boolean blocking = vm2.invoke(() -> { BlockingPdxTypeUpdateObserver observer = (BlockingPdxTypeUpdateObserver) DistributionMessageObserver.getInstance(); return observer.startedBlocking.await(1, TimeUnit.MINUTES); }); assertTrue(blocking); vm4.invoke(() -> WANTestBase.resumeSender("ln")); vm2.invoke(() -> { final Region region = cache.getRegion(SEPARATOR + getTestMethodName() + "_PR"); await().until(() -> region.containsKey(KEY_0)); }); // Make sure vm3 can deserialize the value deserializationFuture = vm3.invokeAsync(() -> { final Region r = cache.getRegion(SEPARATOR + getTestMethodName() + "_PR"); PdxValue result = (PdxValue) r.get(KEY_0); assertEquals(result, new PdxValue(0)); }); try { deserializationFuture.await(10, TimeUnit.SECONDS); fail("Get should have been blocked waiting for PDX type to be distributed"); } catch (TimeoutException e) { // This is what we hope will happen. The get will be blocked by some sort of lock, rather // than failing due to a missing type. 
} } finally { vm2.invoke(() -> { BlockingPdxTypeUpdateObserver observer = (BlockingPdxTypeUpdateObserver) DistributionMessageObserver.getInstance(); observer.latch.countDown(); }); } deserializationFuture.get(); } @Test public void testWANPDX_PR_ParallelSender_47826() { Integer lnPort = vm0.invoke(() -> WANTestBase.createFirstLocatorWithDSId(1)); Integer nyPort = vm1.invoke(() -> WANTestBase.createFirstRemoteLocator(2, lnPort)); createCacheInVMs(nyPort, vm2); vm2.invoke(() -> WANTestBase.createPartitionedRegion(getTestMethodName() + "_PR", null, 0, 1, isOffHeap())); vm2.invoke(WANTestBase::createReceiver); createCacheInVMs(lnPort, vm3); vm3.invoke(() -> WANTestBase.createSender("ln", 2, true, 100, 10, false, false, null, true)); vm3.invoke(() -> WANTestBase.createPartitionedRegion(getTestMethodName() + "_PR", "ln", 0, 1, isOffHeap())); vm3.invoke(() -> WANTestBase.startSender("ln")); vm3.invoke(() -> WANTestBase.doPutsPDXSerializable(getTestMethodName() + "_PR", 1)); vm2.invoke(() -> WANTestBase.validateRegionSize_PDX(getTestMethodName() + "_PR", 1)); } @Test public void testWANPDX_PR_ParallelSender_StartedLater() { Integer lnPort = vm0.invoke(() -> WANTestBase.createFirstLocatorWithDSId(1)); Integer nyPort = vm1.invoke(() -> WANTestBase.createFirstRemoteLocator(2, lnPort)); vm2.invoke(() -> WANTestBase.createReceiver_PDX(nyPort)); vm3.invoke(() -> WANTestBase.createCache_PDX(lnPort)); vm3.invoke(() -> WANTestBase.createSender("ln", 2, true, 100, 10, false, false, null, true)); vm2.invoke(() -> WANTestBase.createPartitionedRegion(getTestMethodName() + "_PR", null, 0, 2, isOffHeap())); vm3.invoke(() -> WANTestBase.createPartitionedRegion(getTestMethodName() + "_PR", "ln", 0, 2, isOffHeap())); vm3.invoke(() -> WANTestBase.doPutsPDXSerializable(getTestMethodName() + "_PR", 10)); vm3.invoke(() -> WANTestBase.startSender("ln")); vm3.invoke(() -> WANTestBase.doPutsPDXSerializable(getTestMethodName() + "_PR", 40)); vm2.invoke(() -> 
WANTestBase.validateRegionSize_PDX(getTestMethodName() + "_PR", 40)); } @Test public void testWANPDX_PR_MultipleVM_ParallelSender() { Integer lnPort = vm0.invoke(() -> WANTestBase.createFirstLocatorWithDSId(1)); Integer nyPort = vm1.invoke(() -> WANTestBase.createFirstRemoteLocator(2, lnPort)); createCacheInVMs(nyPort, vm2); vm2.invoke(WANTestBase::createReceiver); createCacheInVMs(lnPort, vm3, vm4); vm3.invoke(() -> WANTestBase.createSender("ln", 2, true, 100, 10, false, false, null, true)); vm4.invoke(() -> WANTestBase.createSender("ln", 2, true, 100, 10, false, false, null, true)); vm2.invoke(() -> WANTestBase.createPartitionedRegion(getTestMethodName() + "_PR", null, 0, 2, isOffHeap())); vm3.invoke(() -> WANTestBase.createPartitionedRegion(getTestMethodName() + "_PR", "ln", 0, 2, isOffHeap())); vm4.invoke(() -> WANTestBase.createPartitionedRegion(getTestMethodName() + "_PR", "ln", 0, 2, isOffHeap())); startSenderInVMs("ln", vm3, vm4); vm3.invoke(() -> WANTestBase.doPutsPDXSerializable(getTestMethodName() + "_PR", 10)); vm2.invoke(() -> WANTestBase.validateRegionSize_PDX(getTestMethodName() + "_PR", 10)); } @Test public void testWANPDX_PR_MultipleVM_ParallelSender_StartedLater() { Integer lnPort = vm0.invoke(() -> WANTestBase.createFirstLocatorWithDSId(1)); Integer nyPort = vm1.invoke(() -> WANTestBase.createFirstRemoteLocator(2, lnPort)); vm2.invoke(() -> WANTestBase.createReceiver_PDX(nyPort)); vm3.invoke(() -> WANTestBase.createCache_PDX(lnPort)); vm4.invoke(() -> WANTestBase.createCache_PDX(lnPort)); vm3.invoke(() -> WANTestBase.createSender("ln", 2, true, 100, 10, false, false, null, true)); vm4.invoke(() -> WANTestBase.createSender("ln", 2, true, 100, 10, false, false, null, true)); vm2.invoke(() -> WANTestBase.createPartitionedRegion(getTestMethodName() + "_PR", null, 0, 2, isOffHeap())); vm3.invoke(() -> WANTestBase.createPartitionedRegion(getTestMethodName() + "_PR", "ln", 0, 2, isOffHeap())); vm4.invoke(() -> 
WANTestBase.createPartitionedRegion(getTestMethodName() + "_PR", "ln", 0, 2, isOffHeap())); vm3.invoke(() -> WANTestBase.doPutsPDXSerializable(getTestMethodName() + "_PR", 10)); startSenderInVMsAsync("ln", vm3, vm4); vm4.invoke(() -> WANTestBase.doPutsPDXSerializable(getTestMethodName() + "_PR", 40)); vm2.invoke(() -> WANTestBase.validateRegionSize_PDX(getTestMethodName() + "_PR", 40)); } @Test public void testWANPDX_RR_SerialSenderWithFilter() { Integer lnPort = vm0.invoke(() -> WANTestBase.createFirstLocatorWithDSId(1)); Integer nyPort = vm1.invoke(() -> WANTestBase.createFirstRemoteLocator(2, lnPort)); createCacheInVMs(nyPort, vm2); vm2.invoke(WANTestBase::createReceiver); createCacheInVMs(lnPort, vm3); vm3.invoke(() -> WANTestBase.createSender("ln", 2, false, 100, 10, false, false, new PDXGatewayEventFilter(), true)); vm2.invoke( () -> WANTestBase.createReplicatedRegion(getTestMethodName() + "_RR", null, isOffHeap())); vm3.invoke(() -> WANTestBase.startSender("ln")); vm3.invoke( () -> WANTestBase.createReplicatedRegion(getTestMethodName() + "_RR", "ln", isOffHeap())); vm3.invoke(() -> WANTestBase.doPutsPDXSerializable(getTestMethodName() + "_RR", 1)); vm2.invoke(() -> WANTestBase.validateRegionSize_PDX(getTestMethodName() + "_RR", 1)); vm3.invoke(() -> PDXNewWanDUnitTest.verifyFilterInvocation(1)); } @Test public void testWANPDX_PR_MultipleVM_ParallelSenderWithFilter() { Integer lnPort = vm0.invoke(() -> WANTestBase.createFirstLocatorWithDSId(1)); Integer nyPort = vm1.invoke(() -> WANTestBase.createFirstRemoteLocator(2, lnPort)); createCacheInVMs(nyPort, vm2); vm2.invoke(WANTestBase::createReceiver); createCacheInVMs(lnPort, vm3, vm4); vm3.invoke(() -> WANTestBase.createSender("ln", 2, true, 100, 10, false, false, new PDXGatewayEventFilter(), true)); vm4.invoke(() -> WANTestBase.createSender("ln", 2, true, 100, 10, false, false, new PDXGatewayEventFilter(), true)); vm2.invoke(() -> WANTestBase.createPartitionedRegion(getTestMethodName() + "_PR", null, 0, 2, 
isOffHeap())); vm3.invoke(() -> WANTestBase.createPartitionedRegion(getTestMethodName() + "_PR", "ln", 0, 2, isOffHeap())); vm4.invoke(() -> WANTestBase.createPartitionedRegion(getTestMethodName() + "_PR", "ln", 0, 2, isOffHeap())); startSenderInVMs("ln", vm3, vm4); vm3.invoke(() -> WANTestBase.doPutsPDXSerializable(getTestMethodName() + "_PR", 10)); vm2.invoke(() -> WANTestBase.validateRegionSize_PDX(getTestMethodName() + "_PR", 10)); vm3.invoke(() -> PDXNewWanDUnitTest.verifyFilterInvocation(5)); vm4.invoke(() -> PDXNewWanDUnitTest.verifyFilterInvocation(5)); } /** * When remote site bounces then we should send pdx event again. */ @Ignore @Test public void testWANPDX_PR_SerialSender_RemoteSite_Bounce() { Integer lnPort = vm0.invoke(() -> WANTestBase.createFirstLocatorWithDSId(1)); Integer nyPort = vm1.invoke(() -> WANTestBase.createFirstRemoteLocator(2, lnPort)); createCacheInVMs(nyPort, vm2); vm2.invoke(WANTestBase::createReceiver); createCacheInVMs(lnPort, vm3); vm3.invoke(() -> WANTestBase.createSender("ln", 2, false, 100, 10, false, false, null, true)); vm2.invoke(() -> WANTestBase.createPartitionedRegion(getTestMethodName() + "_PR", null, 0, 2, isOffHeap())); vm3.invoke(() -> WANTestBase.startSender("ln")); vm3.invoke(() -> WANTestBase.createPartitionedRegion(getTestMethodName() + "_PR", "ln", 0, 2, isOffHeap())); vm3.invoke(() -> WANTestBase.doPutsPDXSerializable(getTestMethodName() + "_PR", 1)); vm2.invoke(() -> WANTestBase.validateRegionSize_PDX(getTestMethodName() + "_PR", 1)); vm2.invoke(() -> WANTestBase.killSender()); createReceiverInVMs(vm2, vm4); vm2.invoke(() -> WANTestBase.createPartitionedRegion(getTestMethodName() + "_PR", null, 1, 2, isOffHeap())); vm4.invoke(() -> WANTestBase.createPartitionedRegion(getTestMethodName() + "_PR", null, 1, 2, isOffHeap())); vm3.invoke(() -> WANTestBase.doPutsPDXSerializable(getTestMethodName() + "_PR", 1)); vm2.invoke(() -> WANTestBase.validateRegionSize_PDX(getTestMethodName() + "_PR", 1)); } public static void 
verifyFilterInvocation(int invocation) { await().untilAsserted( () -> assertEquals(((PDXGatewayEventFilter) eventFilter).beforeEnqueueInvoked, invocation)); await() .untilAsserted( () -> assertEquals(((PDXGatewayEventFilter) eventFilter).beforeTransmitInvoked, invocation)); await().untilAsserted( () -> assertEquals(((PDXGatewayEventFilter) eventFilter).afterAckInvoked, invocation)); } private static class BlockingPdxTypeUpdateObserver extends DistributionMessageObserver { private final CountDownLatch latch = new CountDownLatch(1); private final CountDownLatch startedBlocking = new CountDownLatch(1); @Override public void beforeSendMessage(ClusterDistributionManager dm, DistributionMessage message) { if (message instanceof UpdateOperation.UpdateMessage && ((UpdateOperation.UpdateMessage) message).getRegionPath() .contains(PeerTypeRegistration.REGION_FULL_PATH)) { startedBlocking.countDown(); try { latch.await(); } catch (InterruptedException e) { throw new RuntimeException("Interrupted", e); } } } } public static class PdxValue implements PdxSerializable { public int value; public PdxValue() { } public PdxValue(int value) { this.value = value; } @Override public void toData(PdxWriter writer) { writer.writeInt("value", value); } @Override public void fromData(PdxReader reader) { value = reader.readInt("value"); } @Override public boolean equals(Object o) { if (this == o) { return true; } if (o == null || getClass() != o.getClass()) { return false; } PdxValue pdxValue = (PdxValue) o; return value == pdxValue.value; } @Override public int hashCode() { return value; } } }
googleapis/google-cloud-java
36,224
java-managedkafka/proto-google-cloud-managedkafka-v1/src/main/java/com/google/cloud/managedkafka/v1/UpdateAclRequest.java
/* * Copyright 2025 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ // Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/cloud/managedkafka/v1/managed_kafka.proto // Protobuf Java Version: 3.25.8 package com.google.cloud.managedkafka.v1; /** * * * <pre> * Request for UpdateAcl. * </pre> * * Protobuf type {@code google.cloud.managedkafka.v1.UpdateAclRequest} */ public final class UpdateAclRequest extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.cloud.managedkafka.v1.UpdateAclRequest) UpdateAclRequestOrBuilder { private static final long serialVersionUID = 0L; // Use UpdateAclRequest.newBuilder() to construct. 
private UpdateAclRequest(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private UpdateAclRequest() {} @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance(UnusedPrivateParameter unused) { return new UpdateAclRequest(); } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.managedkafka.v1.ManagedKafkaProto .internal_static_google_cloud_managedkafka_v1_UpdateAclRequest_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.managedkafka.v1.ManagedKafkaProto .internal_static_google_cloud_managedkafka_v1_UpdateAclRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.managedkafka.v1.UpdateAclRequest.class, com.google.cloud.managedkafka.v1.UpdateAclRequest.Builder.class); } private int bitField0_; public static final int ACL_FIELD_NUMBER = 1; private com.google.cloud.managedkafka.v1.Acl acl_; /** * * * <pre> * Required. The updated acl. Its `name` and `etag` fields must be populated. * `acl_entries` must not be empty in the updated acl; to remove all acl * entries for an acl, use DeleteAcl. * </pre> * * <code>.google.cloud.managedkafka.v1.Acl acl = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return Whether the acl field is set. */ @java.lang.Override public boolean hasAcl() { return ((bitField0_ & 0x00000001) != 0); } /** * * * <pre> * Required. The updated acl. Its `name` and `etag` fields must be populated. * `acl_entries` must not be empty in the updated acl; to remove all acl * entries for an acl, use DeleteAcl. * </pre> * * <code>.google.cloud.managedkafka.v1.Acl acl = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return The acl. */ @java.lang.Override public com.google.cloud.managedkafka.v1.Acl getAcl() { return acl_ == null ? 
com.google.cloud.managedkafka.v1.Acl.getDefaultInstance() : acl_; } /** * * * <pre> * Required. The updated acl. Its `name` and `etag` fields must be populated. * `acl_entries` must not be empty in the updated acl; to remove all acl * entries for an acl, use DeleteAcl. * </pre> * * <code>.google.cloud.managedkafka.v1.Acl acl = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> */ @java.lang.Override public com.google.cloud.managedkafka.v1.AclOrBuilder getAclOrBuilder() { return acl_ == null ? com.google.cloud.managedkafka.v1.Acl.getDefaultInstance() : acl_; } public static final int UPDATE_MASK_FIELD_NUMBER = 2; private com.google.protobuf.FieldMask updateMask_; /** * * * <pre> * Optional. Field mask is used to specify the fields to be overwritten in the * Acl resource by the update. The fields specified in the update_mask are * relative to the resource, not the full request. A field will be overwritten * if it is in the mask. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = OPTIONAL]; * </code> * * @return Whether the updateMask field is set. */ @java.lang.Override public boolean hasUpdateMask() { return ((bitField0_ & 0x00000002) != 0); } /** * * * <pre> * Optional. Field mask is used to specify the fields to be overwritten in the * Acl resource by the update. The fields specified in the update_mask are * relative to the resource, not the full request. A field will be overwritten * if it is in the mask. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = OPTIONAL]; * </code> * * @return The updateMask. */ @java.lang.Override public com.google.protobuf.FieldMask getUpdateMask() { return updateMask_ == null ? com.google.protobuf.FieldMask.getDefaultInstance() : updateMask_; } /** * * * <pre> * Optional. Field mask is used to specify the fields to be overwritten in the * Acl resource by the update. 
The fields specified in the update_mask are * relative to the resource, not the full request. A field will be overwritten * if it is in the mask. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = OPTIONAL]; * </code> */ @java.lang.Override public com.google.protobuf.FieldMaskOrBuilder getUpdateMaskOrBuilder() { return updateMask_ == null ? com.google.protobuf.FieldMask.getDefaultInstance() : updateMask_; } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { if (((bitField0_ & 0x00000001) != 0)) { output.writeMessage(1, getAcl()); } if (((bitField0_ & 0x00000002) != 0)) { output.writeMessage(2, getUpdateMask()); } getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) != 0)) { size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, getAcl()); } if (((bitField0_ & 0x00000002) != 0)) { size += com.google.protobuf.CodedOutputStream.computeMessageSize(2, getUpdateMask()); } size += getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof com.google.cloud.managedkafka.v1.UpdateAclRequest)) { return super.equals(obj); } com.google.cloud.managedkafka.v1.UpdateAclRequest other = (com.google.cloud.managedkafka.v1.UpdateAclRequest) obj; if (hasAcl() != other.hasAcl()) return false; if (hasAcl()) { if (!getAcl().equals(other.getAcl())) return false; } if (hasUpdateMask() != other.hasUpdateMask()) return false; if (hasUpdateMask()) { 
if (!getUpdateMask().equals(other.getUpdateMask())) return false; } if (!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); if (hasAcl()) { hash = (37 * hash) + ACL_FIELD_NUMBER; hash = (53 * hash) + getAcl().hashCode(); } if (hasUpdateMask()) { hash = (37 * hash) + UPDATE_MASK_FIELD_NUMBER; hash = (53 * hash) + getUpdateMask().hashCode(); } hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static com.google.cloud.managedkafka.v1.UpdateAclRequest parseFrom( java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.managedkafka.v1.UpdateAclRequest parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.managedkafka.v1.UpdateAclRequest parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.managedkafka.v1.UpdateAclRequest parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.managedkafka.v1.UpdateAclRequest parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.managedkafka.v1.UpdateAclRequest parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return 
PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.managedkafka.v1.UpdateAclRequest parseFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.managedkafka.v1.UpdateAclRequest parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.managedkafka.v1.UpdateAclRequest parseDelimitedFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); } public static com.google.cloud.managedkafka.v1.UpdateAclRequest parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.managedkafka.v1.UpdateAclRequest parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.managedkafka.v1.UpdateAclRequest parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder(com.google.cloud.managedkafka.v1.UpdateAclRequest prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public 
Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * * * <pre> * Request for UpdateAcl. * </pre> * * Protobuf type {@code google.cloud.managedkafka.v1.UpdateAclRequest} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:google.cloud.managedkafka.v1.UpdateAclRequest) com.google.cloud.managedkafka.v1.UpdateAclRequestOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.managedkafka.v1.ManagedKafkaProto .internal_static_google_cloud_managedkafka_v1_UpdateAclRequest_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.managedkafka.v1.ManagedKafkaProto .internal_static_google_cloud_managedkafka_v1_UpdateAclRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.managedkafka.v1.UpdateAclRequest.class, com.google.cloud.managedkafka.v1.UpdateAclRequest.Builder.class); } // Construct using com.google.cloud.managedkafka.v1.UpdateAclRequest.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) { getAclFieldBuilder(); getUpdateMaskFieldBuilder(); } } @java.lang.Override public Builder clear() { super.clear(); bitField0_ = 0; acl_ = null; if (aclBuilder_ != null) { aclBuilder_.dispose(); aclBuilder_ = null; } updateMask_ = null; if (updateMaskBuilder_ != null) { updateMaskBuilder_.dispose(); 
updateMaskBuilder_ = null; } return this; } @java.lang.Override public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return com.google.cloud.managedkafka.v1.ManagedKafkaProto .internal_static_google_cloud_managedkafka_v1_UpdateAclRequest_descriptor; } @java.lang.Override public com.google.cloud.managedkafka.v1.UpdateAclRequest getDefaultInstanceForType() { return com.google.cloud.managedkafka.v1.UpdateAclRequest.getDefaultInstance(); } @java.lang.Override public com.google.cloud.managedkafka.v1.UpdateAclRequest build() { com.google.cloud.managedkafka.v1.UpdateAclRequest result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public com.google.cloud.managedkafka.v1.UpdateAclRequest buildPartial() { com.google.cloud.managedkafka.v1.UpdateAclRequest result = new com.google.cloud.managedkafka.v1.UpdateAclRequest(this); if (bitField0_ != 0) { buildPartial0(result); } onBuilt(); return result; } private void buildPartial0(com.google.cloud.managedkafka.v1.UpdateAclRequest result) { int from_bitField0_ = bitField0_; int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000001) != 0)) { result.acl_ = aclBuilder_ == null ? acl_ : aclBuilder_.build(); to_bitField0_ |= 0x00000001; } if (((from_bitField0_ & 0x00000002) != 0)) { result.updateMask_ = updateMaskBuilder_ == null ? 
updateMask_ : updateMaskBuilder_.build(); to_bitField0_ |= 0x00000002; } result.bitField0_ |= to_bitField0_; } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof com.google.cloud.managedkafka.v1.UpdateAclRequest) { return mergeFrom((com.google.cloud.managedkafka.v1.UpdateAclRequest) other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(com.google.cloud.managedkafka.v1.UpdateAclRequest other) { if (other == com.google.cloud.managedkafka.v1.UpdateAclRequest.getDefaultInstance()) return this; if (other.hasAcl()) { mergeAcl(other.getAcl()); } if (other.hasUpdateMask()) { mergeUpdateMask(other.getUpdateMask()); } this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } 
try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { input.readMessage(getAclFieldBuilder().getBuilder(), extensionRegistry); bitField0_ |= 0x00000001; break; } // case 10 case 18: { input.readMessage(getUpdateMaskFieldBuilder().getBuilder(), extensionRegistry); bitField0_ |= 0x00000002; break; } // case 18 default: { if (!super.parseUnknownField(input, extensionRegistry, tag)) { done = true; // was an endgroup tag } break; } // default: } // switch (tag) } // while (!done) } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.unwrapIOException(); } finally { onChanged(); } // finally return this; } private int bitField0_; private com.google.cloud.managedkafka.v1.Acl acl_; private com.google.protobuf.SingleFieldBuilderV3< com.google.cloud.managedkafka.v1.Acl, com.google.cloud.managedkafka.v1.Acl.Builder, com.google.cloud.managedkafka.v1.AclOrBuilder> aclBuilder_; /** * * * <pre> * Required. The updated acl. Its `name` and `etag` fields must be populated. * `acl_entries` must not be empty in the updated acl; to remove all acl * entries for an acl, use DeleteAcl. * </pre> * * <code>.google.cloud.managedkafka.v1.Acl acl = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return Whether the acl field is set. */ public boolean hasAcl() { return ((bitField0_ & 0x00000001) != 0); } /** * * * <pre> * Required. The updated acl. Its `name` and `etag` fields must be populated. * `acl_entries` must not be empty in the updated acl; to remove all acl * entries for an acl, use DeleteAcl. * </pre> * * <code>.google.cloud.managedkafka.v1.Acl acl = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return The acl. */ public com.google.cloud.managedkafka.v1.Acl getAcl() { if (aclBuilder_ == null) { return acl_ == null ? com.google.cloud.managedkafka.v1.Acl.getDefaultInstance() : acl_; } else { return aclBuilder_.getMessage(); } } /** * * * <pre> * Required. The updated acl. 
Its `name` and `etag` fields must be populated. * `acl_entries` must not be empty in the updated acl; to remove all acl * entries for an acl, use DeleteAcl. * </pre> * * <code>.google.cloud.managedkafka.v1.Acl acl = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder setAcl(com.google.cloud.managedkafka.v1.Acl value) { if (aclBuilder_ == null) { if (value == null) { throw new NullPointerException(); } acl_ = value; } else { aclBuilder_.setMessage(value); } bitField0_ |= 0x00000001; onChanged(); return this; } /** * * * <pre> * Required. The updated acl. Its `name` and `etag` fields must be populated. * `acl_entries` must not be empty in the updated acl; to remove all acl * entries for an acl, use DeleteAcl. * </pre> * * <code>.google.cloud.managedkafka.v1.Acl acl = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder setAcl(com.google.cloud.managedkafka.v1.Acl.Builder builderForValue) { if (aclBuilder_ == null) { acl_ = builderForValue.build(); } else { aclBuilder_.setMessage(builderForValue.build()); } bitField0_ |= 0x00000001; onChanged(); return this; } /** * * * <pre> * Required. The updated acl. Its `name` and `etag` fields must be populated. * `acl_entries` must not be empty in the updated acl; to remove all acl * entries for an acl, use DeleteAcl. * </pre> * * <code>.google.cloud.managedkafka.v1.Acl acl = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder mergeAcl(com.google.cloud.managedkafka.v1.Acl value) { if (aclBuilder_ == null) { if (((bitField0_ & 0x00000001) != 0) && acl_ != null && acl_ != com.google.cloud.managedkafka.v1.Acl.getDefaultInstance()) { getAclBuilder().mergeFrom(value); } else { acl_ = value; } } else { aclBuilder_.mergeFrom(value); } if (acl_ != null) { bitField0_ |= 0x00000001; onChanged(); } return this; } /** * * * <pre> * Required. The updated acl. Its `name` and `etag` fields must be populated. 
* `acl_entries` must not be empty in the updated acl; to remove all acl * entries for an acl, use DeleteAcl. * </pre> * * <code>.google.cloud.managedkafka.v1.Acl acl = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder clearAcl() { bitField0_ = (bitField0_ & ~0x00000001); acl_ = null; if (aclBuilder_ != null) { aclBuilder_.dispose(); aclBuilder_ = null; } onChanged(); return this; } /** * * * <pre> * Required. The updated acl. Its `name` and `etag` fields must be populated. * `acl_entries` must not be empty in the updated acl; to remove all acl * entries for an acl, use DeleteAcl. * </pre> * * <code>.google.cloud.managedkafka.v1.Acl acl = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public com.google.cloud.managedkafka.v1.Acl.Builder getAclBuilder() { bitField0_ |= 0x00000001; onChanged(); return getAclFieldBuilder().getBuilder(); } /** * * * <pre> * Required. The updated acl. Its `name` and `etag` fields must be populated. * `acl_entries` must not be empty in the updated acl; to remove all acl * entries for an acl, use DeleteAcl. * </pre> * * <code>.google.cloud.managedkafka.v1.Acl acl = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public com.google.cloud.managedkafka.v1.AclOrBuilder getAclOrBuilder() { if (aclBuilder_ != null) { return aclBuilder_.getMessageOrBuilder(); } else { return acl_ == null ? com.google.cloud.managedkafka.v1.Acl.getDefaultInstance() : acl_; } } /** * * * <pre> * Required. The updated acl. Its `name` and `etag` fields must be populated. * `acl_entries` must not be empty in the updated acl; to remove all acl * entries for an acl, use DeleteAcl. 
* </pre> * * <code>.google.cloud.managedkafka.v1.Acl acl = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> */ private com.google.protobuf.SingleFieldBuilderV3< com.google.cloud.managedkafka.v1.Acl, com.google.cloud.managedkafka.v1.Acl.Builder, com.google.cloud.managedkafka.v1.AclOrBuilder> getAclFieldBuilder() { if (aclBuilder_ == null) { aclBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< com.google.cloud.managedkafka.v1.Acl, com.google.cloud.managedkafka.v1.Acl.Builder, com.google.cloud.managedkafka.v1.AclOrBuilder>( getAcl(), getParentForChildren(), isClean()); acl_ = null; } return aclBuilder_; } private com.google.protobuf.FieldMask updateMask_; private com.google.protobuf.SingleFieldBuilderV3< com.google.protobuf.FieldMask, com.google.protobuf.FieldMask.Builder, com.google.protobuf.FieldMaskOrBuilder> updateMaskBuilder_; /** * * * <pre> * Optional. Field mask is used to specify the fields to be overwritten in the * Acl resource by the update. The fields specified in the update_mask are * relative to the resource, not the full request. A field will be overwritten * if it is in the mask. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = OPTIONAL]; * </code> * * @return Whether the updateMask field is set. */ public boolean hasUpdateMask() { return ((bitField0_ & 0x00000002) != 0); } /** * * * <pre> * Optional. Field mask is used to specify the fields to be overwritten in the * Acl resource by the update. The fields specified in the update_mask are * relative to the resource, not the full request. A field will be overwritten * if it is in the mask. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = OPTIONAL]; * </code> * * @return The updateMask. */ public com.google.protobuf.FieldMask getUpdateMask() { if (updateMaskBuilder_ == null) { return updateMask_ == null ? 
com.google.protobuf.FieldMask.getDefaultInstance() : updateMask_; } else { return updateMaskBuilder_.getMessage(); } } /** * * * <pre> * Optional. Field mask is used to specify the fields to be overwritten in the * Acl resource by the update. The fields specified in the update_mask are * relative to the resource, not the full request. A field will be overwritten * if it is in the mask. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = OPTIONAL]; * </code> */ public Builder setUpdateMask(com.google.protobuf.FieldMask value) { if (updateMaskBuilder_ == null) { if (value == null) { throw new NullPointerException(); } updateMask_ = value; } else { updateMaskBuilder_.setMessage(value); } bitField0_ |= 0x00000002; onChanged(); return this; } /** * * * <pre> * Optional. Field mask is used to specify the fields to be overwritten in the * Acl resource by the update. The fields specified in the update_mask are * relative to the resource, not the full request. A field will be overwritten * if it is in the mask. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = OPTIONAL]; * </code> */ public Builder setUpdateMask(com.google.protobuf.FieldMask.Builder builderForValue) { if (updateMaskBuilder_ == null) { updateMask_ = builderForValue.build(); } else { updateMaskBuilder_.setMessage(builderForValue.build()); } bitField0_ |= 0x00000002; onChanged(); return this; } /** * * * <pre> * Optional. Field mask is used to specify the fields to be overwritten in the * Acl resource by the update. The fields specified in the update_mask are * relative to the resource, not the full request. A field will be overwritten * if it is in the mask. 
* </pre> * * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = OPTIONAL]; * </code> */ public Builder mergeUpdateMask(com.google.protobuf.FieldMask value) { if (updateMaskBuilder_ == null) { if (((bitField0_ & 0x00000002) != 0) && updateMask_ != null && updateMask_ != com.google.protobuf.FieldMask.getDefaultInstance()) { getUpdateMaskBuilder().mergeFrom(value); } else { updateMask_ = value; } } else { updateMaskBuilder_.mergeFrom(value); } if (updateMask_ != null) { bitField0_ |= 0x00000002; onChanged(); } return this; } /** * * * <pre> * Optional. Field mask is used to specify the fields to be overwritten in the * Acl resource by the update. The fields specified in the update_mask are * relative to the resource, not the full request. A field will be overwritten * if it is in the mask. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = OPTIONAL]; * </code> */ public Builder clearUpdateMask() { bitField0_ = (bitField0_ & ~0x00000002); updateMask_ = null; if (updateMaskBuilder_ != null) { updateMaskBuilder_.dispose(); updateMaskBuilder_ = null; } onChanged(); return this; } /** * * * <pre> * Optional. Field mask is used to specify the fields to be overwritten in the * Acl resource by the update. The fields specified in the update_mask are * relative to the resource, not the full request. A field will be overwritten * if it is in the mask. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = OPTIONAL]; * </code> */ public com.google.protobuf.FieldMask.Builder getUpdateMaskBuilder() { bitField0_ |= 0x00000002; onChanged(); return getUpdateMaskFieldBuilder().getBuilder(); } /** * * * <pre> * Optional. Field mask is used to specify the fields to be overwritten in the * Acl resource by the update. The fields specified in the update_mask are * relative to the resource, not the full request. A field will be overwritten * if it is in the mask. 
* </pre> * * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = OPTIONAL]; * </code> */ public com.google.protobuf.FieldMaskOrBuilder getUpdateMaskOrBuilder() { if (updateMaskBuilder_ != null) { return updateMaskBuilder_.getMessageOrBuilder(); } else { return updateMask_ == null ? com.google.protobuf.FieldMask.getDefaultInstance() : updateMask_; } } /** * * * <pre> * Optional. Field mask is used to specify the fields to be overwritten in the * Acl resource by the update. The fields specified in the update_mask are * relative to the resource, not the full request. A field will be overwritten * if it is in the mask. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = OPTIONAL]; * </code> */ private com.google.protobuf.SingleFieldBuilderV3< com.google.protobuf.FieldMask, com.google.protobuf.FieldMask.Builder, com.google.protobuf.FieldMaskOrBuilder> getUpdateMaskFieldBuilder() { if (updateMaskBuilder_ == null) { updateMaskBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< com.google.protobuf.FieldMask, com.google.protobuf.FieldMask.Builder, com.google.protobuf.FieldMaskOrBuilder>( getUpdateMask(), getParentForChildren(), isClean()); updateMask_ = null; } return updateMaskBuilder_; } @java.lang.Override public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:google.cloud.managedkafka.v1.UpdateAclRequest) } // @@protoc_insertion_point(class_scope:google.cloud.managedkafka.v1.UpdateAclRequest) private static final com.google.cloud.managedkafka.v1.UpdateAclRequest DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new com.google.cloud.managedkafka.v1.UpdateAclRequest(); } public static 
com.google.cloud.managedkafka.v1.UpdateAclRequest getDefaultInstance() { return DEFAULT_INSTANCE; } private static final com.google.protobuf.Parser<UpdateAclRequest> PARSER = new com.google.protobuf.AbstractParser<UpdateAclRequest>() { @java.lang.Override public UpdateAclRequest parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { Builder builder = newBuilder(); try { builder.mergeFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(builder.buildPartial()); } catch (com.google.protobuf.UninitializedMessageException e) { throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException(e) .setUnfinishedMessage(builder.buildPartial()); } return builder.buildPartial(); } }; public static com.google.protobuf.Parser<UpdateAclRequest> parser() { return PARSER; } @java.lang.Override public com.google.protobuf.Parser<UpdateAclRequest> getParserForType() { return PARSER; } @java.lang.Override public com.google.cloud.managedkafka.v1.UpdateAclRequest getDefaultInstanceForType() { return DEFAULT_INSTANCE; } }
apache/pulsar
35,679
pulsar-client-tools-test/src/test/java/org/apache/pulsar/admin/cli/CmdFunctionsTest.java
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.apache.pulsar.admin.cli; import static org.mockito.Mockito.any; import static org.mockito.Mockito.anyString; import static org.mockito.Mockito.eq; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.times; import static org.mockito.Mockito.verify; import static org.mockito.Mockito.when; import static org.testng.Assert.assertEquals; import static org.testng.Assert.assertNull; import static org.testng.Assert.assertTrue; import java.io.PrintWriter; import java.io.StringWriter; import lombok.Cleanup; import lombok.extern.slf4j.Slf4j; import org.apache.pulsar.admin.cli.CmdFunctions.CreateFunction; import org.apache.pulsar.admin.cli.CmdFunctions.DeleteFunction; import org.apache.pulsar.admin.cli.CmdFunctions.GetFunction; import org.apache.pulsar.admin.cli.CmdFunctions.GetFunctionStatus; import org.apache.pulsar.admin.cli.CmdFunctions.ListFunctions; import org.apache.pulsar.admin.cli.CmdFunctions.RestartFunction; import org.apache.pulsar.admin.cli.CmdFunctions.StateGetter; import org.apache.pulsar.admin.cli.CmdFunctions.StopFunction; import org.apache.pulsar.admin.cli.CmdFunctions.UpdateFunction; import org.apache.pulsar.client.admin.Functions; import 
org.apache.pulsar.client.admin.PulsarAdmin; import org.apache.pulsar.common.functions.FunctionConfig; import org.apache.pulsar.common.functions.UpdateOptionsImpl; import org.apache.pulsar.functions.api.Context; import org.apache.pulsar.functions.api.Function; import org.apache.pulsar.functions.api.utils.IdentityFunction; import org.testng.annotations.BeforeMethod; import org.testng.annotations.Test; import picocli.CommandLine; /** * Unit test of {@link CmdFunctions}. */ @Slf4j public class CmdFunctionsTest { private static final String TEST_NAME = "test_name"; private static final String JAR_NAME = CmdFunctionsTest.class.getClassLoader() .getResource("dummyexamples.jar").getFile(); private static final String GO_EXEC_FILE_NAME = "test-go-function-with-url"; private static final String PYTHON_FILE_NAME = "test-go-function-with-url"; private static final String URL = "file:" + JAR_NAME; private static final String URL_WITH_GO = "file:" + GO_EXEC_FILE_NAME; private static final String URL_WITH_PY = "file:" + PYTHON_FILE_NAME; private static final String FN_NAME = TEST_NAME + "-function"; private static final String INPUT_TOPIC_NAME = TEST_NAME + "-input-topic"; private static final String OUTPUT_TOPIC_NAME = TEST_NAME + "-output-topic"; private static final String TENANT = TEST_NAME + "-tenant"; private static final String NAMESPACE = TEST_NAME + "-namespace"; private static final String PACKAGE_URL = "function://sample/ns1/jardummyexamples@1"; private static final String PACKAGE_GO_URL = "function://sample/ns1/godummyexamples@1"; private static final String PACKAGE_PY_URL = "function://sample/ns1/pydummyexamples@1"; private static final String PACKAGE_INVALID_URL = "functionsample.jar"; private static final String BUILTIN_NAR = "dummyexamples"; private PulsarAdmin admin; private Functions functions; private CmdFunctions cmd; public static class DummyFunction implements Function<String, String> { public DummyFunction() { } @Override public String process(String input, 
Context context) throws Exception { return null; } } @BeforeMethod public void setup() throws Exception { this.admin = mock(PulsarAdmin.class); this.functions = mock(Functions.class); when(admin.functions()).thenReturn(functions); when(admin.getServiceUrl()).thenReturn("http://localhost:1234"); this.cmd = new CmdFunctions(() -> admin); } // @Test // public void testLocalRunnerCmdNoArguments() throws Exception { // cmd.run(new String[] { "run" }); // // LocalRunner runner = cmd.getLocalRunner(); // assertNull(runner.getFunctionName()); // assertNull(runner.getInputs()); // assertNull(runner.getOutput()); // assertNull(runner.getFnConfigFile()); // } /* TODO(sijie):- Can we fix this? @Test public void testLocalRunnerCmdSettings() throws Exception { String fnName = TEST_NAME + "-function"; String sourceTopicName = TEST_NAME + "-source-topic"; String output = TEST_NAME + "-sink-topic"; cmd.run(new String[] { "localrun", "--name", fnName, "--source-topics", sourceTopicName, "--output", output }); LocalRunner runner = cmd.getLocalRunner(); assertEquals(fnName, runner.getFunctionName()); assertEquals(sourceTopicName, runner.getInputs()); assertEquals(output, runner.getOutput()); assertNull(runner.getFnConfigFile()); } @Test public void testLocalRunnerCmdYaml() throws Exception { URL yamlUrl = getClass().getClassLoader().getResource("test_function_config.yml"); String configFile = yamlUrl.getPath(); cmd.run(new String[] { "localrun", "--function-config", configFile }); LocalRunner runner = cmd.getLocalRunner(); assertNull(runner.getFunctionName()); assertNull(runner.getInputs()); assertNull(runner.getOutput()); assertEquals(configFile, runner.getFnConfigFile()); } */ @Test public void testCreateFunction() throws Exception { cmd.run(new String[] { "create", "--name", FN_NAME, "--inputs", INPUT_TOPIC_NAME, "--output", OUTPUT_TOPIC_NAME, "--jar", JAR_NAME, "--auto-ack", "false", "--tenant", "sample", "--namespace", "ns1", "--className", DummyFunction.class.getName(), 
"--dead-letter-topic", "test-dead-letter-topic", "--custom-runtime-options", "custom-runtime-options", "--user-config", "{\"key\": [\"value1\", \"value2\"]}", "--runtime-flags", "--add-opens java.base/java.lang=ALL-UNNAMED" }); CreateFunction creater = cmd.getCreater(); assertEquals(FN_NAME, creater.getFunctionName()); assertEquals(INPUT_TOPIC_NAME, creater.getInputs()); assertEquals(OUTPUT_TOPIC_NAME, creater.getOutput()); assertEquals(Boolean.FALSE, creater.getAutoAck()); assertEquals("test-dead-letter-topic", creater.getDeadLetterTopic()); assertEquals("custom-runtime-options", creater.getCustomRuntimeOptions()); assertEquals("--add-opens java.base/java.lang=ALL-UNNAMED", creater.getRuntimeFlags()); verify(functions, times(1)).createFunction(any(FunctionConfig.class), anyString()); } @Test public void restartFunction() throws Exception { String tenant = "sample"; String namespace = "ns1"; int instanceId = 0; cmd.run(new String[] { "restart", "--tenant", tenant, "--namespace", namespace, "--name", FN_NAME, "--instance-id", Integer.toString(instanceId)}); RestartFunction restarter = cmd.getRestarter(); assertEquals(FN_NAME, restarter.getFunctionName()); verify(functions, times(1)).restartFunction(tenant, namespace, FN_NAME, instanceId); } @Test public void restartFunctionInstances() throws Exception { String tenant = "sample"; String namespace = "ns1"; cmd.run(new String[] { "restart", "--tenant", tenant, "--namespace", namespace, "--name", FN_NAME}); RestartFunction restarter = cmd.getRestarter(); assertEquals(FN_NAME, restarter.getFunctionName()); verify(functions, times(1)).restartFunction(tenant, namespace, FN_NAME); } @Test public void stopFunction() throws Exception { String tenant = "sample"; String namespace = "ns1"; int instanceId = 0; cmd.run(new String[] { "stop", "--tenant", tenant, "--namespace", namespace, "--name", FN_NAME, "--instance-id", Integer.toString(instanceId)}); StopFunction stop = cmd.getStopper(); assertEquals(FN_NAME, 
stop.getFunctionName()); verify(functions, times(1)).stopFunction(tenant, namespace, FN_NAME, instanceId); } @Test public void stopFunctionInstances() throws Exception { String tenant = "sample"; String namespace = "ns1"; cmd.run(new String[] { "stop", "--tenant", tenant, "--namespace", namespace, "--name", FN_NAME}); StopFunction stop = cmd.getStopper(); assertEquals(FN_NAME, stop.getFunctionName()); verify(functions, times(1)).stopFunction(tenant, namespace, FN_NAME); } @Test public void startFunction() throws Exception { String tenant = "sample"; String namespace = "ns1"; int instanceId = 0; cmd.run(new String[] { "start", "--tenant", tenant, "--namespace", namespace, "--name", FN_NAME, "--instance-id", Integer.toString(instanceId)}); CmdFunctions.StartFunction stop = cmd.getStarter(); assertEquals(FN_NAME, stop.getFunctionName()); verify(functions, times(1)).startFunction(tenant, namespace, FN_NAME, instanceId); } @Test public void startFunctionInstances() throws Exception { String tenant = "sample"; String namespace = "ns1"; cmd.run(new String[] { "start", "--tenant", tenant, "--namespace", namespace, "--name", FN_NAME}); CmdFunctions.StartFunction stop = cmd.getStarter(); assertEquals(FN_NAME, stop.getFunctionName()); verify(functions, times(1)).startFunction(tenant, namespace, FN_NAME); } @Test public void testGetFunctionStatus() throws Exception { String tenant = "sample"; String namespace = "ns1"; int instanceId = 0; cmd.run(new String[] { "getstatus", "--tenant", tenant, "--namespace", namespace, "--name", FN_NAME, "--instance-id", Integer.toString(instanceId)}); GetFunctionStatus status = cmd.getStatuser(); assertEquals(FN_NAME, status.getFunctionName()); verify(functions, times(1)).getFunctionStatus(tenant, namespace, FN_NAME, instanceId); } @Test public void testCreateFunctionWithFileUrl() throws Exception { cmd.run(new String[] { "create", "--name", FN_NAME, "--inputs", INPUT_TOPIC_NAME, "--output", OUTPUT_TOPIC_NAME, "--jar", URL, "--tenant", 
"sample", "--namespace", "ns1", "--className", DummyFunction.class.getName(), }); CreateFunction creater = cmd.getCreater(); assertEquals(FN_NAME, creater.getFunctionName()); assertEquals(INPUT_TOPIC_NAME, creater.getInputs()); assertEquals(OUTPUT_TOPIC_NAME, creater.getOutput()); verify(functions, times(1)).createFunctionWithUrl(any(FunctionConfig.class), anyString()); } @Test public void testCreateGoFunctionWithFileUrl() throws Exception { cmd.run(new String[] { "create", "--name", "test-go-function", "--inputs", INPUT_TOPIC_NAME, "--output", OUTPUT_TOPIC_NAME, "--go", URL_WITH_GO, "--tenant", "sample", "--namespace", "ns1", }); CreateFunction creater = cmd.getCreater(); assertEquals("test-go-function", creater.getFunctionName()); assertEquals(INPUT_TOPIC_NAME, creater.getInputs()); assertEquals(OUTPUT_TOPIC_NAME, creater.getOutput()); verify(functions, times(1)).createFunctionWithUrl(any(FunctionConfig.class), anyString()); } @Test public void testCreatePyFunctionWithFileUrl() throws Exception { cmd.run(new String[] { "create", "--name", "test-py-function", "--inputs", INPUT_TOPIC_NAME, "--output", OUTPUT_TOPIC_NAME, "--py", URL_WITH_PY, "--tenant", "sample", "--namespace", "ns1", "--className", "process_python_function", }); CreateFunction creater = cmd.getCreater(); assertEquals("test-py-function", creater.getFunctionName()); assertEquals(INPUT_TOPIC_NAME, creater.getInputs()); assertEquals(OUTPUT_TOPIC_NAME, creater.getOutput()); verify(functions, times(1)).createFunctionWithUrl(any(FunctionConfig.class), anyString()); } @Test public void testCreateFunctionWithPackageUrl() throws Exception { cmd.run(new String[] { "create", "--name", FN_NAME, "--inputs", INPUT_TOPIC_NAME, "--output", OUTPUT_TOPIC_NAME, "--jar", PACKAGE_URL, "--tenant", "sample", "--namespace", "ns1", "--className", DummyFunction.class.getName(), }); CreateFunction creater = cmd.getCreater(); assertEquals(FN_NAME, creater.getFunctionName()); assertEquals(INPUT_TOPIC_NAME, creater.getInputs()); 
assertEquals(OUTPUT_TOPIC_NAME, creater.getOutput()); verify(functions, times(1)).createFunctionWithUrl(any(FunctionConfig.class), anyString()); } @Test public void testCreateGoFunctionWithPackageUrl() throws Exception { cmd.run(new String[] { "create", "--name", "test-go-function", "--inputs", INPUT_TOPIC_NAME, "--output", OUTPUT_TOPIC_NAME, "--go", PACKAGE_GO_URL, "--tenant", "sample", "--namespace", "ns1", }); CreateFunction creater = cmd.getCreater(); assertEquals("test-go-function", creater.getFunctionName()); assertEquals(INPUT_TOPIC_NAME, creater.getInputs()); assertEquals(OUTPUT_TOPIC_NAME, creater.getOutput()); verify(functions, times(1)).createFunctionWithUrl(any(FunctionConfig.class), anyString()); } @Test public void testCreatePyFunctionWithPackageUrl() throws Exception { cmd.run(new String[] { "create", "--name", "test-py-function", "--inputs", INPUT_TOPIC_NAME, "--output", OUTPUT_TOPIC_NAME, "--py", PACKAGE_PY_URL, "--tenant", "sample", "--namespace", "ns1", "--className", "process_python_function", }); CreateFunction creater = cmd.getCreater(); assertEquals("test-py-function", creater.getFunctionName()); assertEquals(INPUT_TOPIC_NAME, creater.getInputs()); assertEquals(OUTPUT_TOPIC_NAME, creater.getOutput()); verify(functions, times(1)).createFunctionWithUrl(any(FunctionConfig.class), anyString()); } @Test public void testCreateFunctionWithInvalidPackageUrl() throws Exception { cmd.run(new String[] { "create", "--name", FN_NAME, "--inputs", INPUT_TOPIC_NAME, "--output", OUTPUT_TOPIC_NAME, "--jar", PACKAGE_INVALID_URL, "--tenant", "sample", "--namespace", "ns1", "--className", DummyFunction.class.getName(), }); CreateFunction creater = cmd.getCreater(); assertEquals(FN_NAME, creater.getFunctionName()); assertEquals(INPUT_TOPIC_NAME, creater.getInputs()); assertEquals(OUTPUT_TOPIC_NAME, creater.getOutput()); verify(functions, times(0)).createFunctionWithUrl(any(FunctionConfig.class), anyString()); } @Test public void testCreateFunctionWithBuiltinNar() 
throws Exception { cmd.run(new String[] { "create", "--name", FN_NAME, "--inputs", INPUT_TOPIC_NAME, "--output", OUTPUT_TOPIC_NAME, "--function-type", BUILTIN_NAR, "--tenant", "sample", "--namespace", "ns1", }); CreateFunction creater = cmd.getCreater(); assertEquals(FN_NAME, creater.getFunctionName()); assertEquals(INPUT_TOPIC_NAME, creater.getInputs()); assertEquals(OUTPUT_TOPIC_NAME, creater.getOutput()); assertEquals("builtin://" + BUILTIN_NAR, creater.getFunctionConfig().getJar()); verify(functions, times(1)).createFunction(any(FunctionConfig.class), anyString()); } @Test public void testCreateFunctionWithoutClassName() throws Exception { cmd.run(new String[] { "create", "--name", FN_NAME, "--inputs", INPUT_TOPIC_NAME, "--output", OUTPUT_TOPIC_NAME, "--jar", PACKAGE_URL, "--tenant", "sample", "--namespace", "ns1", }); verify(functions, times(0)).createFunctionWithUrl(any(FunctionConfig.class), anyString()); } @Test public void testCreateFunctionWithoutBasicArguments() throws Exception { cmd.run(new String[] { "create", "--inputs", INPUT_TOPIC_NAME, "--output", OUTPUT_TOPIC_NAME, "--jar", URL, "--className", IdentityFunction.class.getName(), }); CreateFunction creater = cmd.getCreater(); assertEquals("IdentityFunction", creater.getFunctionConfig().getName()); assertEquals("public", creater.getFunctionConfig().getTenant()); assertEquals("default", creater.getFunctionConfig().getNamespace()); assertEquals(INPUT_TOPIC_NAME, creater.getInputs()); assertEquals(OUTPUT_TOPIC_NAME, creater.getOutput()); verify(functions, times(1)).createFunctionWithUrl(any(FunctionConfig.class), anyString()); } @Test public void testCreateFunctionWithTopicPatterns() throws Exception { String topicPatterns = "persistent://tenant/ns/topicPattern*"; cmd.run(new String[] { "create", "--name", FN_NAME, "--topicsPattern", topicPatterns, "--output", OUTPUT_TOPIC_NAME, "--jar", JAR_NAME, "--tenant", "sample", "--namespace", "ns1", "--className", DummyFunction.class.getName(), }); 
CreateFunction creater = cmd.getCreater(); assertEquals(FN_NAME, creater.getFunctionName()); assertEquals(topicPatterns, creater.getTopicsPattern()); assertEquals(OUTPUT_TOPIC_NAME, creater.getOutput()); verify(functions, times(1)).createFunction(any(FunctionConfig.class), anyString()); } @Test public void testCreateUsingFullyQualifiedFunctionName() throws Exception { String tenant = "sample"; String namespace = "ns1"; String functionName = "func"; String fqfn = String.format("%s/%s/%s", tenant, namespace, functionName); cmd.run(new String[] { "create", "--inputs", INPUT_TOPIC_NAME, "--output", OUTPUT_TOPIC_NAME, "--fqfn", fqfn, "--jar", JAR_NAME, "--className", DummyFunction.class.getName(), }); CreateFunction creater = cmd.getCreater(); assertEquals(tenant, creater.getFunctionConfig().getTenant()); assertEquals(namespace, creater.getFunctionConfig().getNamespace()); assertEquals(functionName, creater.getFunctionConfig().getName()); verify(functions, times(1)).createFunction(any(FunctionConfig.class), anyString()); } @Test public void testCreateWithoutOutputTopicWithSkipFlag() throws Exception { cmd.run(new String[] { "create", "--inputs", INPUT_TOPIC_NAME, "--jar", JAR_NAME, "--tenant", "sample", "--namespace", "ns1", "--className", DummyFunction.class.getName(), }); CreateFunction creater = cmd.getCreater(); assertNull(creater.getFunctionConfig().getOutput()); verify(functions, times(1)).createFunction(any(FunctionConfig.class), anyString()); } @Test public void testCreateWithoutOutputTopic() throws Exception { @Cleanup StringWriter stringWriter = new StringWriter(); @Cleanup PrintWriter printWriter = new PrintWriter(stringWriter); cmd.getCommander().setOut(printWriter); cmd.run(new String[] { "create", "--inputs", INPUT_TOPIC_NAME, "--jar", JAR_NAME, "--tenant", "sample", "--namespace", "ns1", "--className", DummyFunction.class.getName(), }); CreateFunction creater = cmd.getCreater(); assertNull(creater.getFunctionConfig().getOutput()); 
assertTrue(stringWriter.toString().contains("Created successfully"));
    }

    /**
     * "get" should copy name/tenant/namespace into the GetFunction processor and
     * call the admin API exactly once with those values.
     */
    @Test
    public void testGetFunction() throws Exception {
        cmd.run(new String[] {
                "get",
                "--name", FN_NAME,
                "--tenant", TENANT,
                "--namespace", NAMESPACE
        });

        GetFunction getter = cmd.getGetter();
        assertEquals(FN_NAME, getter.getFunctionName());
        assertEquals(TENANT, getter.getTenant());
        assertEquals(NAMESPACE, getter.getNamespace());

        verify(functions, times(1)).getFunction(eq(TENANT), eq(NAMESPACE), eq(FN_NAME));
    }

    /** "delete" should parse the same coordinates and invoke deleteFunction once. */
    @Test
    public void testDeleteFunction() throws Exception {
        cmd.run(new String[] {
                "delete",
                "--name", FN_NAME,
                "--tenant", TENANT,
                "--namespace", NAMESPACE
        });

        DeleteFunction deleter = cmd.getDeleter();
        assertEquals(FN_NAME, deleter.getFunctionName());
        assertEquals(TENANT, deleter.getTenant());
        assertEquals(NAMESPACE, deleter.getNamespace());

        verify(functions, times(1)).deleteFunction(eq(TENANT), eq(NAMESPACE), eq(FN_NAME));
    }

    /**
     * "update" with a local jar should go through updateFunction (not the *WithUrl
     * variant) with default (empty) update options.
     * NOTE(review): tenant/namespace are passed as the literals "sample"/"ns1" here
     * while other tests use the TENANT/NAMESPACE constants — presumably they match;
     * confirm against the constants declared above this view.
     */
    @Test
    public void testUpdateFunction() throws Exception {
        cmd.run(new String[] {
                "update",
                "--name", FN_NAME,
                "--inputs", INPUT_TOPIC_NAME,
                "--output", OUTPUT_TOPIC_NAME,
                "--jar", JAR_NAME,
                "--tenant", "sample",
                "--namespace", "ns1",
                "--className", DummyFunction.class.getName(),
        });

        UpdateFunction updater = cmd.getUpdater();
        assertEquals(FN_NAME, updater.getFunctionName());
        assertEquals(INPUT_TOPIC_NAME, updater.getInputs());
        assertEquals(OUTPUT_TOPIC_NAME, updater.getOutput());

        verify(functions, times(1)).updateFunction(any(FunctionConfig.class), anyString(),
                eq(new UpdateOptionsImpl()));
    }

    /** "list" with explicit tenant/namespace should query exactly that pair. */
    @Test
    public void testListFunctions() throws Exception {
        cmd.run(new String[] {
                "list",
                "--tenant", TENANT,
                "--namespace", NAMESPACE
        });

        ListFunctions lister = cmd.getLister();
        assertEquals(TENANT, lister.getTenant());
        assertEquals(NAMESPACE, lister.getNamespace());

        verify(functions, times(1)).getFunctions(eq(TENANT), eq(NAMESPACE));
    }

    /** "list" with no flags should fall back to the public/default tenant/namespace. */
    @Test
    public void testListFunctionsWithDefaultValue() throws Exception {
        cmd.run(new String[] {
                "list",
        });

        ListFunctions lister = cmd.getLister();
        assertEquals("public", lister.getTenant());
        assertEquals("default", lister.getNamespace());

        verify(functions, times(1)).getFunctions(eq("public"), eq("default"));
    }

    /** "querystate" with a key should fetch that key's state exactly once. */
    @Test
    public void testStateGetter() throws Exception {
        String key = TEST_NAME + "-key";

        cmd.run(new String[] {
                "querystate",
                "--tenant", TENANT,
                "--namespace", NAMESPACE,
                "--name", FN_NAME,
                "--key", key
        });

        StateGetter stateGetter = cmd.getStateGetter();
        assertEquals(TENANT, stateGetter.getTenant());
        assertEquals(NAMESPACE, stateGetter.getNamespace());
        assertEquals(FN_NAME, stateGetter.getFunctionName());

        verify(functions, times(1)).getFunctionState(eq(TENANT), eq(NAMESPACE), eq(FN_NAME), eq(key));
    }

    /**
     * "querystate" without --key must print an error on stderr and never hit the
     * admin API. Redirects picocli's error stream into a StringWriter to assert on it.
     */
    @Test
    public void testStateGetterWithoutKey() throws Exception {
        CommandLine commander = cmd.getCommander();
        @Cleanup
        StringWriter stringWriter = new StringWriter();
        @Cleanup
        PrintWriter printWriter = new PrintWriter(stringWriter);
        commander.setErr(printWriter);

        cmd.run(new String[]{
                "querystate",
                "--tenant", TENANT,
                "--namespace", NAMESPACE,
                "--name", FN_NAME,
        });

        assertTrue(stringWriter.toString().startsWith(("State key needs to be specified")));

        StateGetter stateGetter = cmd.getStateGetter();
        assertEquals(TENANT, stateGetter.getTenant());
        assertEquals(NAMESPACE, stateGetter.getNamespace());
        assertEquals(FN_NAME, stateGetter.getFunctionName());

        // zero interactions: the missing key must short-circuit before any API call
        verify(functions, times(0)).getFunctionState(any(), any(), any(), any());
    }

    /**
     * "--cpu" should override only the cpu resource; ram (1 GiB) and disk (10 GiB)
     * keep their defaults. A URL jar routes through createFunctionWithUrl.
     */
    @Test
    public void testCreateFunctionWithCpu() throws Exception {
        cmd.run(new String[] {
                "create",
                "--name", FN_NAME,
                "--inputs", INPUT_TOPIC_NAME,
                "--output", OUTPUT_TOPIC_NAME,
                "--jar", URL,
                "--tenant", "sample",
                "--namespace", "ns1",
                "--className", DummyFunction.class.getName(),
                "--cpu", "5.0"
        });

        CreateFunction creater = cmd.getCreater();
        assertEquals(FN_NAME, creater.getFunctionName());
        assertEquals(INPUT_TOPIC_NAME, creater.getInputs());
        assertEquals(OUTPUT_TOPIC_NAME, creater.getOutput());
        assertEquals(creater.getFunctionConfig().getResources().getCpu(), 5.0, 0);
        // Disk/Ram should be default
        assertEquals(creater.getFunctionConfig().getResources().getRam(), Long.valueOf(1073741824L));
        assertEquals(creater.getFunctionConfig().getResources().getDisk(), Long.valueOf(10737418240L));

        verify(functions, times(1)).createFunctionWithUrl(any(FunctionConfig.class), anyString());
    }

    /** "--ram" should override only ram; cpu (1.0) and disk (10 GiB) stay default. */
    @Test
    public void testCreateFunctionWithRam() throws Exception {
        cmd.run(new String[] {
                "create",
                "--name", FN_NAME,
                "--inputs", INPUT_TOPIC_NAME,
                "--output", OUTPUT_TOPIC_NAME,
                "--jar", URL,
                "--tenant", "sample",
                "--namespace", "ns1",
                "--className", DummyFunction.class.getName(),
                "--ram", "5656565656"
        });

        CreateFunction creater = cmd.getCreater();
        assertEquals(FN_NAME, creater.getFunctionName());
        assertEquals(INPUT_TOPIC_NAME, creater.getInputs());
        assertEquals(OUTPUT_TOPIC_NAME, creater.getOutput());
        assertEquals(creater.getFunctionConfig().getResources().getRam(), Long.valueOf(5656565656L));
        // cpu/disk should be default
        assertEquals(creater.getFunctionConfig().getResources().getCpu(), 1.0, 0);
        assertEquals(creater.getFunctionConfig().getResources().getDisk(), Long.valueOf(10737418240L));

        verify(functions, times(1)).createFunctionWithUrl(any(FunctionConfig.class), anyString());
    }

    /** "--disk" should override only disk; cpu (1.0) and ram (1 GiB) stay default. */
    @Test
    public void testCreateFunctionWithDisk() throws Exception {
        cmd.run(new String[] {
                "create",
                "--name", FN_NAME,
                "--inputs", INPUT_TOPIC_NAME,
                "--output", OUTPUT_TOPIC_NAME,
                "--jar", URL,
                "--tenant", "sample",
                "--namespace", "ns1",
                "--className", DummyFunction.class.getName(),
                "--disk", "8080808080808080"
        });

        CreateFunction creater = cmd.getCreater();
        assertEquals(FN_NAME, creater.getFunctionName());
        assertEquals(INPUT_TOPIC_NAME, creater.getInputs());
        assertEquals(OUTPUT_TOPIC_NAME, creater.getOutput());
        assertEquals(creater.getFunctionConfig().getResources().getDisk(), Long.valueOf(8080808080808080L));
        // cpu/Ram should be default
        assertEquals(creater.getFunctionConfig().getResources().getRam(), Long.valueOf(1073741824L));
        assertEquals(creater.getFunctionConfig().getResources().getCpu(), 1.0, 0);

        verify(functions, times(1)).createFunctionWithUrl(any(FunctionConfig.class), anyString());
    }

    /** Same cpu-override contract as create, but through updateFunctionWithUrl. */
    @Test
    public void testUpdateFunctionWithCpu() throws Exception {
        cmd.run(new String[] {
                "update",
                "--name", FN_NAME,
                "--inputs", INPUT_TOPIC_NAME,
                "--output", OUTPUT_TOPIC_NAME,
                "--jar", URL,
                "--tenant", "sample",
                "--namespace", "ns1",
                "--className", DummyFunction.class.getName(),
                "--cpu", "5.0"
        });

        UpdateFunction updater = cmd.getUpdater();
        assertEquals(FN_NAME, updater.getFunctionName());
        assertEquals(INPUT_TOPIC_NAME, updater.getInputs());
        assertEquals(OUTPUT_TOPIC_NAME, updater.getOutput());
        assertEquals(updater.getFunctionConfig().getResources().getCpu(), 5.0, 0);
        // Disk/Ram should be default
        assertEquals(updater.getFunctionConfig().getResources().getRam(), Long.valueOf(1073741824L));
        assertEquals(updater.getFunctionConfig().getResources().getDisk(), Long.valueOf(10737418240L));

        verify(functions, times(1)).updateFunctionWithUrl(
                any(FunctionConfig.class), anyString(), eq(new UpdateOptionsImpl()));
    }

    /** Same ram-override contract as create, but through updateFunctionWithUrl. */
    @Test
    public void testUpdateFunctionWithRam() throws Exception {
        cmd.run(new String[] {
                "update",
                "--name", FN_NAME,
                "--inputs", INPUT_TOPIC_NAME,
                "--output", OUTPUT_TOPIC_NAME,
                "--jar", URL,
                "--tenant", "sample",
                "--namespace", "ns1",
                "--className", DummyFunction.class.getName(),
                "--ram", "5656565656"
        });

        UpdateFunction updater = cmd.getUpdater();
        assertEquals(FN_NAME, updater.getFunctionName());
        assertEquals(INPUT_TOPIC_NAME, updater.getInputs());
        assertEquals(OUTPUT_TOPIC_NAME, updater.getOutput());
        assertEquals(updater.getFunctionConfig().getResources().getRam(), Long.valueOf(5656565656L));
        // cpu/disk should be default
        assertEquals(updater.getFunctionConfig().getResources().getCpu(), 1.0, 0);
        assertEquals(updater.getFunctionConfig().getResources().getDisk(), Long.valueOf(10737418240L));

        verify(functions, times(1)).updateFunctionWithUrl(
                any(FunctionConfig.class), anyString(), eq(new UpdateOptionsImpl()));
    }

    /** Same disk-override contract as create, but through updateFunctionWithUrl. */
    @Test
    public void testUpdateFunctionWithDisk() throws Exception {
        cmd.run(new String[] {
                "update",
                "--name", FN_NAME,
                "--inputs", INPUT_TOPIC_NAME,
                "--output", OUTPUT_TOPIC_NAME,
                "--jar", URL,
                "--tenant", "sample",
                "--namespace", "ns1",
                "--className", DummyFunction.class.getName(),
                "--disk", "8080808080808080"
        });

        UpdateFunction updater = cmd.getUpdater();
        assertEquals(FN_NAME, updater.getFunctionName());
        assertEquals(INPUT_TOPIC_NAME, updater.getInputs());
        assertEquals(OUTPUT_TOPIC_NAME, updater.getOutput());
        assertEquals(updater.getFunctionConfig().getResources().getDisk(), Long.valueOf(8080808080808080L));
        // cpu/Ram should be default
        assertEquals(updater.getFunctionConfig().getResources().getRam(), Long.valueOf(1073741824L));
        assertEquals(updater.getFunctionConfig().getResources().getCpu(), 1.0, 0);

        verify(functions, times(1)).updateFunctionWithUrl(
                any(FunctionConfig.class), anyString(), eq(new UpdateOptionsImpl()));
    }

    /**
     * "--update-auth-data" must surface as setUpdateAuthData(true) on the
     * UpdateOptionsImpl passed to updateFunctionWithUrl (verified via eq()).
     */
    @Test
    public void testUpdateAuthData() throws Exception {
        cmd.run(new String[] {
                "update",
                "--name", FN_NAME,
                "--inputs", INPUT_TOPIC_NAME,
                "--output", OUTPUT_TOPIC_NAME,
                "--jar", URL,
                "--tenant", "sample",
                "--namespace", "ns1",
                "--className", DummyFunction.class.getName(),
                "--disk", "8080808080808080",
                "--update-auth-data"
        });

        UpdateFunction updater = cmd.getUpdater();
        assertEquals(FN_NAME, updater.getFunctionName());
        assertEquals(INPUT_TOPIC_NAME, updater.getInputs());
        assertEquals(OUTPUT_TOPIC_NAME, updater.getOutput());
        assertEquals(updater.getFunctionConfig().getResources().getDisk(), Long.valueOf(8080808080808080L));
        // cpu/Ram should be default
        assertEquals(updater.getFunctionConfig().getResources().getRam(), Long.valueOf(1073741824L));
        assertEquals(updater.getFunctionConfig().getResources().getCpu(), 1.0, 0);

        UpdateOptionsImpl updateOptions = new UpdateOptionsImpl();
        updateOptions.setUpdateAuthData(true);

        verify(functions, times(1)).updateFunctionWithUrl(any(FunctionConfig.class), anyString(),
                eq(updateOptions));
    }

    /** "download" by coordinates: transform-function flag defaults to false. */
    @Test
    public void testDownloadFunction() throws Exception {
        cmd.run(new String[] {
                "download",
                "--destination-file", JAR_NAME,
                "--name", FN_NAME,
                "--tenant", TENANT,
                "--namespace", NAMESPACE
        });

        verify(functions, times(1))
                .downloadFunction(JAR_NAME, TENANT, NAMESPACE, FN_NAME, false);
    }

    /** "download" by package path uses the two-argument overload. */
    @Test
    public void testDownloadFunctionByPath() throws Exception {
        cmd.run(new String[] {
                "download",
                "--destination-file", JAR_NAME,
                "--path", PACKAGE_URL
        });

        verify(functions, times(1))
                .downloadFunction(JAR_NAME, PACKAGE_URL);
    }

    /** "--transform-function" flips the final boolean of the download call to true. */
    @Test
    public void testDownloadTransformFunction() throws Exception {
        cmd.run(new String[] {
                "download",
                "--destination-file", JAR_NAME,
                "--name", FN_NAME,
                "--tenant", TENANT,
                "--namespace", NAMESPACE,
                "--transform-function"
        });

        verify(functions, times(1))
                .downloadFunction(JAR_NAME, TENANT, NAMESPACE, FN_NAME, true);
    }
}
apache/inlong
36,284
inlong-sort/sort-flink/sort-flink-v1.13/sort-connectors/mongodb-cdc/src/main/java/org/apache/inlong/sort/cdc/mongodb/debezium/table/MongoDBConnectorDeserializationSchema.java
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.inlong.sort.cdc.mongodb.debezium.table;

import org.apache.inlong.sort.cdc.base.debezium.DebeziumDeserializationSchema;
import org.apache.inlong.sort.cdc.mongodb.debezium.utils.RecordUtils;
import org.apache.inlong.sort.cdc.mongodb.table.filter.MongoDBRowKind;
import org.apache.inlong.sort.cdc.mongodb.table.filter.MongoDBRowKindValidator;

import com.mongodb.client.model.changestream.OperationType;
import com.mongodb.internal.HexUtils;
import com.ververica.cdc.connectors.mongodb.internal.MongoDBEnvelope;
import org.apache.flink.api.common.typeinfo.TypeInformation;
import org.apache.flink.table.data.DecimalData;
import org.apache.flink.table.data.GenericArrayData;
import org.apache.flink.table.data.GenericMapData;
import org.apache.flink.table.data.GenericRowData;
import org.apache.flink.table.data.RowData;
import org.apache.flink.table.data.StringData;
import org.apache.flink.table.data.TimestampData;
import org.apache.flink.table.types.logical.ArrayType;
import org.apache.flink.table.types.logical.DecimalType;
import org.apache.flink.table.types.logical.LogicalType;
import org.apache.flink.table.types.logical.MapType;
import org.apache.flink.table.types.logical.RowType;
import org.apache.flink.table.types.logical.utils.LogicalTypeUtils;
import org.apache.flink.types.RowKind;
import org.apache.flink.util.Collector;
import org.apache.kafka.connect.data.Schema;
import org.apache.kafka.connect.data.Struct;
import org.apache.kafka.connect.source.SourceRecord;
import org.bson.BsonBinary;
import org.bson.BsonBinarySubType;
import org.bson.BsonDateTime;
import org.bson.BsonDocument;
import org.bson.BsonMaxKey;
import org.bson.BsonMinKey;
import org.bson.BsonRegularExpression;
import org.bson.BsonTimestamp;
import org.bson.BsonUndefined;
import org.bson.BsonValue;
import org.bson.codecs.BsonArrayCodec;
import org.bson.codecs.EncoderContext;
import org.bson.json.JsonWriter;
import org.bson.types.Decimal128;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.io.Serializable;
import java.io.StringWriter;
import java.io.Writer;
import java.lang.reflect.Array;
import java.math.BigDecimal;
import java.time.Instant;
import java.time.LocalDate;
import java.time.LocalDateTime;
import java.time.LocalTime;
import java.time.ZoneId;
import java.time.ZonedDateTime;
import java.util.HashMap;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;

import static java.time.format.DateTimeFormatter.ISO_OFFSET_DATE_TIME;
import static org.apache.flink.util.Preconditions.checkArgument;
import static org.apache.flink.util.Preconditions.checkNotNull;

/**
 * Deserialization schema that turns MongoDB change-stream {@link SourceRecord}s
 * (Debezium/Ververica CDC envelope) into Flink Table/SQL internal {@link RowData}.
 *
 * <p>Data events (insert/delete/update/replace) are converted field-by-field from BSON
 * via {@link #physicalConverter}; schema events (drop/drop-database/rename) are emitted
 * as two-field rows of (data map, type map). Emission of each row kind is gated by
 * {@link #rowKindValidator}.
 */
public class MongoDBConnectorDeserializationSchema
        implements
            DebeziumDeserializationSchema<RowData> {

    protected static final Logger LOG = LoggerFactory.getLogger(MongoDBConnectorDeserializationSchema.class);

    private static final long serialVersionUID = 1750787080613035184L;

    /**
     * TypeInformation of the produced {@link RowData}.
     */
    private final TypeInformation<RowData> resultTypeInfo;

    /**
     * Local Time zone. Used to localize BSON datetime/timestamp values before
     * building DATE/TIME/TIMESTAMP columns.
     */
    private final ZoneId localTimeZone;

    /**
     * Runtime converter that converts {@link
     * com.mongodb.client.model.changestream.ChangeStreamDocument}s into {@link RowData} consisted
     * of physical column values.
     */
    protected final DeserializationRuntimeConverter physicalConverter;

    /**
     * Whether the deserializer needs to handle metadata columns.
     */
    protected final boolean hasMetadata;

    /**
     * A wrapped output collector which is used to append metadata columns after physical columns.
     */
    private final AppendMetadataCollector appendMetadataCollector;

    // When true, rows are produced as generic (data map, type map) pairs instead of
    // fixed physical columns (multi-table / whole-database sync mode).
    private boolean sourceMultipleEnable = false;

    /**
     * Validator to validate the row value.
     */
    private final MongoDBRowKindValidator rowKindValidator;

    /**
     * Builds the schema and eagerly creates the physical-row converter tree.
     *
     * @param physicalDataType row type of the physical columns
     * @param metadataConverters converters for requested metadata columns (non-null; may be empty)
     * @param resultTypeInfo type information of the produced rows
     * @param localTimeZone session time zone for temporal conversions
     * @param rowValidator filter deciding which row kinds are emitted
     * @param sourceMultipleEnable whether multi-table (map-based) output is enabled
     */
    public MongoDBConnectorDeserializationSchema(
            RowType physicalDataType,
            MetadataConverter[] metadataConverters,
            TypeInformation<RowData> resultTypeInfo,
            ZoneId localTimeZone,
            MongoDBRowKindValidator rowValidator,
            boolean sourceMultipleEnable) {
        this.hasMetadata = checkNotNull(metadataConverters).length > 0;
        this.sourceMultipleEnable = sourceMultipleEnable;
        this.appendMetadataCollector = new AppendMetadataCollector(metadataConverters, sourceMultipleEnable);
        this.physicalConverter = createConverter(physicalDataType);
        this.resultTypeInfo = resultTypeInfo;
        this.localTimeZone = localTimeZone;
        this.rowKindValidator = rowValidator;
    }

    /**
     * Dispatches one change-stream record by its operation type.
     *
     * <p>INSERT/REPLACE use the full document; DELETE only has the document key;
     * UPDATE uses the post-image (may be null if the document was deleted before
     * the lookup — then the event is dropped). INVALIDATE is folded into DROP.
     * DDL-ish events (drop/drop-database/rename) are first checked against the
     * row-kind validator and emitted as INSERT rows carrying "db.collection".
     */
    @Override
    public void deserialize(SourceRecord record, Collector<RowData> out) throws Exception {
        Struct value = (Struct) record.value();
        Schema valueSchema = record.valueSchema();
        OperationType op = operationTypeFor(record);
        BsonDocument documentKey =
                extractBsonDocument(value, valueSchema, MongoDBEnvelope.DOCUMENT_KEY_FIELD);
        BsonDocument fullDocument =
                extractBsonDocument(value, valueSchema, MongoDBEnvelope.FULL_DOCUMENT_FIELD);
        switch (op) {
            case INSERT:
                GenericRowData insert = extractRowData(fullDocument);
                insert.setRowKind(RowKind.INSERT);
                emit(record, insert, out);
                break;
            case DELETE:
                // Only the key is available for deletes.
                GenericRowData delete = extractRowData(documentKey);
                delete.setRowKind(RowKind.DELETE);
                emit(record, delete, out);
                break;
            case UPDATE:
                // It's null if another operation deletes the document
                // before the lookup operation happens. Ignored it.
                if (fullDocument == null) {
                    break;
                }
                GenericRowData updateAfter = extractRowData(fullDocument);
                updateAfter.setRowKind(RowKind.UPDATE_AFTER);
                emit(record, updateAfter, out);
                break;
            case REPLACE:
                GenericRowData replaceAfter = extractRowData(fullDocument);
                replaceAfter.setRowKind(RowKind.UPDATE_AFTER);
                emit(record, replaceAfter, out);
                break;
            case INVALIDATE:
            case DROP:
                if (!rowKindValidator.validate(MongoDBRowKind.DROP)) {
                    return;
                }
                GenericRowData drop = extractMongoDBDdlData(value, MongoDBEnvelope.NAMESPACE_FIELD,
                        OperationType.DROP.getValue());
                drop.setRowKind(RowKind.INSERT);
                emitDdlElement(record, drop, out);
                break;
            case DROP_DATABASE:
                if (!rowKindValidator.validate(MongoDBRowKind.DROP_DATABASE)) {
                    return;
                }
                GenericRowData dropDatabase =
                        extractMongoDBDdlData(value, MongoDBEnvelope.NAMESPACE_FIELD,
                                OperationType.DROP_DATABASE.getValue());
                dropDatabase.setRowKind(RowKind.INSERT);
                emitDdlElement(record, dropDatabase, out);
                break;
            case RENAME:
                if (!rowKindValidator.validate(MongoDBRowKind.RENAME)) {
                    return;
                }
                // Rename events carry the new namespace in the "to" field.
                GenericRowData rename = extractMongoDBDdlData(value, RecordUtils.DOCUMENT_TO_FIELD,
                        OperationType.RENAME.getValue());
                rename.setRowKind(RowKind.INSERT);
                emitDdlElement(record, rename, out);
                break;
            case OTHER:
            default:
                break;
        }
    }

    /** Streaming-phase flag is ignored here; delegates to the main overload. */
    @Override
    public void deserialize(SourceRecord record, Collector<RowData> out,
            Boolean isStreamingPhase) throws Exception {
        this.deserialize(record, out);
    }

    /**
     * Builds a two-field DDL row: field 0 = {ddlType -> "db.collection"},
     * field 1 = {ddlType -> "VARCHAR"}.
     *
     * @param value envelope value struct
     * @param keyFiled name of the sub-struct holding the namespace (note: parameter
     *        name is a pre-existing typo for "keyField"; kept for byte-compatibility)
     * @param ddlType operation-type string used as the map key
     */
    private GenericRowData extractMongoDBDdlData(Struct value, String keyFiled, String ddlType) {
        Struct documentTo = (Struct) value.get(keyFiled);
        String newDb = documentTo.getString(MongoDBEnvelope.NAMESPACE_DATABASE_FIELD);
        String newColl = documentTo.getString(MongoDBEnvelope.NAMESPACE_COLLECTION_FIELD);
        String renameFormatStr = newDb + "." + newColl;
        Map<String, Object> data = new HashMap<>();
        Map<String, String> dataType = new HashMap<>();
        data.put(ddlType, renameFormatStr);
        dataType.put(ddlType, "VARCHAR");
        GenericRowData ddlRowData = new GenericRowData(2);
        ddlRowData.setField(0, data);
        ddlRowData.setField(1, dataType);
        return ddlRowData;
    }

    /** Converts a non-null BSON document into a row via the physical converter. */
    private GenericRowData extractRowData(BsonDocument document) throws Exception {
        checkNotNull(document);
        return (GenericRowData) physicalConverter.convert(document);
    }

    /**
     * Reads the named field as an extended-JSON string and parses it into a
     * BsonDocument; returns null when the field is absent from the schema or unset.
     */
    private BsonDocument extractBsonDocument(Struct value, Schema valueSchema, String fieldName) {
        if (valueSchema.field(fieldName) != null) {
            String docString = value.getString(fieldName);
            if (docString != null) {
                return BsonDocument.parse(docString);
            }
        }
        return null;
    }

    @Override
    public TypeInformation<RowData> getProducedType() {
        return resultTypeInfo;
    }

    /** Parses the envelope's operationType string field into the enum. */
    private OperationType operationTypeFor(SourceRecord record) {
        Struct value = (Struct) record.value();
        return OperationType.fromString(value.getString(MongoDBEnvelope.OPERATION_TYPE_FIELD));
    }

    /**
     * Emits a data row, first filtering by row kind, then appending metadata
     * columns if any were requested.
     */
    private void emit(SourceRecord inRecord, RowData physicalRow, Collector<RowData> collector) {
        // filter the records that is outside the `rowKind`
        if (!rowKindValidator.validate(physicalRow.getRowKind())) {
            return;
        }
        if (!hasMetadata) {
            collector.collect(physicalRow);
            return;
        }
        appendMetadataCollector.inputRecord = inRecord;
        appendMetadataCollector.outputCollector = collector;
        appendMetadataCollector.collect(physicalRow);
    }

    /**
     * Emits a DDL row; row-kind filtering already happened at the call site, so
     * only metadata appending is handled here.
     */
    private void emitDdlElement(SourceRecord inRecord, RowData physicalRow,
            Collector<RowData> collector) {
        if (!hasMetadata) {
            collector.collect(physicalRow);
            return;
        }
        appendMetadataCollector.inputRecord = inRecord;
        appendMetadataCollector.outputCollector = collector;
        appendMetadataCollector.collect(physicalRow);
    }

    // -------------------------------------------------------------------------------------
    // Runtime Converters
    // -------------------------------------------------------------------------------------

    /**
     * Runtime converter that converts objects of MongoDB Connect into objects of Flink Table & SQL
     * internal data structures.
     */
    @FunctionalInterface
    private interface DeserializationRuntimeConverter extends Serializable {

        Object convert(BsonValue docObj) throws Exception;
    }

    /**
     * Creates a runtime converter which is null safe.
     */
    private DeserializationRuntimeConverter createConverter(LogicalType type) {
        return wrapIntoNullableConverter(createNotNullConverter(type));
    }

    /**
     * Creates a runtime converter which assuming input object is not null.
     * Dispatches on the Flink logical type root; MULTISET and RAW are unsupported.
     */
    private DeserializationRuntimeConverter createNotNullConverter(LogicalType type) {
        switch (type.getTypeRoot()) {
            case NULL:
                return (docObj) -> null;
            case BOOLEAN:
                return this::convertToBoolean;
            case TINYINT:
                return this::convertToTinyInt;
            case SMALLINT:
                return this::convertToSmallInt;
            case INTEGER:
            case INTERVAL_YEAR_MONTH:
                return this::convertToInt;
            case BIGINT:
            case INTERVAL_DAY_TIME:
                return this::convertToLong;
            case DATE:
                return this::convertToDate;
            case TIME_WITHOUT_TIME_ZONE:
                return this::convertToTime;
            case TIMESTAMP_WITHOUT_TIME_ZONE:
                return this::convertToTimestamp;
            case TIMESTAMP_WITH_LOCAL_TIME_ZONE:
                return this::convertToLocalTimeZoneTimestamp;
            case FLOAT:
                return this::convertToFloat;
            case DOUBLE:
                return this::convertToDouble;
            case CHAR:
            case VARCHAR:
                return this::convertToString;
            case BINARY:
            case VARBINARY:
                return this::convertToBinary;
            case DECIMAL:
                return createDecimalConverter((DecimalType) type);
            case ROW:
                return createRowConverter((RowType) type);
            case ARRAY:
                return createArrayConverter((ArrayType) type);
            case MAP:
                return createMapConverter((MapType) type);
            case MULTISET:
            case RAW:
            default:
                throw new UnsupportedOperationException("Unsupported type: " + type);
        }
    }

    /** BOOLEAN from boolean, int32/int64 (== 1), or string ("true"/"false"). */
    private boolean convertToBoolean(BsonValue docObj) {
        if (docObj.isBoolean()) {
            return docObj.asBoolean().getValue();
        }
        if (docObj.isInt32()) {
            return docObj.asInt32().getValue() == 1;
        }
        if (docObj.isInt64()) {
            return docObj.asInt64().getValue() == 1L;
        }
        if (docObj.isString()) {
            return Boolean.parseBoolean(docObj.asString().getValue());
        }
        throw new IllegalArgumentException(
                "Unable to convert to boolean from unexpected value '"
                        + docObj
                        + "' of type "
                        + docObj.getBsonType());
    }

    /** TINYINT via narrowing cast from int32/int64, boolean as 1/0, or parsed string. */
    private byte convertToTinyInt(BsonValue docObj) {
        if (docObj.isBoolean()) {
            return docObj.asBoolean().getValue() ? (byte) 1 : (byte) 0;
        }
        if (docObj.isInt32()) {
            // NOTE(review): silent narrowing — values outside byte range wrap.
            return (byte) docObj.asInt32().getValue();
        }
        if (docObj.isInt64()) {
            return (byte) docObj.asInt64().getValue();
        }
        if (docObj.isString()) {
            return Byte.parseByte(docObj.asString().getValue());
        }
        throw new IllegalArgumentException(
                "Unable to convert to tinyint from unexpected value '"
                        + docObj
                        + "' of type "
                        + docObj.getBsonType());
    }

    /** SMALLINT via narrowing cast from int32/int64, boolean as 1/0, or parsed string. */
    private short convertToSmallInt(BsonValue docObj) {
        if (docObj.isBoolean()) {
            return docObj.asBoolean().getValue() ? (short) 1 : (short) 0;
        }
        if (docObj.isInt32()) {
            return (short) docObj.asInt32().getValue();
        }
        if (docObj.isInt64()) {
            return (short) docObj.asInt64().getValue();
        }
        if (docObj.isString()) {
            return Short.parseShort(docObj.asString().getValue());
        }
        throw new IllegalArgumentException(
                "Unable to convert to smallint from unexpected value '"
                        + docObj
                        + "' of type "
                        + docObj.getBsonType());
    }

    /**
     * INT from numbers (decimal128 saturates to MIN/MAX when non-finite), boolean,
     * datetime (epoch seconds via millis/1000), timestamp (epoch seconds), or string.
     */
    private int convertToInt(BsonValue docObj) {
        if (docObj.isNumber()) {
            return docObj.asNumber().intValue();
        }
        if (docObj.isDecimal128()) {
            Decimal128 decimal128Value = docObj.asDecimal128().decimal128Value();
            if (decimal128Value.isFinite()) {
                return decimal128Value.intValue();
            } else if (decimal128Value.isNegative()) {
                return Integer.MIN_VALUE;
            } else {
                return Integer.MAX_VALUE;
            }
        }
        if (docObj.isBoolean()) {
            return docObj.asBoolean().getValue() ? 1 : 0;
        }
        if (docObj.isDateTime()) {
            return Math.toIntExact(docObj.asDateTime().getValue() / 1000L);
        }
        if (docObj.isTimestamp()) {
            return docObj.asTimestamp().getTime();
        }
        if (docObj.isString()) {
            return Integer.parseInt(docObj.asString().getValue());
        }
        throw new IllegalArgumentException(
                "Unable to convert to integer from unexpected value '"
                        + docObj
                        + "' of type "
                        + docObj.getBsonType());
    }

    /**
     * BIGINT from numbers (decimal128 saturates when non-finite), boolean,
     * datetime (epoch millis), timestamp (seconds * 1000 -> millis), or string.
     */
    private long convertToLong(BsonValue docObj) {
        if (docObj.isNumber()) {
            return docObj.asNumber().longValue();
        }
        if (docObj.isDecimal128()) {
            Decimal128 decimal128Value = docObj.asDecimal128().decimal128Value();
            if (decimal128Value.isFinite()) {
                return decimal128Value.longValue();
            } else if (decimal128Value.isNegative()) {
                return Long.MIN_VALUE;
            } else {
                return Long.MAX_VALUE;
            }
        }
        if (docObj.isBoolean()) {
            return docObj.asBoolean().getValue() ? 1L : 0L;
        }
        if (docObj.isDateTime()) {
            return docObj.asDateTime().getValue();
        }
        if (docObj.isTimestamp()) {
            return docObj.asTimestamp().getTime() * 1000L;
        }
        if (docObj.isString()) {
            return Long.parseLong(docObj.asString().getValue());
        }
        throw new IllegalArgumentException(
                "Unable to convert to long from unexpected value '"
                        + docObj
                        + "' of type "
                        + docObj.getBsonType());
    }

    /** DOUBLE from numbers (decimal128 saturates to +/-Double.MAX_VALUE), boolean, or string. */
    private double convertToDouble(BsonValue docObj) {
        if (docObj.isNumber()) {
            return docObj.asNumber().doubleValue();
        }
        if (docObj.isDecimal128()) {
            Decimal128 decimal128Value = docObj.asDecimal128().decimal128Value();
            if (decimal128Value.isFinite()) {
                return decimal128Value.doubleValue();
            } else if (decimal128Value.isNegative()) {
                return -Double.MAX_VALUE;
            } else {
                return Double.MAX_VALUE;
            }
        }
        if (docObj.isBoolean()) {
            return docObj.asBoolean().getValue() ? 1 : 0;
        }
        if (docObj.isString()) {
            return Double.parseDouble(docObj.asString().getValue());
        }
        throw new IllegalArgumentException(
                "Unable to convert to double from unexpected value '"
                        + docObj
                        + "' of type "
                        + docObj.getBsonType());
    }

    /** FLOAT from int32/int64/double/decimal128 (saturating), boolean, or string. */
    private float convertToFloat(BsonValue docObj) {
        if (docObj.isInt32()) {
            return docObj.asInt32().getValue();
        }
        if (docObj.isInt64()) {
            return docObj.asInt64().getValue();
        }
        if (docObj.isDouble()) {
            return ((Double) docObj.asDouble().getValue()).floatValue();
        }
        if (docObj.isDecimal128()) {
            Decimal128 decimal128Value = docObj.asDecimal128().decimal128Value();
            if (decimal128Value.isFinite()) {
                return decimal128Value.floatValue();
            } else if (decimal128Value.isNegative()) {
                return -Float.MAX_VALUE;
            } else {
                return Float.MAX_VALUE;
            }
        }
        if (docObj.isBoolean()) {
            return docObj.asBoolean().getValue() ? 1f : 0f;
        }
        if (docObj.isString()) {
            return Float.parseFloat(docObj.asString().getValue());
        }
        throw new IllegalArgumentException(
                "Unable to convert to float from unexpected value '"
                        + docObj
                        + "' of type "
                        + docObj.getBsonType());
    }

    private LocalDate convertInstantToLocalDate(Instant instant) {
        return convertInstantToLocalDateTime(instant).toLocalDate();
    }

    private LocalTime convertInstantToLocalTime(Instant instant) {
        return convertInstantToLocalDateTime(instant).toLocalTime();
    }

    // All localization goes through the configured session time zone.
    private LocalDateTime convertInstantToLocalDateTime(Instant instant) {
        return LocalDateTime.ofInstant(instant, localTimeZone);
    }

    private ZonedDateTime convertInstantToZonedDateTime(Instant instant) {
        return ZonedDateTime.ofInstant(instant, localTimeZone);
    }

    // BsonTimestamp stores epoch *seconds*; sub-second info does not exist here.
    private Instant convertToInstant(BsonTimestamp bsonTimestamp) {
        return Instant.ofEpochSecond(bsonTimestamp.getTime());
    }

    // BsonDateTime stores epoch *milliseconds*.
    private Instant convertToInstant(BsonDateTime bsonDateTime) {
        return Instant.ofEpochMilli(bsonDateTime.getValue());
    }

    /**
     * A conversion from DateType to int describes the number of days since epoch.
     *
     * @see org.apache.flink.table.types.logical.DateType
     */
    private int convertToDate(BsonValue docObj) {
        if (docObj.isDateTime()) {
            Instant instant = convertToInstant(docObj.asDateTime());
            return (int) convertInstantToLocalDate(instant).toEpochDay();
        }
        if (docObj.isTimestamp()) {
            Instant instant = convertToInstant(docObj.asTimestamp());
            return (int) convertInstantToLocalDate(instant).toEpochDay();
        }
        throw new IllegalArgumentException(
                "Unable to convert to date from unexpected value '"
                        + docObj
                        + "' of type "
                        + docObj.getBsonType());
    }

    /**
     * A conversion from TimeType to int describes the number of milliseconds of the day.
     *
     * <p>NOTE(review): computed as toSecondOfDay() * 1000, so millisecond precision
     * of BsonDateTime values is truncated to whole seconds.
     *
     * @see org.apache.flink.table.types.logical.TimeType
     */
    private int convertToTime(BsonValue docObj) {
        if (docObj.isDateTime()) {
            Instant instant = convertToInstant(docObj.asDateTime());
            return convertInstantToLocalTime(instant).toSecondOfDay() * 1000;
        }
        if (docObj.isTimestamp()) {
            Instant instant = convertToInstant(docObj.asTimestamp());
            return convertInstantToLocalTime(instant).toSecondOfDay() * 1000;
        }
        throw new IllegalArgumentException(
                "Unable to convert to time from unexpected value '"
                        + docObj
                        + "' of type "
                        + docObj.getBsonType());
    }

    /** TIMESTAMP (without time zone): localized to the session zone first. */
    private TimestampData convertToTimestamp(BsonValue docObj) {
        if (docObj.isDateTime()) {
            return TimestampData.fromLocalDateTime(
                    convertInstantToLocalDateTime(convertToInstant(docObj.asDateTime())));
        }
        if (docObj.isTimestamp()) {
            return TimestampData.fromLocalDateTime(
                    convertInstantToLocalDateTime(convertToInstant(docObj.asTimestamp())));
        }
        throw new IllegalArgumentException(
                "Unable to convert to timestamp from unexpected value '"
                        + docObj
                        + "' of type "
                        + docObj.getBsonType());
    }

    /** TIMESTAMP_LTZ: kept as raw epoch millis (no localization). */
    private TimestampData convertToLocalTimeZoneTimestamp(BsonValue docObj) {
        if (docObj.isDateTime()) {
            return TimestampData.fromEpochMillis(docObj.asDateTime().getValue());
        }
        if (docObj.isTimestamp()) {
            return TimestampData.fromEpochMillis(docObj.asTimestamp().getTime() * 1000L);
        }
        throw new IllegalArgumentException(
                "Unable to convert to timestamp with local timezone from unexpected value '"
                        + docObj
                        + "' of type "
                        + docObj.getBsonType());
    }

    /** BYTES: only BsonBinary payloads are accepted. */
    private byte[] convertToBinary(BsonValue docObj) {
        if (docObj.isBinary()) {
            return docObj.asBinary().getData();
        }
        throw new UnsupportedOperationException(
                "Unsupported BYTES value type: " + docObj.getClass().getSimpleName());
    }

    /**
     * STRING: near-universal fallback rendering of any BSON value — documents and
     * arrays as JSON, binaries as UUID or hex, ObjectId as hex, temporals as
     * ISO-8601 in the session zone, regex as "/pattern/options", min/max keys by
     * type name, etc.
     */
    private StringData convertToString(BsonValue docObj) {
        if (docObj.isString()) {
            return StringData.fromString(docObj.asString().getValue());
        }
        if (docObj.isDocument()) {
            // convert document to json string
            return StringData.fromString(docObj.asDocument().toJson());
        }
        if (docObj.isBinary()) {
            BsonBinary bsonBinary = docObj.asBinary();
            if (BsonBinarySubType.isUuid(bsonBinary.getType())) {
                return StringData.fromString(bsonBinary.asUuid().toString());
            }
            return StringData.fromString(HexUtils.toHex(bsonBinary.getData()));
        }
        if (docObj.isObjectId()) {
            return StringData.fromString(docObj.asObjectId().getValue().toHexString());
        }
        if (docObj.isInt32()) {
            return StringData.fromString(String.valueOf(docObj.asInt32().getValue()));
        }
        if (docObj.isInt64()) {
            return StringData.fromString(String.valueOf(docObj.asInt64().getValue()));
        }
        if (docObj.isDouble()) {
            return StringData.fromString(String.valueOf(docObj.asDouble().getValue()));
        }
        if (docObj.isDecimal128()) {
            return StringData.fromString(docObj.asDecimal128().getValue().toString());
        }
        if (docObj.isBoolean()) {
            return StringData.fromString(String.valueOf(docObj.asBoolean().getValue()));
        }
        if (docObj.isDateTime()) {
            Instant instant = convertToInstant(docObj.asDateTime());
            return StringData.fromString(
                    convertInstantToZonedDateTime(instant).format(ISO_OFFSET_DATE_TIME));
        }
        if (docObj.isTimestamp()) {
            Instant instant = convertToInstant(docObj.asTimestamp());
            return StringData.fromString(
                    convertInstantToZonedDateTime(instant).format(ISO_OFFSET_DATE_TIME));
        }
        if (docObj.isArray()) {
            // convert bson array to json string
            // The anonymous JsonWriter tweaks writer state so a top-level array
            // (normally invalid JSON document) can be serialized.
            Writer writer = new StringWriter();
            JsonWriter jsonArrayWriter = new JsonWriter(writer) {

                @Override
                public void writeStartArray() {
                    doWriteStartArray();
                    setState(State.VALUE);
                }

                @Override
                public void writeEndArray() {
                    doWriteEndArray();
                    setState(getNextState());
                }
            };
            new BsonArrayCodec()
                    .encode(jsonArrayWriter, docObj.asArray(), EncoderContext.builder().build());
            return StringData.fromString(writer.toString());
        }
        if (docObj.isRegularExpression()) {
            BsonRegularExpression regex = docObj.asRegularExpression();
            return StringData.fromString(
                    String.format("/%s/%s", regex.getPattern(), regex.getOptions()));
        }
        if (docObj.isJavaScript()) {
            return StringData.fromString(docObj.asJavaScript().getCode());
        }
        if (docObj.isJavaScriptWithScope()) {
            return StringData.fromString(docObj.asJavaScriptWithScope().getCode());
        }
        if (docObj.isSymbol()) {
            return StringData.fromString(docObj.asSymbol().getSymbol());
        }
        if (docObj.isDBPointer()) {
            return StringData.fromString(docObj.asDBPointer().getId().toHexString());
        }
        if (docObj instanceof BsonMinKey || docObj instanceof BsonMaxKey) {
            return StringData.fromString(docObj.getBsonType().name());
        }
        throw new IllegalArgumentException(
                "Unable to convert to string from unexpected value '"
                        + docObj
                        + "' of type "
                        + docObj.getBsonType());
    }

    /**
     * DECIMAL(p, s) converter: accepts string, decimal128 (saturating non-finite
     * values to +/-Double.MAX_VALUE), double, int32, int64.
     */
    private DeserializationRuntimeConverter createDecimalConverter(DecimalType decimalType) {
        final int precision = decimalType.getPrecision();
        final int scale = decimalType.getScale();
        return (docObj) -> {
            BigDecimal bigDecimal;
            if (docObj.isString()) {
                bigDecimal = new BigDecimal(docObj.asString().getValue());
            } else if (docObj.isDecimal128()) {
                Decimal128 decimal128Value = docObj.asDecimal128().decimal128Value();
                if (decimal128Value.isFinite()) {
                    bigDecimal = docObj.asDecimal128().decimal128Value().bigDecimalValue();
                } else if (decimal128Value.isNegative()) {
                    bigDecimal = BigDecimal.valueOf(-Double.MAX_VALUE);
                } else {
                    bigDecimal = BigDecimal.valueOf(Double.MAX_VALUE);
                }
            } else if (docObj.isDouble()) {
                bigDecimal = BigDecimal.valueOf(docObj.asDouble().doubleValue());
            } else if (docObj.isInt32()) {
                bigDecimal = BigDecimal.valueOf(docObj.asInt32().getValue());
            } else if (docObj.isInt64()) {
                bigDecimal = BigDecimal.valueOf(docObj.asInt64().getValue());
            } else {
                throw new IllegalArgumentException(
                        "Unable to convert to decimal from unexpected value '"
                                + docObj
                                + "' of type "
                                + docObj.getBsonType());
            }
            return DecimalData.fromBigDecimal(bigDecimal, precision, scale);
        };
    }

    /**
     * ROW converter. In single-table mode, builds a fixed-arity row using the
     * declared field names/types. In multi-table mode (sourceMultipleEnable), infers
     * each field's logical type per value and emits a two-field row of
     * (name -> value, name -> type-name); DecimalData/StringData are stringified and
     * TimestampData becomes java.sql.Timestamp with type name "TIMESTAMP".
     * Per-field conversion failures are logged and the field skipped.
     */
    private DeserializationRuntimeConverter createRowConverter(RowType rowType) {
        final DeserializationRuntimeConverter[] fieldConverters = rowType.getFields().stream()
                .map(RowType.RowField::getType)
                .map(this::createConverter)
                .toArray(DeserializationRuntimeConverter[]::new);
        final String[] fieldNames = rowType.getFieldNames().toArray(new String[0]);
        return (docObj) -> {
            if (!docObj.isDocument()) {
                throw new IllegalArgumentException(
                        "Unable to convert to rowType from unexpected value '"
                                + docObj
                                + "' of type "
                                + docObj.getBsonType());
            }
            BsonDocument document = docObj.asDocument();
            if (!sourceMultipleEnable) {
                int arity = fieldNames.length;
                GenericRowData row = new GenericRowData(arity);
                for (int i = 0; i < arity; i++) {
                    String fieldName = fieldNames[i];
                    BsonValue fieldValue = document.get(fieldName);
                    Object convertedField = convertField(fieldConverters[i], fieldValue);
                    row.setField(i, convertedField);
                }
                return row;
            } else {
                Map<String, Object> data = new LinkedHashMap<>();
                Map<String, String> dataType = new LinkedHashMap<>();
                document.forEach((key, value) -> {
                    try {
                        LogicalType logicalType = RecordUtils.convertLogicType(value);
                        dataType.put(key, logicalType.getTypeRoot().name());
                        Object fieldValue = convertField(createConverter(logicalType), value);
                        if (fieldValue instanceof DecimalData) {
                            fieldValue = fieldValue.toString();
                        }
                        if (fieldValue instanceof TimestampData) {
                            fieldValue = ((TimestampData) fieldValue).toTimestamp();
                            dataType.put(key, "TIMESTAMP");
                        }
                        if (fieldValue instanceof StringData) {
                            fieldValue = fieldValue.toString();
                        }
                        data.put(key, fieldValue);
                    } catch (Exception e) {
                        // best-effort: a bad field is dropped, not fatal for the row
                        LOG.error("parse value get err:", e);
                    }
                });
                GenericRowData row = new GenericRowData(2);
                row.setField(0, data);
                row.setField(1, dataType);
                return row;
            }
        };
    }

    /** ARRAY converter: element-wise conversion into a GenericArrayData. */
    private DeserializationRuntimeConverter createArrayConverter(ArrayType arrayType) {
        final Class<?> elementClass =
                LogicalTypeUtils.toInternalConversionClass(arrayType.getElementType());
        final DeserializationRuntimeConverter elementConverter =
                createConverter(arrayType.getElementType());
        return (docObj) -> {
            if (!docObj.isArray()) {
                throw new IllegalArgumentException(
                        "Unable to convert to arrayType from unexpected value '"
                                + docObj
                                + "' of type "
                                + docObj.getBsonType());
            }
            List<BsonValue> in = docObj.asArray();
            final Object[] elementArray = (Object[]) Array.newInstance(elementClass, in.size());
            for (int i = 0; i < in.size(); i++) {
                elementArray[i] = elementConverter.convert(in.get(i));
            }
            return new GenericArrayData(elementArray);
        };
    }

    /** MAP converter: document keys become StringData; keys must be string-compatible. */
    private DeserializationRuntimeConverter createMapConverter(MapType mapType) {
        LogicalType keyType = mapType.getKeyType();
        checkArgument(keyType.supportsInputConversion(String.class));
        LogicalType valueType = mapType.getValueType();
        DeserializationRuntimeConverter valueConverter = createConverter(valueType);
        return (docObj) -> {
            if (!docObj.isDocument()) {
                throw new IllegalArgumentException(
                        "Unable to convert to rowType from unexpected value '"
                                + docObj
                                + "' of type "
                                + docObj.getBsonType());
            }
            BsonDocument document = docObj.asDocument();
            Map<StringData, Object> map = new HashMap<>();
            for (String key : document.keySet()) {
                map.put(
                        StringData.fromString(key),
                        convertField(valueConverter, document.get(key)));
            }
            return new GenericMapData(map);
        };
    }

    /** Null-safe field conversion: absent (null) BSON values map to SQL NULL. */
    private Object convertField(
            DeserializationRuntimeConverter fieldConverter, BsonValue fieldValue) throws Exception {
        if (fieldValue == null) {
            return null;
        } else {
            return fieldConverter.convert(fieldValue);
        }
    }

    // Body continues beyond this view; only the header is visible here.
    private DeserializationRuntimeConverter wrapIntoNullableConverter(
            DeserializationRuntimeConverter converter) {
return (docObj) -> { if (docObj == null || docObj.isNull() || docObj instanceof BsonUndefined) { return null; } if (docObj.isDecimal128() && docObj.asDecimal128().getValue().isNaN()) { return null; } return converter.convert(docObj); }; } }
apache/rya
35,859
extras/rya.pcj.fluo/pcj.fluo.integration/src/test/java/org/apache/rya/indexing/pcj/fluo/integration/KafkaExportIT.java
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.apache.rya.indexing.pcj.fluo.integration; import static java.util.Objects.requireNonNull; import static org.junit.Assert.assertEquals; import java.util.Collection; import java.util.HashMap; import java.util.HashSet; import java.util.Iterator; import java.util.Map; import java.util.Set; import java.util.UUID; import org.apache.kafka.clients.consumer.ConsumerRecord; import org.apache.kafka.clients.consumer.ConsumerRecords; import org.apache.kafka.clients.consumer.KafkaConsumer; import org.apache.rya.api.model.VisibilityBindingSet; import org.apache.rya.indexing.pcj.storage.accumulo.VariableOrder; import org.apache.rya.pcj.fluo.test.base.KafkaExportITBase; import org.eclipse.rdf4j.model.Statement; import org.eclipse.rdf4j.model.ValueFactory; import org.eclipse.rdf4j.model.impl.SimpleValueFactory; import org.eclipse.rdf4j.model.vocabulary.XMLSchema; import org.eclipse.rdf4j.query.BindingSet; import org.eclipse.rdf4j.query.impl.MapBindingSet; import org.junit.Test; import com.google.common.collect.Sets; /** * Performs integration tests over the Fluo application geared towards Kafka PCJ exporting. 
* <p>
* These tests might be ignored so that they will not run as unit tests while building the application.
* Run this test from Maven command line:
* $ cd rya/extras/rya.pcj.fluo/pcj.fluo.integration
* $ mvn surefire:test -Dtest=KafkaExportIT
*/
public class KafkaExportIT extends KafkaExportITBase {

    // Verifies that every new join result produced by the Fluo app is exported
    // to the PCJ's Kafka topic.
    @Test
    public void newResultsExportedTest() throws Exception {
        final String sparql =
                "SELECT ?customer ?worker ?city { " +
                    "FILTER(?customer = <http://Alice>) " +
                    "FILTER(?city = <http://London>) " +
                    "?customer <http://talksTo> ?worker. " +
                    "?worker <http://livesIn> ?city. " +
                    "?worker <http://worksAt> <http://Chipotle>. " +
                "}";

        // Triples that will be streamed into Fluo after the PCJ has been created.
        final ValueFactory vf = SimpleValueFactory.getInstance();
        final Collection<Statement> statements = Sets.newHashSet(
                vf.createStatement(vf.createIRI("http://Alice"), vf.createIRI("http://talksTo"), vf.createIRI("http://Bob")),
                vf.createStatement(vf.createIRI("http://Bob"), vf.createIRI("http://livesIn"), vf.createIRI("http://London")),
                vf.createStatement(vf.createIRI("http://Bob"), vf.createIRI("http://worksAt"), vf.createIRI("http://Chipotle")),

                vf.createStatement(vf.createIRI("http://Alice"), vf.createIRI("http://talksTo"), vf.createIRI("http://Charlie")),
                vf.createStatement(vf.createIRI("http://Charlie"), vf.createIRI("http://livesIn"), vf.createIRI("http://London")),
                vf.createStatement(vf.createIRI("http://Charlie"), vf.createIRI("http://worksAt"), vf.createIRI("http://Chipotle")),

                vf.createStatement(vf.createIRI("http://Alice"), vf.createIRI("http://talksTo"), vf.createIRI("http://David")),
                vf.createStatement(vf.createIRI("http://David"), vf.createIRI("http://livesIn"), vf.createIRI("http://London")),
                vf.createStatement(vf.createIRI("http://David"), vf.createIRI("http://worksAt"), vf.createIRI("http://Chipotle")),

                // Eve lives in Leeds, so she is filtered out of the results.
                vf.createStatement(vf.createIRI("http://Alice"), vf.createIRI("http://talksTo"), vf.createIRI("http://Eve")),
                vf.createStatement(vf.createIRI("http://Eve"), vf.createIRI("http://livesIn"), vf.createIRI("http://Leeds")),
                vf.createStatement(vf.createIRI("http://Eve"), vf.createIRI("http://worksAt"), vf.createIRI("http://Chipotle")),

                // Frank talks to Alice (not the other way around), so he is filtered out.
                vf.createStatement(vf.createIRI("http://Frank"), vf.createIRI("http://talksTo"), vf.createIRI("http://Alice")),
                vf.createStatement(vf.createIRI("http://Frank"), vf.createIRI("http://livesIn"), vf.createIRI("http://London")),
                vf.createStatement(vf.createIRI("http://Frank"), vf.createIRI("http://worksAt"), vf.createIRI("http://Chipotle")));

        // Create the PCJ in Fluo and load the statements into Rya.
        final String pcjId = loadDataAndCreateQuery(sparql, statements);

        // The expected results of the SPARQL query once the PCJ has been computed.
        final Set<BindingSet> expectedResult = new HashSet<>();

        MapBindingSet bs = new MapBindingSet();
        bs.addBinding("customer", vf.createIRI("http://Alice"));
        bs.addBinding("worker", vf.createIRI("http://Bob"));
        bs.addBinding("city", vf.createIRI("http://London"));
        expectedResult.add( new VisibilityBindingSet(bs) );

        bs = new MapBindingSet();
        bs.addBinding("customer", vf.createIRI("http://Alice"));
        bs.addBinding("worker", vf.createIRI("http://Charlie"));
        bs.addBinding("city", vf.createIRI("http://London"));
        expectedResult.add( new VisibilityBindingSet(bs) );

        bs = new MapBindingSet();
        bs.addBinding("customer", vf.createIRI("http://Alice"));
        bs.addBinding("worker", vf.createIRI("http://David"));
        bs.addBinding("city", vf.createIRI("http://London"));
        expectedResult.add( new VisibilityBindingSet(bs) );

        // Ensure the last result matches the expected result.
        final Set<VisibilityBindingSet> result = readAllResults(pcjId);
        assertEquals(expectedResult, result);
    }

    @Test
    public void min() throws Exception {
        // A query that finds the minimum price for an item within the inventory.
        final String sparql =
                "SELECT (min(?price) as ?minPrice) { " +
                    "?item <urn:price> ?price . " +
                "}";

        // Create the Statements that will be loaded into Rya.
        final ValueFactory vf = SimpleValueFactory.getInstance();
        final Collection<Statement> statements = Sets.newHashSet(
                vf.createStatement(vf.createIRI("urn:apple"), vf.createIRI("urn:price"), vf.createLiteral(2.50)),
                vf.createStatement(vf.createIRI("urn:gum"), vf.createIRI("urn:price"), vf.createLiteral(0.99)),
                vf.createStatement(vf.createIRI("urn:sandwich"), vf.createIRI("urn:price"), vf.createLiteral(4.99)));

        // Create the PCJ in Fluo and load the statements into Rya.
        final String pcjId = loadDataAndCreateQuery(sparql, statements);

        // Create the expected results of the SPARQL query once the PCJ has been computed.
        final MapBindingSet expectedResult = new MapBindingSet();
        expectedResult.addBinding("minPrice", vf.createLiteral(0.99));

        // Ensure the last result matches the expected result.
        final VisibilityBindingSet result = readLastResult(pcjId);
        assertEquals(expectedResult, result);
    }

    @Test
    public void max() throws Exception {
        // A query that finds the maximum price for an item within the inventory.
        final String sparql =
                "SELECT (max(?price) as ?maxPrice) { " +
                    "?item <urn:price> ?price . " +
                "}";

        // Create the Statements that will be loaded into Rya.
        final ValueFactory vf = SimpleValueFactory.getInstance();
        final Collection<Statement> statements = Sets.newHashSet(
                vf.createStatement(vf.createIRI("urn:apple"), vf.createIRI("urn:price"), vf.createLiteral(2.50)),
                vf.createStatement(vf.createIRI("urn:gum"), vf.createIRI("urn:price"), vf.createLiteral(0.99)),
                vf.createStatement(vf.createIRI("urn:sandwich"), vf.createIRI("urn:price"), vf.createLiteral(4.99)));

        // Create the PCJ in Fluo and load the statements into Rya.
        final String pcjId = loadDataAndCreateQuery(sparql, statements);

        // Create the expected results of the SPARQL query once the PCJ has been computed.
        final MapBindingSet expectedResult = new MapBindingSet();
        expectedResult.addBinding("maxPrice", vf.createLiteral(4.99));

        // Ensure the last result matches the expected result.
        final VisibilityBindingSet result = readLastResult(pcjId);
        assertEquals(expectedResult, result);
    }

    @Test
    public void count() throws Exception {
        // A query that counts the number of unique items that are in the inventory.
        final String sparql =
                "SELECT (count(?item) as ?itemCount) { " +
                    "?item <urn:id> ?id . " +
                "}";

        // Create the Statements that will be loaded into Rya.
        final ValueFactory vf = SimpleValueFactory.getInstance();
        final Collection<Statement> statements = Sets.newHashSet(
                // Three that are part of the count.
                vf.createStatement(vf.createIRI("urn:apple"), vf.createIRI("urn:id"), vf.createLiteral(UUID.randomUUID().toString())),
                vf.createStatement(vf.createIRI("urn:gum"), vf.createIRI("urn:id"), vf.createLiteral(UUID.randomUUID().toString())),
                vf.createStatement(vf.createIRI("urn:sandwich"), vf.createIRI("urn:id"), vf.createLiteral(UUID.randomUUID().toString())),

                // One that is not.
                vf.createStatement(vf.createIRI("urn:sandwich"), vf.createIRI("urn:price"), vf.createLiteral(3.99)));

        // Create the PCJ in Fluo and load the statements into Rya.
        final String pcjId = loadDataAndCreateQuery(sparql, statements);

        // Create the expected results of the SPARQL query once the PCJ has been computed.
        final MapBindingSet expectedResult = new MapBindingSet();
        expectedResult.addBinding("itemCount", vf.createLiteral("3", XMLSchema.INTEGER));

        // Ensure the last result matches the expected result.
        final VisibilityBindingSet result = readLastResult(pcjId);
        assertEquals(expectedResult, result);
    }

    @Test
    public void sum() throws Exception {
        // A query that sums the counts of all of the items that are in the inventory.
        final String sparql =
                "SELECT (sum(?count) as ?itemSum) { " +
                    "?item <urn:count> ?count . " +
                "}";

        // Create the Statements that will be loaded into Rya.
        final ValueFactory vf = SimpleValueFactory.getInstance();
        final Collection<Statement> statements = Sets.newHashSet(
                vf.createStatement(vf.createIRI("urn:apple"), vf.createIRI("urn:count"), vf.createLiteral(5)),
                vf.createStatement(vf.createIRI("urn:gum"), vf.createIRI("urn:count"), vf.createLiteral(7)),
                vf.createStatement(vf.createIRI("urn:sandwich"), vf.createIRI("urn:count"), vf.createLiteral(2)));

        // Create the PCJ in Fluo and load the statements into Rya.
        final String pcjId = loadDataAndCreateQuery(sparql, statements);

        // Create the expected results of the SPARQL query once the PCJ has been computed.
        final MapBindingSet expectedResult = new MapBindingSet();
        expectedResult.addBinding("itemSum", vf.createLiteral("14", XMLSchema.INTEGER));

        // Ensure the last result matches the expected result.
        final VisibilityBindingSet result = readLastResult(pcjId);
        assertEquals(expectedResult, result);
    }

    @Test
    public void average() throws Exception {
        // A query that finds the average price for an item that is in the inventory.
        final String sparql =
                "SELECT (avg(?price) as ?averagePrice) { " +
                    "?item <urn:price> ?price . " +
                "}";

        // Create the Statements that will be loaded into Rya.
        final ValueFactory vf = SimpleValueFactory.getInstance();
        final Collection<Statement> statements = Sets.newHashSet(
                vf.createStatement(vf.createIRI("urn:apple"), vf.createIRI("urn:price"), vf.createLiteral(3)),
                vf.createStatement(vf.createIRI("urn:gum"), vf.createIRI("urn:price"), vf.createLiteral(4)),
                vf.createStatement(vf.createIRI("urn:sandwich"), vf.createIRI("urn:price"), vf.createLiteral(8)));

        // Create the PCJ in Fluo and load the statements into Rya.
        final String pcjId = loadDataAndCreateQuery(sparql, statements);

        // Create the expected results of the SPARQL query once the PCJ has been computed.
        final MapBindingSet expectedResult = new MapBindingSet();
        expectedResult.addBinding("averagePrice", vf.createLiteral("5", XMLSchema.DECIMAL));

        // Ensure the last result matches the expected result.
        final VisibilityBindingSet result = readLastResult(pcjId);
        assertEquals(expectedResult, result);
    }

    @Test
    public void aggregateWithFilter() throws Exception {
        // A query that filters results from a statement pattern before applying the aggregation function.
        final String sparql =
                "SELECT (min(?price) as ?minPrice) { " +
                    "FILTER(?price > 1.00) " +
                    "?item <urn:price> ?price . " +
                "}";

        // Create the Statements that will be loaded into Rya.
        final ValueFactory vf = SimpleValueFactory.getInstance();
        final Collection<Statement> statements = Sets.newHashSet(
                vf.createStatement(vf.createIRI("urn:apple"), vf.createIRI("urn:price"), vf.createLiteral(2.50)),
                vf.createStatement(vf.createIRI("urn:gum"), vf.createIRI("urn:price"), vf.createLiteral(0.99)),
                vf.createStatement(vf.createIRI("urn:sandwich"), vf.createIRI("urn:price"), vf.createLiteral(4.99)));

        // Create the PCJ in Fluo and load the statements into Rya.
        final String pcjId = loadDataAndCreateQuery(sparql, statements);

        // Create the expected results of the SPARQL query once the PCJ has been computed.
        // gum (0.99) is excluded by the filter, so the minimum is 2.50.
        final MapBindingSet expectedResult = new MapBindingSet();
        expectedResult.addBinding("minPrice", vf.createLiteral(2.50));

        // Ensure the last result matches the expected result.
        final VisibilityBindingSet result = readLastResult(pcjId);
        assertEquals(expectedResult, result);
    }

    @Test
    public void multipleAggregations() throws Exception {
        // A query that both counts the number of items being averaged and finds the average price.
        final String sparql =
                "SELECT (count(?item) as ?itemCount) (avg(?price) as ?averagePrice) {" +
                    "?item <urn:price> ?price . " +
                "}";

        // Create the Statements that will be loaded into Rya.
        final ValueFactory vf = SimpleValueFactory.getInstance();
        final Collection<Statement> statements = Sets.newHashSet(
                vf.createStatement(vf.createIRI("urn:apple"), vf.createIRI("urn:price"), vf.createLiteral(5.25)),
                vf.createStatement(vf.createIRI("urn:gum"), vf.createIRI("urn:price"), vf.createLiteral(7)),
                vf.createStatement(vf.createIRI("urn:sandwich"), vf.createIRI("urn:price"), vf.createLiteral(2.75)));

        // Create the PCJ in Fluo and load the statements into Rya.
        final String pcjId = loadDataAndCreateQuery(sparql, statements);

        // Create the expected results of the SPARQL query once the PCJ has been computed.
        final MapBindingSet expectedResult = new MapBindingSet();
        expectedResult.addBinding("itemCount", vf.createLiteral("3", XMLSchema.INTEGER));
        expectedResult.addBinding("averagePrice", vf.createLiteral("5.0", XMLSchema.DECIMAL));

        // Ensure the last result matches the expected result.
        final VisibilityBindingSet result = readLastResult(pcjId);
        assertEquals(expectedResult, result);
    }

    @Test
    public void groupBySingleBinding() throws Exception {
        // A query that groups what is aggregated by one of the keys.
        final String sparql =
                "SELECT ?item (avg(?price) as ?averagePrice) {" +
                    "?item <urn:price> ?price . " +
                "} " +
                "GROUP BY ?item";

        // Create the Statements that will be loaded into Rya.
        final ValueFactory vf = SimpleValueFactory.getInstance();
        final Collection<Statement> statements = Sets.newHashSet(
                vf.createStatement(vf.createIRI("urn:apple"), vf.createIRI("urn:price"), vf.createLiteral(5.25)),
                vf.createStatement(vf.createIRI("urn:apple"), vf.createIRI("urn:price"), vf.createLiteral(7)),
                vf.createStatement(vf.createIRI("urn:apple"), vf.createIRI("urn:price"), vf.createLiteral(2.75)),
                vf.createStatement(vf.createIRI("urn:banana"), vf.createIRI("urn:price"), vf.createLiteral(2.75)),
                vf.createStatement(vf.createIRI("urn:banana"), vf.createIRI("urn:price"), vf.createLiteral(1.99)));

        // Create the PCJ in Fluo and load the statements into Rya.
        final String pcjId = loadDataAndCreateQuery(sparql, statements);

        // Create the expected results of the SPARQL query once the PCJ has been computed.
        final Set<VisibilityBindingSet> expectedResults = new HashSet<>();

        MapBindingSet bs = new MapBindingSet();
        bs.addBinding("item", vf.createIRI("urn:apple"));
        bs.addBinding("averagePrice", vf.createLiteral("5.0", XMLSchema.DECIMAL));
        expectedResults.add( new VisibilityBindingSet(bs) );

        bs = new MapBindingSet();
        bs.addBinding("item", vf.createIRI("urn:banana"));
        bs.addBinding("averagePrice", vf.createLiteral("2.37", XMLSchema.DECIMAL));
        expectedResults.add( new VisibilityBindingSet(bs) );

        // Verify the end results of the query match the expected results.
        final Set<VisibilityBindingSet> results = readGroupedResults(pcjId, new VariableOrder("item"));
        assertEquals(expectedResults, results);
    }

    @Test
    public void groupByManyBindings_averages() throws Exception {
        // A query that groups what is aggregated by two of the keys.
        final String sparql =
                "SELECT ?type ?location (avg(?price) as ?averagePrice) {" +
                    "?id <urn:type> ?type . " +
                    "?id <urn:location> ?location ." +
                    "?id <urn:price> ?price ." +
                "} " +
                "GROUP BY ?type ?location";

        // Create the Statements that will be loaded into Rya.
        final ValueFactory vf = SimpleValueFactory.getInstance();
        final Collection<Statement> statements = Sets.newHashSet(
                // American items that will be averaged.
                vf.createStatement(vf.createIRI("urn:1"), vf.createIRI("urn:type"), vf.createLiteral("apple")),
                vf.createStatement(vf.createIRI("urn:1"), vf.createIRI("urn:location"), vf.createLiteral("USA")),
                vf.createStatement(vf.createIRI("urn:1"), vf.createIRI("urn:price"), vf.createLiteral(2.50)),

                vf.createStatement(vf.createIRI("urn:2"), vf.createIRI("urn:type"), vf.createLiteral("cheese")),
                vf.createStatement(vf.createIRI("urn:2"), vf.createIRI("urn:location"), vf.createLiteral("USA")),
                vf.createStatement(vf.createIRI("urn:2"), vf.createIRI("urn:price"), vf.createLiteral(.99)),

                vf.createStatement(vf.createIRI("urn:3"), vf.createIRI("urn:type"), vf.createLiteral("cheese")),
                vf.createStatement(vf.createIRI("urn:3"), vf.createIRI("urn:location"), vf.createLiteral("USA")),
                vf.createStatement(vf.createIRI("urn:3"), vf.createIRI("urn:price"), vf.createLiteral(5.25)),

                // French items that will be averaged.
                vf.createStatement(vf.createIRI("urn:4"), vf.createIRI("urn:type"), vf.createLiteral("cheese")),
                vf.createStatement(vf.createIRI("urn:4"), vf.createIRI("urn:location"), vf.createLiteral("France")),
                vf.createStatement(vf.createIRI("urn:4"), vf.createIRI("urn:price"), vf.createLiteral(8.5)),

                vf.createStatement(vf.createIRI("urn:5"), vf.createIRI("urn:type"), vf.createLiteral("cigarettes")),
                vf.createStatement(vf.createIRI("urn:5"), vf.createIRI("urn:location"), vf.createLiteral("France")),
                vf.createStatement(vf.createIRI("urn:5"), vf.createIRI("urn:price"), vf.createLiteral(3.99)),

                vf.createStatement(vf.createIRI("urn:6"), vf.createIRI("urn:type"), vf.createLiteral("cigarettes")),
                vf.createStatement(vf.createIRI("urn:6"), vf.createIRI("urn:location"), vf.createLiteral("France")),
                vf.createStatement(vf.createIRI("urn:6"), vf.createIRI("urn:price"), vf.createLiteral(4.99)));

        // Create the PCJ in Fluo and load the statements into Rya.
        final String pcjId = loadDataAndCreateQuery(sparql, statements);

        // Create the expected results of the SPARQL query once the PCJ has been computed.
        final Set<VisibilityBindingSet> expectedResults = new HashSet<>();

        MapBindingSet bs = new MapBindingSet();
        bs.addBinding("type", vf.createLiteral("apple", XMLSchema.STRING));
        bs.addBinding("location", vf.createLiteral("USA", XMLSchema.STRING));
        bs.addBinding("averagePrice", vf.createLiteral("2.5", XMLSchema.DECIMAL));
        expectedResults.add( new VisibilityBindingSet(bs) );

        bs = new MapBindingSet();
        bs.addBinding("type", vf.createLiteral("cheese", XMLSchema.STRING));
        bs.addBinding("location", vf.createLiteral("USA", XMLSchema.STRING));
        bs.addBinding("averagePrice", vf.createLiteral("3.12", XMLSchema.DECIMAL));
        expectedResults.add( new VisibilityBindingSet(bs) );

        bs = new MapBindingSet();
        bs.addBinding("type", vf.createLiteral("cheese", XMLSchema.STRING));
        bs.addBinding("location", vf.createLiteral("France", XMLSchema.STRING));
        bs.addBinding("averagePrice", vf.createLiteral("8.5", XMLSchema.DECIMAL));
        expectedResults.add( new VisibilityBindingSet(bs));

        bs = new MapBindingSet();
        bs.addBinding("type", vf.createLiteral("cigarettes", XMLSchema.STRING));
        bs.addBinding("location", vf.createLiteral("France", XMLSchema.STRING));
        bs.addBinding("averagePrice", vf.createLiteral("4.49", XMLSchema.DECIMAL));
        expectedResults.add( new VisibilityBindingSet(bs) );

        // Verify the end results of the query match the expected results.
        final Set<VisibilityBindingSet> results = readGroupedResults(pcjId, new VariableOrder("type", "location"));
        assertEquals(expectedResults, results);
    }

    @Test
    public void nestedGroupByManyBindings_averages() throws Exception {
        // A query that groups what is aggregated by two of the keys.
        final String sparql =
                "SELECT ?type ?location ?averagePrice {" +
                    "FILTER(?averagePrice > 4) " +
                    "{SELECT ?type ?location (avg(?price) as ?averagePrice) {" +
                        "?id <urn:type> ?type . " +
                        "?id <urn:location> ?location ." +
                        "?id <urn:price> ?price ." +
                    "} " +
                    "GROUP BY ?type ?location }}";

        // Create the Statements that will be loaded into Rya.
        final ValueFactory vf = SimpleValueFactory.getInstance();
        final Collection<Statement> statements = Sets.newHashSet(
                // American items that will be averaged.
                vf.createStatement(vf.createIRI("urn:1"), vf.createIRI("urn:type"), vf.createLiteral("apple")),
                vf.createStatement(vf.createIRI("urn:1"), vf.createIRI("urn:location"), vf.createLiteral("USA")),
                vf.createStatement(vf.createIRI("urn:1"), vf.createIRI("urn:price"), vf.createLiteral(2.50)),

                vf.createStatement(vf.createIRI("urn:2"), vf.createIRI("urn:type"), vf.createLiteral("cheese")),
                vf.createStatement(vf.createIRI("urn:2"), vf.createIRI("urn:location"), vf.createLiteral("USA")),
                vf.createStatement(vf.createIRI("urn:2"), vf.createIRI("urn:price"), vf.createLiteral(4.25)),

                vf.createStatement(vf.createIRI("urn:3"), vf.createIRI("urn:type"), vf.createLiteral("cheese")),
                vf.createStatement(vf.createIRI("urn:3"), vf.createIRI("urn:location"), vf.createLiteral("USA")),
                vf.createStatement(vf.createIRI("urn:3"), vf.createIRI("urn:price"), vf.createLiteral(5.25)),

                // French items that will be averaged.
                vf.createStatement(vf.createIRI("urn:4"), vf.createIRI("urn:type"), vf.createLiteral("cheese")),
                vf.createStatement(vf.createIRI("urn:4"), vf.createIRI("urn:location"), vf.createLiteral("France")),
                vf.createStatement(vf.createIRI("urn:4"), vf.createIRI("urn:price"), vf.createLiteral(8.5)),

                vf.createStatement(vf.createIRI("urn:5"), vf.createIRI("urn:type"), vf.createLiteral("cigarettes")),
                vf.createStatement(vf.createIRI("urn:5"), vf.createIRI("urn:location"), vf.createLiteral("France")),
                vf.createStatement(vf.createIRI("urn:5"), vf.createIRI("urn:price"), vf.createLiteral(3.99)),

                vf.createStatement(vf.createIRI("urn:6"), vf.createIRI("urn:type"), vf.createLiteral("cigarettes")),
                vf.createStatement(vf.createIRI("urn:6"), vf.createIRI("urn:location"), vf.createLiteral("France")),
                vf.createStatement(vf.createIRI("urn:6"), vf.createIRI("urn:price"), vf.createLiteral(4.99)));

        // Create the PCJ in Fluo and load the statements into Rya.
        final String pcjId = loadDataAndCreateQuery(sparql, statements);

        // Create the expected results of the SPARQL query once the PCJ has been computed.
        // Only groups whose average exceeds 4 survive the outer FILTER.
        final Set<VisibilityBindingSet> expectedResults = new HashSet<>();

        MapBindingSet bs = new MapBindingSet();
        bs.addBinding("type", vf.createLiteral("cheese", XMLSchema.STRING));
        bs.addBinding("location", vf.createLiteral("France", XMLSchema.STRING));
        bs.addBinding("averagePrice", vf.createLiteral("8.5", XMLSchema.DECIMAL));
        expectedResults.add( new VisibilityBindingSet(bs));

        bs = new MapBindingSet();
        bs.addBinding("type", vf.createLiteral("cigarettes", XMLSchema.STRING));
        bs.addBinding("location", vf.createLiteral("France", XMLSchema.STRING));
        bs.addBinding("averagePrice", vf.createLiteral("4.49", XMLSchema.DECIMAL));
        expectedResults.add( new VisibilityBindingSet(bs) );

        bs = new MapBindingSet();
        bs.addBinding("type", vf.createLiteral("cheese", XMLSchema.STRING));
        bs.addBinding("location", vf.createLiteral("USA", XMLSchema.STRING));
        bs.addBinding("averagePrice", vf.createLiteral("4.75", XMLSchema.DECIMAL));
        expectedResults.add( new VisibilityBindingSet(bs) );

        // Verify the end results of the query match the expected results.
        final Set<VisibilityBindingSet> results = readGroupedResults(pcjId, new VariableOrder("type", "location"));
        assertEquals(expectedResults, results);
    }

    @Test
    public void nestedWithJoinGroupByManyBindings_averages() throws Exception {
        // A query that groups what is aggregated by two of the keys.
        final String sparql =
                "SELECT ?type ?location ?averagePrice ?milkType {" +
                    "FILTER(?averagePrice > 4) " +
                    "?type <urn:hasMilkType> ?milkType ." +
                    "{SELECT ?type ?location (avg(?price) as ?averagePrice) {" +
                        "?id <urn:type> ?type . " +
                        "?id <urn:location> ?location ." +
                        "?id <urn:price> ?price ." +
                    "} " +
                    "GROUP BY ?type ?location }}";

        // Create the Statements that will be loaded into Rya.
        final ValueFactory vf = SimpleValueFactory.getInstance();
        final Collection<Statement> statements = Sets.newHashSet(
                vf.createStatement(vf.createIRI("urn:1"), vf.createIRI("urn:type"), vf.createIRI("urn:blue")),
                vf.createStatement(vf.createIRI("urn:1"), vf.createIRI("urn:location"), vf.createLiteral("France")),
                vf.createStatement(vf.createIRI("urn:1"), vf.createIRI("urn:price"), vf.createLiteral(8.5)),
                vf.createStatement(vf.createIRI("urn:blue"), vf.createIRI("urn:hasMilkType"), vf.createLiteral("cow", XMLSchema.STRING)),

                vf.createStatement(vf.createIRI("urn:2"), vf.createIRI("urn:type"), vf.createIRI("urn:american")),
                vf.createStatement(vf.createIRI("urn:2"), vf.createIRI("urn:location"), vf.createLiteral("USA")),
                vf.createStatement(vf.createIRI("urn:2"), vf.createIRI("urn:price"), vf.createLiteral(.99)),

                vf.createStatement(vf.createIRI("urn:3"), vf.createIRI("urn:type"), vf.createIRI("urn:cheddar")),
                vf.createStatement(vf.createIRI("urn:3"), vf.createIRI("urn:location"), vf.createLiteral("USA")),
                vf.createStatement(vf.createIRI("urn:3"), vf.createIRI("urn:price"), vf.createLiteral(5.25)),

                // French items that will be averaged.
                vf.createStatement(vf.createIRI("urn:4"), vf.createIRI("urn:type"), vf.createIRI("urn:goat")),
                vf.createStatement(vf.createIRI("urn:4"), vf.createIRI("urn:location"), vf.createLiteral("France")),
                vf.createStatement(vf.createIRI("urn:4"), vf.createIRI("urn:price"), vf.createLiteral(6.5)),
                vf.createStatement(vf.createIRI("urn:goat"), vf.createIRI("urn:hasMilkType"), vf.createLiteral("goat", XMLSchema.STRING)),

                vf.createStatement(vf.createIRI("urn:5"), vf.createIRI("urn:type"), vf.createIRI("urn:fontina")),
                vf.createStatement(vf.createIRI("urn:5"), vf.createIRI("urn:location"), vf.createLiteral("Italy")),
                vf.createStatement(vf.createIRI("urn:5"), vf.createIRI("urn:price"), vf.createLiteral(3.99)),
                vf.createStatement(vf.createIRI("urn:fontina"), vf.createIRI("urn:hasMilkType"), vf.createLiteral("cow", XMLSchema.STRING)),

                vf.createStatement(vf.createIRI("urn:6"), vf.createIRI("urn:type"), vf.createIRI("urn:fontina")),
                vf.createStatement(vf.createIRI("urn:6"), vf.createIRI("urn:location"), vf.createLiteral("Italy")),
                vf.createStatement(vf.createIRI("urn:6"), vf.createIRI("urn:price"), vf.createLiteral(4.99)));

        // Create the PCJ in Fluo and load the statements into Rya.
        final String pcjId = loadDataAndCreateQuery(sparql, statements);

        // Create the expected results of the SPARQL query once the PCJ has been computed.
        final Set<VisibilityBindingSet> expectedResults = new HashSet<>();

        MapBindingSet bs = new MapBindingSet();
        bs.addBinding("type", vf.createIRI("urn:blue"));
        bs.addBinding("location", vf.createLiteral("France", XMLSchema.STRING));
        bs.addBinding("averagePrice", vf.createLiteral("8.5", XMLSchema.DECIMAL));
        bs.addBinding("milkType", vf.createLiteral("cow", XMLSchema.STRING));
        expectedResults.add( new VisibilityBindingSet(bs));

        bs = new MapBindingSet();
        bs.addBinding("type", vf.createIRI("urn:goat"));
        bs.addBinding("location", vf.createLiteral("France", XMLSchema.STRING));
        bs.addBinding("averagePrice", vf.createLiteral("6.5", XMLSchema.DECIMAL));
        bs.addBinding("milkType", vf.createLiteral("goat", XMLSchema.STRING));
        expectedResults.add( new VisibilityBindingSet(bs) );

        bs = new MapBindingSet();
        bs.addBinding("type", vf.createIRI("urn:fontina"));
        bs.addBinding("location", vf.createLiteral("Italy", XMLSchema.STRING));
        bs.addBinding("averagePrice", vf.createLiteral("4.49", XMLSchema.DECIMAL));
        bs.addBinding("milkType", vf.createLiteral("cow", XMLSchema.STRING));
        expectedResults.add( new VisibilityBindingSet(bs) );

        // Verify the end results of the query match the expected results.
        final Set<VisibilityBindingSet> results = readGroupedResults(pcjId, new VariableOrder("type", "location"));
        assertEquals(expectedResults, results);
    }

    /**
     * Drains the PCJ's Kafka topic and returns every exported binding set.
     */
    private Set<VisibilityBindingSet> readAllResults(final String pcjId) throws Exception {
        requireNonNull(pcjId);

        // Read all of the results from the Kafka topic.
        final Set<VisibilityBindingSet> results = new HashSet<>();

        try(final KafkaConsumer<String, VisibilityBindingSet> consumer = makeConsumer(pcjId)) {
            // Single poll with a 5 second timeout; assumes all results are
            // available within that window.
            final ConsumerRecords<String, VisibilityBindingSet> records = consumer.poll(5000);
            final Iterator<ConsumerRecord<String, VisibilityBindingSet>> recordIterator = records.iterator();
            while (recordIterator.hasNext()) {
                results.add( recordIterator.next().value() );
            }
        }

        return results;
    }

    /**
     * Returns the final binding set exported to the PCJ's Kafka topic, or
     * {@code null} if no records were polled. Aggregation queries re-export on
     * every update, so the last record carries the final aggregate.
     */
    private VisibilityBindingSet readLastResult(final String pcjId) throws Exception {
        requireNonNull(pcjId);

        // Read the results from the Kafka topic. The last one has the final aggregation result.
        VisibilityBindingSet result = null;

        try(final KafkaConsumer<String, VisibilityBindingSet> consumer = makeConsumer(pcjId)) {
            final ConsumerRecords<String, VisibilityBindingSet> records = consumer.poll(5000);
            final Iterator<ConsumerRecord<String, VisibilityBindingSet>> recordIterator = records.iterator();
            while (recordIterator.hasNext()) {
                result = recordIterator.next().value();
            }
        }

        return result;
    }

    /**
     * Returns the final exported binding set for each distinct combination of
     * the given group-by variables.
     */
    private Set<VisibilityBindingSet> readGroupedResults(final String pcjId, final VariableOrder groupByVars) {
        requireNonNull(pcjId);

        // Read the results from the Kafka topic. The last one for each set of Group By values is an aggregation result.
        // The key in this map is a Binding Set containing only the group by variables.
        final Map<BindingSet, VisibilityBindingSet> results = new HashMap<>();

        try(final KafkaConsumer<String, VisibilityBindingSet> consumer = makeConsumer(pcjId)) {
            final ConsumerRecords<String, VisibilityBindingSet> records = consumer.poll(5000);
            final Iterator<ConsumerRecord<String, VisibilityBindingSet>> recordIterator = records.iterator();
            while (recordIterator.hasNext()) {
                final VisibilityBindingSet visBindingSet = recordIterator.next().value();

                final MapBindingSet key = new MapBindingSet();
                // NOTE(review): "groupByBar" looks like a typo of "groupByVar";
                // renaming would be cosmetic only.
                for(final String groupByBar : groupByVars) {
                    key.addBinding( visBindingSet.getBinding(groupByBar) );
                }

                // Later records for the same group overwrite earlier ones, so
                // only the final aggregate per group survives.
                results.put(key, visBindingSet);
            }
        }

        return Sets.newHashSet( results.values() );
    }
}
apache/solr
36,331
solr/core/src/java/org/apache/solr/handler/component/SearchHandler.java
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.solr.handler.component; import static org.apache.solr.common.params.CommonParams.DISTRIB; import static org.apache.solr.common.params.CommonParams.PATH; import static org.apache.solr.handler.component.ResponseBuilder.STAGE_DONE; import static org.apache.solr.handler.component.ResponseBuilder.STAGE_EXECUTE_QUERY; import static org.apache.solr.handler.component.ResponseBuilder.STAGE_GET_FIELDS; import static org.apache.solr.handler.component.ResponseBuilder.STAGE_PARSE_QUERY; import static org.apache.solr.handler.component.ResponseBuilder.STAGE_START; import static org.apache.solr.handler.component.ResponseBuilder.STAGE_TOP_GROUPS; import static org.apache.solr.request.SolrRequestInfo.getQueryLimits; import static org.apache.solr.response.SolrQueryResponse.RESPONSE_HEADER_PARTIAL_RESULTS_DETAILS_KEY; import com.codahale.metrics.Counter; import java.io.IOException; import java.io.PrintWriter; import java.io.StringWriter; import java.lang.invoke.MethodHandles; import java.util.ArrayList; import java.util.Collections; import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.Objects; import java.util.Optional; import java.util.Set; import 
java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.atomic.AtomicLong; import java.util.concurrent.atomic.AtomicReference; import java.util.stream.Collectors; import org.apache.lucene.index.ExitableDirectoryReader; import org.apache.lucene.search.TotalHits; import org.apache.solr.client.solrj.SolrRequest.SolrRequestType; import org.apache.solr.client.solrj.SolrServerException; import org.apache.solr.cloud.ZkController; import org.apache.solr.common.SolrDocumentList; import org.apache.solr.common.SolrException; import org.apache.solr.common.params.CommonParams; import org.apache.solr.common.params.CursorMarkParams; import org.apache.solr.common.params.ModifiableSolrParams; import org.apache.solr.common.params.ShardParams; import org.apache.solr.common.util.NamedList; import org.apache.solr.common.util.SimpleOrderedMap; import org.apache.solr.common.util.StrUtils; import org.apache.solr.core.CloseHook; import org.apache.solr.core.CoreContainer; import org.apache.solr.core.PluginInfo; import org.apache.solr.core.SolrCore; import org.apache.solr.handler.RequestHandlerBase; import org.apache.solr.logging.MDCLoggingContext; import org.apache.solr.metrics.MetricsMap; import org.apache.solr.metrics.SolrMetricsContext; import org.apache.solr.pkg.PackageAPI; import org.apache.solr.pkg.PackageListeners; import org.apache.solr.pkg.SolrPackageLoader; import org.apache.solr.request.SolrQueryRequest; import org.apache.solr.response.SolrQueryResponse; import org.apache.solr.search.CursorMark; import org.apache.solr.search.QueryLimits; import org.apache.solr.search.SortSpec; import org.apache.solr.search.facet.FacetModule; import org.apache.solr.security.AuthorizationContext; import org.apache.solr.security.PermissionNameProvider; import org.apache.solr.util.RTimerTree; import org.apache.solr.util.SolrPluginUtils; import org.apache.solr.util.ThreadCpuTimer; import org.apache.solr.util.circuitbreaker.CircuitBreaker; import 
org.apache.solr.util.circuitbreaker.CircuitBreakerRegistry; import org.apache.solr.util.circuitbreaker.CircuitBreakerUtils; import org.apache.solr.util.plugin.PluginInfoInitialized; import org.apache.solr.util.plugin.SolrCoreAware; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.slf4j.MDC; /** Refer SOLR-281 */ public class SearchHandler extends RequestHandlerBase implements SolrCoreAware, PluginInfoInitialized, PermissionNameProvider { static final String INIT_COMPONENTS = "components"; static final String INIT_FIRST_COMPONENTS = "first-components"; static final String INIT_LAST_COMPONENTS = "last-components"; protected static final String SHARD_HANDLER_SUFFIX = "[shard]"; private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass()); /** * A counter to ensure that no RID is equal, even if they fall in the same millisecond * * @deprecated this was replaced by the auto-generated trace ids */ @Deprecated(since = "9.4") private static final AtomicLong ridCounter = new AtomicLong(); /** * An opt-out flag to prevent the addition of {@link CommonParams#REQUEST_ID} tracing on * distributed queries * * <p>Defaults to 'false' if not specified. 
* * @see CommonParams#DISABLE_REQUEST_ID * @deprecated this was replaced by the auto-generated trace ids */ @Deprecated(since = "9.4") private static final boolean DISABLE_REQUEST_ID_DEFAULT = Boolean.getBoolean("solr.disableRequestId"); private HandlerMetrics metricsShard = HandlerMetrics.NO_OP; private final Map<String, Counter> shardPurposes = new ConcurrentHashMap<>(); protected volatile List<SearchComponent> components; private ShardHandlerFactory shardHandlerFactory; private PluginInfo shfInfo; private SolrCore core; protected List<String> getDefaultComponents() { ArrayList<String> names = new ArrayList<>(9); names.add(QueryComponent.COMPONENT_NAME); names.add(FacetComponent.COMPONENT_NAME); names.add(FacetModule.COMPONENT_NAME); names.add(MoreLikeThisComponent.COMPONENT_NAME); names.add(HighlightComponent.COMPONENT_NAME); names.add(StatsComponent.COMPONENT_NAME); names.add(DebugComponent.COMPONENT_NAME); names.add(ExpandComponent.COMPONENT_NAME); names.add(TermsComponent.COMPONENT_NAME); return names; } @Override public void init(PluginInfo info) { init(info.initArgs); for (PluginInfo child : info.children) { if ("shardHandlerFactory".equals(child.type)) { this.shfInfo = child; break; } } } @Override public void initializeMetrics(SolrMetricsContext parentContext, String scope) { super.initializeMetrics(parentContext, scope); metricsShard = new HandlerMetrics( // will register various metrics in the context solrMetricsContext, getCategory().toString(), scope + SHARD_HANDLER_SUFFIX); solrMetricsContext.gauge( new MetricsMap(map -> shardPurposes.forEach((k, v) -> map.putNoEx(k, v.getCount()))), true, "purposes", getCategory().toString(), scope + SHARD_HANDLER_SUFFIX); } @Override public HandlerMetrics getMetricsForThisRequest(SolrQueryRequest req) { return req.getParams().getBool(ShardParams.IS_SHARD, false) ? 
this.metricsShard : this.metrics; } @Override public PermissionNameProvider.Name getPermissionName(AuthorizationContext ctx) { return PermissionNameProvider.Name.READ_PERM; } /** * Initialize the components based on name. Note, if using <code>INIT_FIRST_COMPONENTS</code> or * <code>INIT_LAST_COMPONENTS</code>, then the {@link DebugComponent} will always occur last. If * this is not desired, then one must explicitly declare all components using the <code> * INIT_COMPONENTS</code> syntax. */ @Override @SuppressWarnings("unchecked") public void inform(SolrCore core) { this.core = core; List<String> c = (List<String>) initArgs.get(INIT_COMPONENTS); Set<String> missing = new HashSet<>(core.getSearchComponents().checkContains(c)); List<String> first = (List<String>) initArgs.get(INIT_FIRST_COMPONENTS); missing.addAll(core.getSearchComponents().checkContains(first)); List<String> last = (List<String>) initArgs.get(INIT_LAST_COMPONENTS); missing.addAll(core.getSearchComponents().checkContains(last)); if (!missing.isEmpty()) throw new SolrException( SolrException.ErrorCode.SERVER_ERROR, "Missing SearchComponents named : " + missing); if (c != null && (first != null || last != null)) throw new SolrException( SolrException.ErrorCode.SERVER_ERROR, "First/Last components only valid if you do not declare 'components'"); if (shfInfo == null) { shardHandlerFactory = core.getCoreContainer().getShardHandlerFactory(); } else { shardHandlerFactory = core.createInitInstance(shfInfo, ShardHandlerFactory.class, null, null); core.addCloseHook( new CloseHook() { @Override public void preClose(SolrCore core) { shardHandlerFactory.close(); } }); shardHandlerFactory.setSecurityBuilder( core.getCoreContainer().getPkiAuthenticationSecurityBuilder()); } if (core.getCoreContainer().isZooKeeperAware()) { core.getPackageListeners() .addListener( new PackageListeners.Listener() { @Override public String packageName() { return null; } @Override public Map<String, PackageAPI.PkgVersion> 
packageDetails() { return Collections.emptyMap(); } @Override public void changed(SolrPackageLoader.SolrPackage pkg, Ctx ctx) { // we could optimize this by listening to only relevant packages, // but it is not worth optimizing as these are lightweight objects components = null; } }); } } @SuppressWarnings({"unchecked"}) private void initComponents() { Object declaredComponents = initArgs.get(INIT_COMPONENTS); List<String> first = (List<String>) initArgs.get(INIT_FIRST_COMPONENTS); List<String> last = (List<String>) initArgs.get(INIT_LAST_COMPONENTS); List<String> list = null; boolean makeDebugLast = true; if (declaredComponents == null) { // Use the default component list list = getDefaultComponents(); if (first != null) { List<String> clist = first; clist.addAll(list); list = clist; } if (last != null) { list.addAll(last); } } else { list = (List<String>) declaredComponents; if (first != null || last != null) { throw new SolrException( SolrException.ErrorCode.SERVER_ERROR, "First/Last components only valid if you do not declare 'components'"); } makeDebugLast = false; } // Build the component list List<SearchComponent> components = new ArrayList<>(list.size()); DebugComponent dbgCmp = null; for (String c : list) { SearchComponent comp = core.getSearchComponent(c); if (comp instanceof DebugComponent && makeDebugLast == true) { dbgCmp = (DebugComponent) comp; } else { components.add(comp); log.debug("Adding component:{}", comp); } } if (makeDebugLast == true && dbgCmp != null) { components.add(dbgCmp); log.debug("Adding debug component:{}", dbgCmp); } this.components = components; } public List<SearchComponent> getComponents() { List<SearchComponent> result = components; // volatile read if (result == null) { synchronized (this) { if (components == null) { initComponents(); } result = components; } } return result; } private boolean isDistrib(SolrQueryRequest req) { boolean isZkAware = req.getCoreContainer().isZooKeeperAware(); boolean isDistrib = 
req.getParams().getBool(DISTRIB, isZkAware); if (!isDistrib) { // for back compat, a shards param with URLs like localhost:8983/solr will mean that this // search is distributed. final String shards = req.getParams().get(ShardParams.SHARDS); isDistrib = ((shards != null) && (shards.indexOf('/') > 0)); } return isDistrib; } public ShardHandler getAndPrepShardHandler(SolrQueryRequest req, ResponseBuilder rb) { ShardHandler shardHandler = null; CoreContainer cc = req.getCoreContainer(); boolean isZkAware = cc.isZooKeeperAware(); if (rb.isDistrib) { shardHandler = shardHandlerFactory.getShardHandler(); shardHandler.prepDistributed(rb); if (!rb.isDistrib) { // request is not distributed after all and so the shard handler is not needed shardHandler = null; } } if (isZkAware) { String shardsTolerant = req.getParams().get(ShardParams.SHARDS_TOLERANT); boolean requireZkConnected = shardsTolerant != null && shardsTolerant.equals(ShardParams.REQUIRE_ZK_CONNECTED); ZkController zkController = cc.getZkController(); boolean zkConnected = zkController != null && zkController.getZkClient().isConnected(); if (requireZkConnected && false == zkConnected) { throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, "ZooKeeper is not connected"); } else { NamedList<Object> headers = rb.rsp.getResponseHeader(); if (headers != null) { headers.add("zkConnected", zkConnected); } } } return shardHandler; } /** * Override this method if you require a custom {@link ResponseBuilder} e.g. for use by a custom * {@link SearchComponent}. */ protected ResponseBuilder newResponseBuilder( SolrQueryRequest req, SolrQueryResponse rsp, List<SearchComponent> components) { return new ResponseBuilder(req, rsp, components); } /** * Check if {@link SolrRequestType#QUERY} circuit breakers are tripped. Override this method in * sub classes that do not want to check circuit breakers. * * @return true if circuit breakers are tripped, false otherwise. 
*/ protected boolean checkCircuitBreakers( SolrQueryRequest req, SolrQueryResponse rsp, ResponseBuilder rb) { if (isInternalShardRequest(req)) { if (log.isTraceEnabled()) { log.trace("Internal request, skipping circuit breaker check"); } return false; } final RTimerTree timer = rb.isDebug() ? req.getRequestTimer() : null; final CircuitBreakerRegistry circuitBreakerRegistry = req.getCore().getCircuitBreakerRegistry(); if (circuitBreakerRegistry.isEnabled(SolrRequestType.QUERY)) { List<CircuitBreaker> trippedCircuitBreakers; if (timer != null) { RTimerTree subt = timer.sub("circuitbreaker"); rb.setTimer(subt); trippedCircuitBreakers = circuitBreakerRegistry.checkTripped(SolrRequestType.QUERY); rb.getTimer().stop(); } else { trippedCircuitBreakers = circuitBreakerRegistry.checkTripped(SolrRequestType.QUERY); } return CircuitBreakerUtils.reportErrorIfBreakersTripped(rsp, trippedCircuitBreakers); } return false; } @Override public void handleRequestBody(SolrQueryRequest req, SolrQueryResponse rsp) throws Exception { if (req.getParams().getBool(ShardParams.IS_SHARD, false)) { int purpose = req.getParams().getInt(ShardParams.SHARDS_PURPOSE, 0); SolrPluginUtils.forEachRequestPurpose( purpose, n -> shardPurposes.computeIfAbsent(n, name -> new Counter()).inc()); } List<SearchComponent> components = getComponents(); ResponseBuilder rb = newResponseBuilder(req, rsp, components); if (rb.requestInfo != null) { rb.requestInfo.setResponseBuilder(rb); } rb.isDistrib = isDistrib(req); tagRequestWithRequestId(rb); boolean dbg = req.getParams().getBool(CommonParams.DEBUG_QUERY, false); rb.setDebug(dbg); if (dbg == false) { // if it's true, we are doing everything anyway. SolrPluginUtils.getDebugInterests(req.getParams().getParams(CommonParams.DEBUG), rb); } final RTimerTree timer = rb.isDebug() ? 
req.getRequestTimer() : null; if (checkCircuitBreakers(req, rsp, rb)) { return; // Circuit breaker tripped, return immediately } processComponents(req, rsp, rb, timer, components); // SOLR-5550: still provide shards.info if requested even for a short-circuited distrib request if (!rb.isDistrib && req.getParams().getBool(ShardParams.SHARDS_INFO, false) && rb.shortCircuitedURL != null) { NamedList<Object> shardInfo = new SimpleOrderedMap<>(); SimpleOrderedMap<Object> nl = new SimpleOrderedMap<>(); if (rsp.getException() != null) { Throwable cause = rsp.getException(); if (cause instanceof SolrServerException) { cause = ((SolrServerException) cause).getRootCause(); } else { if (cause.getCause() != null) { cause = cause.getCause(); } } nl.add("error", cause.toString()); if (!core.getCoreContainer().hideStackTrace()) { StringWriter trace = new StringWriter(); cause.printStackTrace(new PrintWriter(trace)); nl.add("trace", trace.toString()); } } else if (rb.getResults() != null) { nl.add("numFound", rb.getResults().docList.matches()); nl.add( "numFoundExact", rb.getResults().docList.hitCountRelation() == TotalHits.Relation.EQUAL_TO); nl.add("maxScore", rb.getResults().docList.maxScore()); } nl.add("shardAddress", rb.shortCircuitedURL); nl.add("time", req.getRequestTimer().getTime()); // elapsed time of this request so far int pos = rb.shortCircuitedURL.indexOf("://"); String shardInfoName = pos != -1 ? 
rb.shortCircuitedURL.substring(pos + 3) : rb.shortCircuitedURL; shardInfo.add(shardInfoName, nl); rsp.getValues().add(ShardParams.SHARDS_INFO, shardInfo); } } private void processComponents( SolrQueryRequest req, SolrQueryResponse rsp, ResponseBuilder rb, RTimerTree timer, List<SearchComponent> components) throws IOException { // creates a ShardHandler object only if it's needed final ShardHandler shardHandler1 = getAndPrepShardHandler(req, rb); if (!prepareComponents(req, rb, timer, components)) return; { // Once all of our components have been prepared, check if this request involves a SortSpec. // If it does, and if our request includes a cursorMark param, then parse & init the // CursorMark state (This must happen after the prepare() of all components, because any // component may have modified the SortSpec) final SortSpec spec = rb.getSortSpec(); final String cursorStr = rb.req.getParams().get(CursorMarkParams.CURSOR_MARK_PARAM); if (null != spec && null != cursorStr) { final CursorMark cursorMark = new CursorMark(rb.req.getSchema(), spec); cursorMark.parseSerializedTotem(cursorStr); rb.setCursorMark(cursorMark); } } if (!rb.isDistrib) { // a normal non-distributed request try { // The semantics of debugging vs not debugging are different enough that // it makes sense to have two control loops if (!rb.isDebug()) { // Process for (SearchComponent c : components) { if (checkLimitsBefore(c, "process", rb.req, rb.rsp, components)) { shortCircuitedResults(req, rb); return; } c.process(rb); } } else { // Process RTimerTree subt = timer.sub("process"); for (SearchComponent c : components) { if (checkLimitsBefore(c, "process debug", rb.req, rb.rsp, components)) { shortCircuitedResults(req, rb); return; } rb.setTimer(subt.sub(c.getName())); c.process(rb); rb.getTimer().stop(); } subt.stop(); // add the timing info if (rb.isDebugTimings()) { rb.addDebugInfo("timing", timer.asNamedList()); } } } catch (ExitableDirectoryReader.ExitingReaderException ex) { log.warn("Query 
terminated: {}; ", req.getParamString(), ex); shortCircuitedResults(req, rb); } } else { // a distributed request if (rb.outgoing == null) { rb.outgoing = new ArrayList<>(); } rb.finished = new ArrayList<>(); int nextStage = 0; long totalShardCpuTime = 0L; do { rb.setStage(nextStage); nextStage = ResponseBuilder.STAGE_DONE; // call all components for (SearchComponent c : components) { if (checkLimitsBefore(c, "distrib", rb.req, rb.rsp, components)) { shortCircuitedResults(req, rb); return; } // the next stage is the minimum of what all components report nextStage = Math.min(nextStage, c.distributedProcess(rb)); } // check the outgoing queue and send requests while (rb.outgoing.size() > 0) { // submit all current request tasks at once while (rb.outgoing.size() > 0) { ShardRequest sreq = rb.outgoing.remove(0); sreq.actualShards = sreq.shards; if (sreq.actualShards == ShardRequest.ALL_SHARDS) { sreq.actualShards = rb.shards; } // presume we'll get a response from each shard we send to sreq.responses = new ArrayList<>(sreq.actualShards.length); QueryLimits queryLimits = QueryLimits.getCurrentLimits(); // TODO: map from shard to address[] for (String shard : sreq.actualShards) { ModifiableSolrParams params = new ModifiableSolrParams(sreq.params); ShardHandler.setShardAttributesToParams(params, sreq.purpose); // Distributed request -- need to send queryID as a part of the distributed request params.setNonNull(ShardParams.QUERY_ID, rb.queryID); if (rb.requestInfo != null) { // we could try and detect when this is needed, but it could be tricky params.set("NOW", Long.toString(rb.requestInfo.getNOW().getTime())); } String shardQt = params.get(ShardParams.SHARDS_QT); if (shardQt != null) { params.set(CommonParams.QT, shardQt); } else { // for distributed queries that don't include shards.qt, use the original path // as the default but operators need to update their luceneMatchVersion to enable // this behavior since it did not work this way prior to 5.1 String reqPath = 
(String) req.getContext().get(PATH); if (!"/select".equals(reqPath)) { params.set(CommonParams.QT, reqPath); } // else if path is /select, then the qt gets passed thru if set } if (queryLimits.isLimitsEnabled()) { if (queryLimits.adjustShardRequestLimits(sreq, shard, params, rb)) { // Skip this shard since one or more limits will be tripped if (log.isDebugEnabled()) { log.debug( "Skipping request to shard '{}' due to query limits, params {}", shard, params); } continue; } } shardHandler1.submit(sreq, shard, params); } } // now wait for replies, but if anyone puts more requests on // the outgoing queue, send them out immediately (by exiting // this loop) boolean tolerant = HttpShardHandler.getShardsTolerantAsBool(rb.req); while (rb.outgoing.size() == 0) { ShardResponse srsp = tolerant ? shardHandler1.takeCompletedIncludingErrors() : shardHandler1.takeCompletedOrError(); if (srsp == null) break; // no more requests to wait for AtomicReference<Object> detailMesg = new AtomicReference<>(); // or perhaps new Object[1] ? boolean anyResponsesPartial = srsp.getShardRequest().responses.stream() .anyMatch( response -> { NamedList<Object> resp = response.getSolrResponse().getResponse(); if (resp == null) { return false; } Object recursive = resp._get(List.of("responseHeader", "partialResults"), null); if (recursive != null) { Object message = "[Shard:" + response.getShardAddress() + "]" + resp._get( List.of( "responseHeader", RESPONSE_HEADER_PARTIAL_RESULTS_DETAILS_KEY), null); detailMesg.compareAndSet(null, message); // first one, ingore rest } return recursive != null; }); if (anyResponsesPartial) { rb.rsp.addPartialResponseDetail(detailMesg.get()); rsp.setPartialResults(rb.req); } // Was there an exception? // In the case of tolerant search, we need to check all responses to see if there was an // exception. 
Optional<Throwable> shardException = srsp.getShardRequest().responses.stream() .map(ShardResponse::getException) .filter(Objects::nonNull) .findFirst(); if (shardException.isPresent()) { // If things are not tolerant, abort everything and rethrow if (!tolerant) { throwSolrException(shardException.get()); } else { // Check if the purpose includes 'PURPOSE_GET_TOP_IDS' boolean includesTopIdsPurpose = (srsp.getShardRequest().purpose & ShardRequest.PURPOSE_GET_TOP_IDS) != 0; // Check if all responses have exceptions boolean allResponsesHaveExceptions = srsp.getShardRequest().responses.stream() .allMatch(response -> response.getException() != null); // Check if all shards have failed for PURPOSE_GET_TOP_IDS boolean allShardsFailed = includesTopIdsPurpose && allResponsesHaveExceptions; // if all shards fail, fail the request despite shards.tolerant if (allShardsFailed) { throwSolrException(shardException.get()); } else { rsp.setPartialResults(rb.req); if (publishCpuTime) { totalShardCpuTime += computeShardCpuTime(srsp.getShardRequest().responses); rsp.getResponseHeader().add(ThreadCpuTimer.CPU_TIME, totalShardCpuTime); rsp.addToLog(ThreadCpuTimer.CPU_TIME, totalShardCpuTime); } } } } rb.finished.add(srsp.getShardRequest()); // let the components see the responses to the request for (SearchComponent c : components) { if (checkLimitsBefore( c, "handleResponses next stage:" + stageToString(nextStage), rb.req, rb.rsp, components)) { shortCircuitedResults(req, rb); return; } c.handleResponses(rb, srsp.getShardRequest()); } // Compute total CpuTime used by all shards. 
if (publishCpuTime) { totalShardCpuTime += computeShardCpuTime(srsp.getShardRequest().responses); } } } for (SearchComponent c : components) { if (checkLimitsBefore( c, "finishStage stage:" + stageToString(nextStage), rb.req, rb.rsp, components)) { return; } c.finishStage(rb); } // we are done when the next stage is MAX_VALUE } while (nextStage != Integer.MAX_VALUE); if (publishCpuTime) { rsp.getResponseHeader().add(ThreadCpuTimer.CPU_TIME, totalShardCpuTime); rsp.addToLog(ThreadCpuTimer.CPU_TIME, totalShardCpuTime); } } } private static boolean prepareComponents( SolrQueryRequest req, ResponseBuilder rb, RTimerTree timer, List<SearchComponent> components) throws IOException { if (timer == null) { // non-debugging prepare phase for (SearchComponent component : components) { if (checkLimitsBefore(component, "prepare", rb.req, rb.rsp, components)) { shortCircuitedResults(req, rb); return false; } component.prepare(rb); } } else { // debugging prepare phase RTimerTree subt = timer.sub("prepare"); for (SearchComponent c : components) { if (checkLimitsBefore(c, "prepare debug", rb.req, rb.rsp, components)) { shortCircuitedResults(req, rb); return false; } rb.setTimer(subt.sub(c.getName())); c.prepare(rb); rb.getTimer().stop(); } subt.stop(); } return true; } protected String stageToString(int stage) { // This should probably be a enum, but that change should be its own ticket. switch (stage) { case STAGE_START: return "START"; case STAGE_PARSE_QUERY: return "PARSE_QUERY"; case STAGE_TOP_GROUPS: return "TOP_GROUPS"; case STAGE_EXECUTE_QUERY: return "EXECUTE_QUERY"; case STAGE_GET_FIELDS: return "GET_FIELDS"; // nobody wants to think it was DONE and canceled after it completed... 
case STAGE_DONE: return "FINISHING"; default: return "CUSTOM_STAGE_" + String.valueOf(stage); } } private static void shortCircuitedResults(SolrQueryRequest req, ResponseBuilder rb) { if (rb.rsp.getResponse() == null) { rb.rsp.addResponse(new SolrDocumentList()); // If a cursorMark was passed, and we didn't progress, set // the nextCursorMark to the same position String cursorStr = rb.req.getParams().get(CursorMarkParams.CURSOR_MARK_PARAM); if (null != cursorStr) { rb.rsp.add(CursorMarkParams.CURSOR_MARK_NEXT, cursorStr); } } if (rb.isDebug()) { NamedList<Object> debug = new NamedList<>(); debug.add("explain", new NamedList<>()); rb.rsp.add("debug", debug); } rb.rsp.setPartialResults(rb.req); } private static boolean checkLimitsBefore( SearchComponent c, String when, SolrQueryRequest req, SolrQueryResponse resp, List<SearchComponent> components) { return getQueryLimits(req, resp) .maybeExitWithPartialResults( () -> "[" + when + "] Limit(s) exceeded prior to " + c.getName() + " in " + components.stream() .map(SearchComponent::getName) .collect(Collectors.toList())); } private long computeShardCpuTime(List<ShardResponse> responses) { long totalShardCpuTime = 0; for (ShardResponse response : responses) { if ((response.getSolrResponse() != null) && (response.getSolrResponse().getResponse() != null) && (response.getSolrResponse().getResponse().get("responseHeader") != null)) { @SuppressWarnings("unchecked") SimpleOrderedMap<Object> header = (SimpleOrderedMap<Object>) response.getSolrResponse().getResponse().get(SolrQueryResponse.RESPONSE_HEADER_KEY); if (header != null) { Long shardCpuTime = (Long) header.get(ThreadCpuTimer.CPU_TIME); if (shardCpuTime != null) { totalShardCpuTime += shardCpuTime; } } } } return totalShardCpuTime; } private static void throwSolrException(Throwable shardResponseException) throws SolrException { if (shardResponseException instanceof SolrException) { throw (SolrException) shardResponseException; } else { throw new 
SolrException(SolrException.ErrorCode.SERVER_ERROR, shardResponseException); } } private void tagRequestWithRequestId(ResponseBuilder rb) { final boolean ridTaggingDisabled = rb.req.getParams().getBool(CommonParams.DISABLE_REQUEST_ID, DISABLE_REQUEST_ID_DEFAULT); if (!ridTaggingDisabled) { String rid = getOrGenerateRequestId(rb.req); // NOTE: SearchHandler explicitly never clears/removes this MDC value... // We want it to live for the entire request, beyond the scope of SearchHandler's processing, // and trust SolrDispatchFilter to clean it up at the end of the request. // // Examples: // - ERROR logging of Exceptions propogated up to our base class // - SolrCore.RequestLog // - ERRORs that may be logged during response writing MDC.put(CommonParams.REQUEST_ID, rid); if (StrUtils.isBlank(rb.req.getParams().get(CommonParams.REQUEST_ID))) { ModifiableSolrParams params = new ModifiableSolrParams(rb.req.getParams()); params.add(CommonParams.REQUEST_ID, rid); // add rid to the request so that shards see it rb.req.setParams(params); } if (rb.isDistrib) { rb.rsp.addToLog(CommonParams.REQUEST_ID, rid); // to see it in the logs of the landing core } } } /** * Returns a String to use as an identifier for this request. * * <p>If the provided {@link SolrQueryRequest} contains a non-blank {@link * CommonParams#REQUEST_ID} param value this is used. This is especially useful for users who * deploy Solr as one component in a larger ecosystem, and want to use an external ID utilized by * other components as well. If no {@link CommonParams#REQUEST_ID} value is present, one is * generated from scratch for the request. * * <p>Callers are responsible for storing the returned value in the {@link SolrQueryRequest} * object if they want to ensure that ID generation is not redone on subsequent calls. 
*/ public static String getOrGenerateRequestId(SolrQueryRequest req) { String rid = req.getParams().get(CommonParams.REQUEST_ID); if (StrUtils.isNotBlank(rid)) { return rid; } String traceId = MDCLoggingContext.getTraceId(); if (StrUtils.isNotBlank(traceId)) { return traceId; } return generateRid(req); } private static String generateRid(SolrQueryRequest req) { String hostName = req.getCoreContainer().getHostName(); return hostName + "-" + ridCounter.getAndIncrement(); } //////////////////////// SolrInfoMBeans methods ////////////////////// @Override public String getDescription() { StringBuilder sb = new StringBuilder(); sb.append("Search using components: "); if (components != null) { for (SearchComponent c : components) { sb.append(c.getName()); sb.append(","); } } return sb.toString(); } @Override public Boolean registerV2() { return Boolean.TRUE; } } // TODO: generalize how a comm component can fit into search component framework // TODO: statics should be per-core singletons
googleapis/google-cloud-java
36,340
java-talent/grpc-google-cloud-talent-v4beta1/src/main/java/com/google/cloud/talent/v4beta1/CompanyServiceGrpc.java
/*
 * Copyright 2025 Google LLC
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.google.cloud.talent.v4beta1;

import static io.grpc.MethodDescriptor.generateFullMethodName;

/**
 *
 *
 * <pre>
 * A service that handles company management, including CRUD and enumeration.
 * </pre>
 */
@javax.annotation.Generated(
    value = "by gRPC proto compiler",
    comments = "Source: google/cloud/talent/v4beta1/company_service.proto")
@io.grpc.stub.annotations.GrpcGenerated
public final class CompanyServiceGrpc {

  private CompanyServiceGrpc() {}

  public static final java.lang.String SERVICE_NAME = "google.cloud.talent.v4beta1.CompanyService";

  // Static method descriptors that strictly reflect the proto.
  // NOTE(review): each descriptor below is built lazily with double-checked
  // locking on the CompanyServiceGrpc class object; the volatile field is
  // copied into a local before use, per the standard grpc-java generated idiom.
  private static volatile io.grpc.MethodDescriptor<
          com.google.cloud.talent.v4beta1.CreateCompanyRequest,
          com.google.cloud.talent.v4beta1.Company>
      getCreateCompanyMethod;

  @io.grpc.stub.annotations.RpcMethod(
      fullMethodName = SERVICE_NAME + '/' + "CreateCompany",
      requestType = com.google.cloud.talent.v4beta1.CreateCompanyRequest.class,
      responseType = com.google.cloud.talent.v4beta1.Company.class,
      methodType = io.grpc.MethodDescriptor.MethodType.UNARY)
  public static io.grpc.MethodDescriptor<
          com.google.cloud.talent.v4beta1.CreateCompanyRequest,
          com.google.cloud.talent.v4beta1.Company>
      getCreateCompanyMethod() {
    io.grpc.MethodDescriptor<
            com.google.cloud.talent.v4beta1.CreateCompanyRequest,
            com.google.cloud.talent.v4beta1.Company>
        getCreateCompanyMethod;
    if ((getCreateCompanyMethod = CompanyServiceGrpc.getCreateCompanyMethod) == null) {
      synchronized (CompanyServiceGrpc.class) {
        if ((getCreateCompanyMethod = CompanyServiceGrpc.getCreateCompanyMethod) == null) {
          CompanyServiceGrpc.getCreateCompanyMethod =
              getCreateCompanyMethod =
                  io.grpc.MethodDescriptor
                      .<com.google.cloud.talent.v4beta1.CreateCompanyRequest,
                          com.google.cloud.talent.v4beta1.Company>
                          newBuilder()
                      .setType(io.grpc.MethodDescriptor.MethodType.UNARY)
                      .setFullMethodName(generateFullMethodName(SERVICE_NAME, "CreateCompany"))
                      .setSampledToLocalTracing(true)
                      .setRequestMarshaller(
                          io.grpc.protobuf.ProtoUtils.marshaller(
                              com.google.cloud.talent.v4beta1.CreateCompanyRequest
                                  .getDefaultInstance()))
                      .setResponseMarshaller(
                          io.grpc.protobuf.ProtoUtils.marshaller(
                              com.google.cloud.talent.v4beta1.Company.getDefaultInstance()))
                      .setSchemaDescriptor(
                          new CompanyServiceMethodDescriptorSupplier("CreateCompany"))
                      .build();
        }
      }
    }
    return getCreateCompanyMethod;
  }

  private static volatile io.grpc.MethodDescriptor<
          com.google.cloud.talent.v4beta1.GetCompanyRequest,
          com.google.cloud.talent.v4beta1.Company>
      getGetCompanyMethod;

  @io.grpc.stub.annotations.RpcMethod(
      fullMethodName = SERVICE_NAME + '/' + "GetCompany",
      requestType = com.google.cloud.talent.v4beta1.GetCompanyRequest.class,
      responseType = com.google.cloud.talent.v4beta1.Company.class,
      methodType = io.grpc.MethodDescriptor.MethodType.UNARY)
  public static io.grpc.MethodDescriptor<
          com.google.cloud.talent.v4beta1.GetCompanyRequest,
          com.google.cloud.talent.v4beta1.Company>
      getGetCompanyMethod() {
    io.grpc.MethodDescriptor<
            com.google.cloud.talent.v4beta1.GetCompanyRequest,
            com.google.cloud.talent.v4beta1.Company>
        getGetCompanyMethod;
    if ((getGetCompanyMethod = CompanyServiceGrpc.getGetCompanyMethod) == null) {
      synchronized (CompanyServiceGrpc.class) {
        if ((getGetCompanyMethod = CompanyServiceGrpc.getGetCompanyMethod) == null) {
          CompanyServiceGrpc.getGetCompanyMethod =
              getGetCompanyMethod =
                  io.grpc.MethodDescriptor
                      .<com.google.cloud.talent.v4beta1.GetCompanyRequest,
                          com.google.cloud.talent.v4beta1.Company>
                          newBuilder()
                      .setType(io.grpc.MethodDescriptor.MethodType.UNARY)
                      .setFullMethodName(generateFullMethodName(SERVICE_NAME, "GetCompany"))
                      .setSampledToLocalTracing(true)
                      .setRequestMarshaller(
                          io.grpc.protobuf.ProtoUtils.marshaller(
                              com.google.cloud.talent.v4beta1.GetCompanyRequest
                                  .getDefaultInstance()))
                      .setResponseMarshaller(
                          io.grpc.protobuf.ProtoUtils.marshaller(
                              com.google.cloud.talent.v4beta1.Company.getDefaultInstance()))
                      .setSchemaDescriptor(new CompanyServiceMethodDescriptorSupplier("GetCompany"))
                      .build();
        }
      }
    }
    return getGetCompanyMethod;
  }

  private static volatile io.grpc.MethodDescriptor<
          com.google.cloud.talent.v4beta1.UpdateCompanyRequest,
          com.google.cloud.talent.v4beta1.Company>
      getUpdateCompanyMethod;

  @io.grpc.stub.annotations.RpcMethod(
      fullMethodName = SERVICE_NAME + '/' + "UpdateCompany",
      requestType = com.google.cloud.talent.v4beta1.UpdateCompanyRequest.class,
      responseType = com.google.cloud.talent.v4beta1.Company.class,
      methodType = io.grpc.MethodDescriptor.MethodType.UNARY)
  public static io.grpc.MethodDescriptor<
          com.google.cloud.talent.v4beta1.UpdateCompanyRequest,
          com.google.cloud.talent.v4beta1.Company>
      getUpdateCompanyMethod() {
    io.grpc.MethodDescriptor<
            com.google.cloud.talent.v4beta1.UpdateCompanyRequest,
            com.google.cloud.talent.v4beta1.Company>
        getUpdateCompanyMethod;
    if ((getUpdateCompanyMethod = CompanyServiceGrpc.getUpdateCompanyMethod) == null) {
      synchronized (CompanyServiceGrpc.class) {
        if ((getUpdateCompanyMethod = CompanyServiceGrpc.getUpdateCompanyMethod) == null) {
          CompanyServiceGrpc.getUpdateCompanyMethod =
              getUpdateCompanyMethod =
                  io.grpc.MethodDescriptor
                      .<com.google.cloud.talent.v4beta1.UpdateCompanyRequest,
                          com.google.cloud.talent.v4beta1.Company>
                          newBuilder()
                      .setType(io.grpc.MethodDescriptor.MethodType.UNARY)
                      .setFullMethodName(generateFullMethodName(SERVICE_NAME, "UpdateCompany"))
                      .setSampledToLocalTracing(true)
                      .setRequestMarshaller(
                          io.grpc.protobuf.ProtoUtils.marshaller(
                              com.google.cloud.talent.v4beta1.UpdateCompanyRequest
                                  .getDefaultInstance()))
                      .setResponseMarshaller(
                          io.grpc.protobuf.ProtoUtils.marshaller(
                              com.google.cloud.talent.v4beta1.Company.getDefaultInstance()))
                      .setSchemaDescriptor(
                          new CompanyServiceMethodDescriptorSupplier("UpdateCompany"))
                      .build();
        }
      }
    }
    return getUpdateCompanyMethod;
  }

  private static volatile io.grpc.MethodDescriptor<
          com.google.cloud.talent.v4beta1.DeleteCompanyRequest, com.google.protobuf.Empty>
      getDeleteCompanyMethod;

  @io.grpc.stub.annotations.RpcMethod(
      fullMethodName = SERVICE_NAME + '/' + "DeleteCompany",
      requestType = com.google.cloud.talent.v4beta1.DeleteCompanyRequest.class,
      responseType = com.google.protobuf.Empty.class,
      methodType = io.grpc.MethodDescriptor.MethodType.UNARY)
  public static io.grpc.MethodDescriptor<
          com.google.cloud.talent.v4beta1.DeleteCompanyRequest, com.google.protobuf.Empty>
      getDeleteCompanyMethod() {
    io.grpc.MethodDescriptor<
            com.google.cloud.talent.v4beta1.DeleteCompanyRequest, com.google.protobuf.Empty>
        getDeleteCompanyMethod;
    if ((getDeleteCompanyMethod = CompanyServiceGrpc.getDeleteCompanyMethod) == null) {
      synchronized (CompanyServiceGrpc.class) {
        if ((getDeleteCompanyMethod = CompanyServiceGrpc.getDeleteCompanyMethod) == null) {
          CompanyServiceGrpc.getDeleteCompanyMethod =
              getDeleteCompanyMethod =
                  io.grpc.MethodDescriptor
                      .<com.google.cloud.talent.v4beta1.DeleteCompanyRequest,
                          com.google.protobuf.Empty>
                          newBuilder()
                      .setType(io.grpc.MethodDescriptor.MethodType.UNARY)
                      .setFullMethodName(generateFullMethodName(SERVICE_NAME, "DeleteCompany"))
                      .setSampledToLocalTracing(true)
                      .setRequestMarshaller(
                          io.grpc.protobuf.ProtoUtils.marshaller(
                              com.google.cloud.talent.v4beta1.DeleteCompanyRequest
                                  .getDefaultInstance()))
                      .setResponseMarshaller(
                          io.grpc.protobuf.ProtoUtils.marshaller(
                              com.google.protobuf.Empty.getDefaultInstance()))
                      .setSchemaDescriptor(
                          new CompanyServiceMethodDescriptorSupplier("DeleteCompany"))
                      .build();
        }
      }
    }
    return getDeleteCompanyMethod;
  }

  private static volatile io.grpc.MethodDescriptor<
          com.google.cloud.talent.v4beta1.ListCompaniesRequest,
          com.google.cloud.talent.v4beta1.ListCompaniesResponse>
      getListCompaniesMethod;

  @io.grpc.stub.annotations.RpcMethod(
      fullMethodName = SERVICE_NAME + '/' + "ListCompanies",
      requestType = com.google.cloud.talent.v4beta1.ListCompaniesRequest.class,
      responseType = com.google.cloud.talent.v4beta1.ListCompaniesResponse.class,
      methodType = io.grpc.MethodDescriptor.MethodType.UNARY)
  public static io.grpc.MethodDescriptor<
          com.google.cloud.talent.v4beta1.ListCompaniesRequest,
          com.google.cloud.talent.v4beta1.ListCompaniesResponse>
      getListCompaniesMethod() {
    io.grpc.MethodDescriptor<
            com.google.cloud.talent.v4beta1.ListCompaniesRequest,
            com.google.cloud.talent.v4beta1.ListCompaniesResponse>
        getListCompaniesMethod;
    if ((getListCompaniesMethod = CompanyServiceGrpc.getListCompaniesMethod) == null) {
      synchronized (CompanyServiceGrpc.class) {
        if ((getListCompaniesMethod = CompanyServiceGrpc.getListCompaniesMethod) == null) {
          CompanyServiceGrpc.getListCompaniesMethod =
              getListCompaniesMethod =
                  io.grpc.MethodDescriptor
                      .<com.google.cloud.talent.v4beta1.ListCompaniesRequest,
                          com.google.cloud.talent.v4beta1.ListCompaniesResponse>
                          newBuilder()
                      .setType(io.grpc.MethodDescriptor.MethodType.UNARY)
                      .setFullMethodName(generateFullMethodName(SERVICE_NAME, "ListCompanies"))
                      .setSampledToLocalTracing(true)
                      .setRequestMarshaller(
                          io.grpc.protobuf.ProtoUtils.marshaller(
                              com.google.cloud.talent.v4beta1.ListCompaniesRequest
                                  .getDefaultInstance()))
                      .setResponseMarshaller(
                          io.grpc.protobuf.ProtoUtils.marshaller(
                              com.google.cloud.talent.v4beta1.ListCompaniesResponse
                                  .getDefaultInstance()))
                      .setSchemaDescriptor(
                          new CompanyServiceMethodDescriptorSupplier("ListCompanies"))
                      .build();
        }
      }
    }
    return getListCompaniesMethod;
  }

  /** Creates a new async stub that supports all call types for the service */
  public static CompanyServiceStub newStub(io.grpc.Channel channel) {
    io.grpc.stub.AbstractStub.StubFactory<CompanyServiceStub> factory =
        new io.grpc.stub.AbstractStub.StubFactory<CompanyServiceStub>() {
          @java.lang.Override
          public CompanyServiceStub newStub(
              io.grpc.Channel channel, io.grpc.CallOptions callOptions) {
            return new CompanyServiceStub(channel, callOptions);
          }
        };
    return CompanyServiceStub.newStub(factory, channel);
  }

  /** Creates a new blocking-style stub that supports all types of calls on the service */
  public static CompanyServiceBlockingV2Stub newBlockingV2Stub(io.grpc.Channel channel) {
    io.grpc.stub.AbstractStub.StubFactory<CompanyServiceBlockingV2Stub> factory =
        new io.grpc.stub.AbstractStub.StubFactory<CompanyServiceBlockingV2Stub>() {
          @java.lang.Override
          public CompanyServiceBlockingV2Stub newStub(
              io.grpc.Channel channel, io.grpc.CallOptions callOptions) {
            return new CompanyServiceBlockingV2Stub(channel, callOptions);
          }
        };
    return CompanyServiceBlockingV2Stub.newStub(factory, channel);
  }

  /**
   * Creates a new blocking-style stub that supports unary and streaming output calls on the service
   */
  public static CompanyServiceBlockingStub newBlockingStub(io.grpc.Channel channel) {
    io.grpc.stub.AbstractStub.StubFactory<CompanyServiceBlockingStub> factory =
        new io.grpc.stub.AbstractStub.StubFactory<CompanyServiceBlockingStub>() {
          @java.lang.Override
          public CompanyServiceBlockingStub newStub(
              io.grpc.Channel channel, io.grpc.CallOptions callOptions) {
            return new CompanyServiceBlockingStub(channel, callOptions);
          }
        };
    return CompanyServiceBlockingStub.newStub(factory, channel);
  }

  /** Creates a new ListenableFuture-style stub that supports unary calls on the service */
  public static CompanyServiceFutureStub newFutureStub(io.grpc.Channel channel) {
    io.grpc.stub.AbstractStub.StubFactory<CompanyServiceFutureStub> factory =
        new io.grpc.stub.AbstractStub.StubFactory<CompanyServiceFutureStub>() {
          @java.lang.Override
          public CompanyServiceFutureStub newStub(
              io.grpc.Channel channel, io.grpc.CallOptions callOptions) {
            return new CompanyServiceFutureStub(channel, callOptions);
          }
        };
    return CompanyServiceFutureStub.newStub(factory, channel);
  }

  /**
   *
   *
   * <pre>
   * A service that handles company management, including CRUD and enumeration.
   * </pre>
   */
  public interface AsyncService {

    /**
     *
     *
     * <pre>
     * Creates a new company entity.
     * </pre>
     */
    default void createCompany(
        com.google.cloud.talent.v4beta1.CreateCompanyRequest request,
        io.grpc.stub.StreamObserver<com.google.cloud.talent.v4beta1.Company> responseObserver) {
      io.grpc.stub.ServerCalls.asyncUnimplementedUnaryCall(
          getCreateCompanyMethod(), responseObserver);
    }

    /**
     *
     *
     * <pre>
     * Retrieves specified company.
     * </pre>
     */
    default void getCompany(
        com.google.cloud.talent.v4beta1.GetCompanyRequest request,
        io.grpc.stub.StreamObserver<com.google.cloud.talent.v4beta1.Company> responseObserver) {
      io.grpc.stub.ServerCalls.asyncUnimplementedUnaryCall(getGetCompanyMethod(), responseObserver);
    }

    /**
     *
     *
     * <pre>
     * Updates specified company.
     * </pre>
     */
    default void updateCompany(
        com.google.cloud.talent.v4beta1.UpdateCompanyRequest request,
        io.grpc.stub.StreamObserver<com.google.cloud.talent.v4beta1.Company> responseObserver) {
      io.grpc.stub.ServerCalls.asyncUnimplementedUnaryCall(
          getUpdateCompanyMethod(), responseObserver);
    }

    /**
     *
     *
     * <pre>
     * Deletes specified company.
     * Prerequisite: The company has no jobs associated with it.
     * </pre>
     */
    default void deleteCompany(
        com.google.cloud.talent.v4beta1.DeleteCompanyRequest request,
        io.grpc.stub.StreamObserver<com.google.protobuf.Empty> responseObserver) {
      io.grpc.stub.ServerCalls.asyncUnimplementedUnaryCall(
          getDeleteCompanyMethod(), responseObserver);
    }

    /**
     *
     *
     * <pre>
     * Lists all companies associated with the project.
     * </pre>
     */
    default void listCompanies(
        com.google.cloud.talent.v4beta1.ListCompaniesRequest request,
        io.grpc.stub.StreamObserver<com.google.cloud.talent.v4beta1.ListCompaniesResponse>
            responseObserver) {
      io.grpc.stub.ServerCalls.asyncUnimplementedUnaryCall(
          getListCompaniesMethod(), responseObserver);
    }
  }

  /**
   * Base class for the server implementation of the service CompanyService.
   *
   * <pre>
   * A service that handles company management, including CRUD and enumeration.
   * </pre>
   */
  public abstract static class CompanyServiceImplBase
      implements io.grpc.BindableService, AsyncService {

    @java.lang.Override
    public final io.grpc.ServerServiceDefinition bindService() {
      return CompanyServiceGrpc.bindService(this);
    }
  }

  /**
   * A stub to allow clients to do asynchronous rpc calls to service CompanyService.
   *
   * <pre>
   * A service that handles company management, including CRUD and enumeration.
   * </pre>
   */
  public static final class CompanyServiceStub
      extends io.grpc.stub.AbstractAsyncStub<CompanyServiceStub> {
    private CompanyServiceStub(io.grpc.Channel channel, io.grpc.CallOptions callOptions) {
      super(channel, callOptions);
    }

    @java.lang.Override
    protected CompanyServiceStub build(io.grpc.Channel channel, io.grpc.CallOptions callOptions) {
      return new CompanyServiceStub(channel, callOptions);
    }

    /**
     *
     *
     * <pre>
     * Creates a new company entity.
     * </pre>
     */
    public void createCompany(
        com.google.cloud.talent.v4beta1.CreateCompanyRequest request,
        io.grpc.stub.StreamObserver<com.google.cloud.talent.v4beta1.Company> responseObserver) {
      io.grpc.stub.ClientCalls.asyncUnaryCall(
          getChannel().newCall(getCreateCompanyMethod(), getCallOptions()),
          request,
          responseObserver);
    }

    /**
     *
     *
     * <pre>
     * Retrieves specified company.
     * </pre>
     */
    public void getCompany(
        com.google.cloud.talent.v4beta1.GetCompanyRequest request,
        io.grpc.stub.StreamObserver<com.google.cloud.talent.v4beta1.Company> responseObserver) {
      io.grpc.stub.ClientCalls.asyncUnaryCall(
          getChannel().newCall(getGetCompanyMethod(), getCallOptions()), request, responseObserver);
    }

    /**
     *
     *
     * <pre>
     * Updates specified company.
     * </pre>
     */
    public void updateCompany(
        com.google.cloud.talent.v4beta1.UpdateCompanyRequest request,
        io.grpc.stub.StreamObserver<com.google.cloud.talent.v4beta1.Company> responseObserver) {
      io.grpc.stub.ClientCalls.asyncUnaryCall(
          getChannel().newCall(getUpdateCompanyMethod(), getCallOptions()),
          request,
          responseObserver);
    }

    /**
     *
     *
     * <pre>
     * Deletes specified company.
     * Prerequisite: The company has no jobs associated with it.
     * </pre>
     */
    public void deleteCompany(
        com.google.cloud.talent.v4beta1.DeleteCompanyRequest request,
        io.grpc.stub.StreamObserver<com.google.protobuf.Empty> responseObserver) {
      io.grpc.stub.ClientCalls.asyncUnaryCall(
          getChannel().newCall(getDeleteCompanyMethod(), getCallOptions()),
          request,
          responseObserver);
    }

    /**
     *
     *
     * <pre>
     * Lists all companies associated with the project.
     * </pre>
     */
    public void listCompanies(
        com.google.cloud.talent.v4beta1.ListCompaniesRequest request,
        io.grpc.stub.StreamObserver<com.google.cloud.talent.v4beta1.ListCompaniesResponse>
            responseObserver) {
      io.grpc.stub.ClientCalls.asyncUnaryCall(
          getChannel().newCall(getListCompaniesMethod(), getCallOptions()),
          request,
          responseObserver);
    }
  }

  /**
   * A stub to allow clients to do synchronous rpc calls to service CompanyService.
   *
   * <pre>
   * A service that handles company management, including CRUD and enumeration.
   * </pre>
   */
  public static final class CompanyServiceBlockingV2Stub
      extends io.grpc.stub.AbstractBlockingStub<CompanyServiceBlockingV2Stub> {
    private CompanyServiceBlockingV2Stub(io.grpc.Channel channel, io.grpc.CallOptions callOptions) {
      super(channel, callOptions);
    }

    @java.lang.Override
    protected CompanyServiceBlockingV2Stub build(
        io.grpc.Channel channel, io.grpc.CallOptions callOptions) {
      return new CompanyServiceBlockingV2Stub(channel, callOptions);
    }

    /**
     *
     *
     * <pre>
     * Creates a new company entity.
     * </pre>
     */
    public com.google.cloud.talent.v4beta1.Company createCompany(
        com.google.cloud.talent.v4beta1.CreateCompanyRequest request) {
      return io.grpc.stub.ClientCalls.blockingUnaryCall(
          getChannel(), getCreateCompanyMethod(), getCallOptions(), request);
    }

    /**
     *
     *
     * <pre>
     * Retrieves specified company.
     * </pre>
     */
    public com.google.cloud.talent.v4beta1.Company getCompany(
        com.google.cloud.talent.v4beta1.GetCompanyRequest request) {
      return io.grpc.stub.ClientCalls.blockingUnaryCall(
          getChannel(), getGetCompanyMethod(), getCallOptions(), request);
    }

    /**
     *
     *
     * <pre>
     * Updates specified company.
     * </pre>
     */
    public com.google.cloud.talent.v4beta1.Company updateCompany(
        com.google.cloud.talent.v4beta1.UpdateCompanyRequest request) {
      return io.grpc.stub.ClientCalls.blockingUnaryCall(
          getChannel(), getUpdateCompanyMethod(), getCallOptions(), request);
    }

    /**
     *
     *
     * <pre>
     * Deletes specified company.
     * Prerequisite: The company has no jobs associated with it.
     * </pre>
     */
    public com.google.protobuf.Empty deleteCompany(
        com.google.cloud.talent.v4beta1.DeleteCompanyRequest request) {
      return io.grpc.stub.ClientCalls.blockingUnaryCall(
          getChannel(), getDeleteCompanyMethod(), getCallOptions(), request);
    }

    /**
     *
     *
     * <pre>
     * Lists all companies associated with the project.
     * </pre>
     */
    public com.google.cloud.talent.v4beta1.ListCompaniesResponse listCompanies(
        com.google.cloud.talent.v4beta1.ListCompaniesRequest request) {
      return io.grpc.stub.ClientCalls.blockingUnaryCall(
          getChannel(), getListCompaniesMethod(), getCallOptions(), request);
    }
  }

  /**
   * A stub to allow clients to do limited synchronous rpc calls to service CompanyService.
   *
   * <pre>
   * A service that handles company management, including CRUD and enumeration.
   * </pre>
   */
  public static final class CompanyServiceBlockingStub
      extends io.grpc.stub.AbstractBlockingStub<CompanyServiceBlockingStub> {
    private CompanyServiceBlockingStub(io.grpc.Channel channel, io.grpc.CallOptions callOptions) {
      super(channel, callOptions);
    }

    @java.lang.Override
    protected CompanyServiceBlockingStub build(
        io.grpc.Channel channel, io.grpc.CallOptions callOptions) {
      return new CompanyServiceBlockingStub(channel, callOptions);
    }

    /**
     *
     *
     * <pre>
     * Creates a new company entity.
     * </pre>
     */
    public com.google.cloud.talent.v4beta1.Company createCompany(
        com.google.cloud.talent.v4beta1.CreateCompanyRequest request) {
      return io.grpc.stub.ClientCalls.blockingUnaryCall(
          getChannel(), getCreateCompanyMethod(), getCallOptions(), request);
    }

    /**
     *
     *
     * <pre>
     * Retrieves specified company.
     * </pre>
     */
    public com.google.cloud.talent.v4beta1.Company getCompany(
        com.google.cloud.talent.v4beta1.GetCompanyRequest request) {
      return io.grpc.stub.ClientCalls.blockingUnaryCall(
          getChannel(), getGetCompanyMethod(), getCallOptions(), request);
    }

    /**
     *
     *
     * <pre>
     * Updates specified company.
     * </pre>
     */
    public com.google.cloud.talent.v4beta1.Company updateCompany(
        com.google.cloud.talent.v4beta1.UpdateCompanyRequest request) {
      return io.grpc.stub.ClientCalls.blockingUnaryCall(
          getChannel(), getUpdateCompanyMethod(), getCallOptions(), request);
    }

    /**
     *
     *
     * <pre>
     * Deletes specified company.
     * Prerequisite: The company has no jobs associated with it.
     * </pre>
     */
    public com.google.protobuf.Empty deleteCompany(
        com.google.cloud.talent.v4beta1.DeleteCompanyRequest request) {
      return io.grpc.stub.ClientCalls.blockingUnaryCall(
          getChannel(), getDeleteCompanyMethod(), getCallOptions(), request);
    }

    /**
     *
     *
     * <pre>
     * Lists all companies associated with the project.
     * </pre>
     */
    public com.google.cloud.talent.v4beta1.ListCompaniesResponse listCompanies(
        com.google.cloud.talent.v4beta1.ListCompaniesRequest request) {
      return io.grpc.stub.ClientCalls.blockingUnaryCall(
          getChannel(), getListCompaniesMethod(), getCallOptions(), request);
    }
  }

  /**
   * A stub to allow clients to do ListenableFuture-style rpc calls to service CompanyService.
   *
   * <pre>
   * A service that handles company management, including CRUD and enumeration.
   * </pre>
   */
  public static final class CompanyServiceFutureStub
      extends io.grpc.stub.AbstractFutureStub<CompanyServiceFutureStub> {
    private CompanyServiceFutureStub(io.grpc.Channel channel, io.grpc.CallOptions callOptions) {
      super(channel, callOptions);
    }

    @java.lang.Override
    protected CompanyServiceFutureStub build(
        io.grpc.Channel channel, io.grpc.CallOptions callOptions) {
      return new CompanyServiceFutureStub(channel, callOptions);
    }

    /**
     *
     *
     * <pre>
     * Creates a new company entity.
     * </pre>
     */
    public com.google.common.util.concurrent.ListenableFuture<
            com.google.cloud.talent.v4beta1.Company>
        createCompany(com.google.cloud.talent.v4beta1.CreateCompanyRequest request) {
      return io.grpc.stub.ClientCalls.futureUnaryCall(
          getChannel().newCall(getCreateCompanyMethod(), getCallOptions()), request);
    }

    /**
     *
     *
     * <pre>
     * Retrieves specified company.
     * </pre>
     */
    public com.google.common.util.concurrent.ListenableFuture<
            com.google.cloud.talent.v4beta1.Company>
        getCompany(com.google.cloud.talent.v4beta1.GetCompanyRequest request) {
      return io.grpc.stub.ClientCalls.futureUnaryCall(
          getChannel().newCall(getGetCompanyMethod(), getCallOptions()), request);
    }

    /**
     *
     *
     * <pre>
     * Updates specified company.
     * </pre>
     */
    public com.google.common.util.concurrent.ListenableFuture<
            com.google.cloud.talent.v4beta1.Company>
        updateCompany(com.google.cloud.talent.v4beta1.UpdateCompanyRequest request) {
      return io.grpc.stub.ClientCalls.futureUnaryCall(
          getChannel().newCall(getUpdateCompanyMethod(), getCallOptions()), request);
    }

    /**
     *
     *
     * <pre>
     * Deletes specified company.
     * Prerequisite: The company has no jobs associated with it.
     * </pre>
     */
    public com.google.common.util.concurrent.ListenableFuture<com.google.protobuf.Empty>
        deleteCompany(com.google.cloud.talent.v4beta1.DeleteCompanyRequest request) {
      return io.grpc.stub.ClientCalls.futureUnaryCall(
          getChannel().newCall(getDeleteCompanyMethod(), getCallOptions()), request);
    }

    /**
     *
     *
     * <pre>
     * Lists all companies associated with the project.
     * </pre>
     */
    public com.google.common.util.concurrent.ListenableFuture<
            com.google.cloud.talent.v4beta1.ListCompaniesResponse>
        listCompanies(com.google.cloud.talent.v4beta1.ListCompaniesRequest request) {
      return io.grpc.stub.ClientCalls.futureUnaryCall(
          getChannel().newCall(getListCompaniesMethod(), getCallOptions()), request);
    }
  }

  private static final int METHODID_CREATE_COMPANY = 0;
  private static final int METHODID_GET_COMPANY = 1;
  private static final int METHODID_UPDATE_COMPANY = 2;
  private static final int METHODID_DELETE_COMPANY = 3;
  private static final int METHODID_LIST_COMPANIES = 4;

  // Dispatches incoming server calls to the AsyncService implementation,
  // selecting the target method by the integer method id above.
  private static final class MethodHandlers<Req, Resp>
      implements io.grpc.stub.ServerCalls.UnaryMethod<Req, Resp>,
          io.grpc.stub.ServerCalls.ServerStreamingMethod<Req, Resp>,
          io.grpc.stub.ServerCalls.ClientStreamingMethod<Req, Resp>,
          io.grpc.stub.ServerCalls.BidiStreamingMethod<Req, Resp> {
    private final AsyncService serviceImpl;
    private final int methodId;

    MethodHandlers(AsyncService serviceImpl, int methodId) {
      this.serviceImpl = serviceImpl;
      this.methodId = methodId;
    }

    @java.lang.Override
    @java.lang.SuppressWarnings("unchecked")
    public void invoke(Req request, io.grpc.stub.StreamObserver<Resp> responseObserver) {
      switch (methodId) {
        case METHODID_CREATE_COMPANY:
          serviceImpl.createCompany(
              (com.google.cloud.talent.v4beta1.CreateCompanyRequest) request,
              (io.grpc.stub.StreamObserver<com.google.cloud.talent.v4beta1.Company>)
                  responseObserver);
          break;
        case METHODID_GET_COMPANY:
          serviceImpl.getCompany(
              (com.google.cloud.talent.v4beta1.GetCompanyRequest) request,
              (io.grpc.stub.StreamObserver<com.google.cloud.talent.v4beta1.Company>)
                  responseObserver);
          break;
        case METHODID_UPDATE_COMPANY:
          serviceImpl.updateCompany(
              (com.google.cloud.talent.v4beta1.UpdateCompanyRequest) request,
              (io.grpc.stub.StreamObserver<com.google.cloud.talent.v4beta1.Company>)
                  responseObserver);
          break;
        case METHODID_DELETE_COMPANY:
          serviceImpl.deleteCompany(
              (com.google.cloud.talent.v4beta1.DeleteCompanyRequest) request,
              (io.grpc.stub.StreamObserver<com.google.protobuf.Empty>) responseObserver);
          break;
        case METHODID_LIST_COMPANIES:
          serviceImpl.listCompanies(
              (com.google.cloud.talent.v4beta1.ListCompaniesRequest) request,
              (io.grpc.stub.StreamObserver<com.google.cloud.talent.v4beta1.ListCompaniesResponse>)
                  responseObserver);
          break;
        default:
          throw new AssertionError();
      }
    }

    @java.lang.Override
    @java.lang.SuppressWarnings("unchecked")
    public io.grpc.stub.StreamObserver<Req> invoke(
        io.grpc.stub.StreamObserver<Resp> responseObserver) {
      // All RPCs on this service are unary, so no streaming entry point exists.
      switch (methodId) {
        default:
          throw new AssertionError();
      }
    }
  }

  // Builds the server-side service definition, wiring each RPC method to its handler.
  public static final io.grpc.ServerServiceDefinition bindService(AsyncService service) {
    return io.grpc.ServerServiceDefinition.builder(getServiceDescriptor())
        .addMethod(
            getCreateCompanyMethod(),
            io.grpc.stub.ServerCalls.asyncUnaryCall(
                new MethodHandlers<
                    com.google.cloud.talent.v4beta1.CreateCompanyRequest,
                    com.google.cloud.talent.v4beta1.Company>(service, METHODID_CREATE_COMPANY)))
        .addMethod(
            getGetCompanyMethod(),
            io.grpc.stub.ServerCalls.asyncUnaryCall(
                new MethodHandlers<
                    com.google.cloud.talent.v4beta1.GetCompanyRequest,
                    com.google.cloud.talent.v4beta1.Company>(service, METHODID_GET_COMPANY)))
        .addMethod(
            getUpdateCompanyMethod(),
            io.grpc.stub.ServerCalls.asyncUnaryCall(
                new MethodHandlers<
                    com.google.cloud.talent.v4beta1.UpdateCompanyRequest,
                    com.google.cloud.talent.v4beta1.Company>(service, METHODID_UPDATE_COMPANY)))
        .addMethod(
            getDeleteCompanyMethod(),
            io.grpc.stub.ServerCalls.asyncUnaryCall(
                new MethodHandlers<
                    com.google.cloud.talent.v4beta1.DeleteCompanyRequest,
                    com.google.protobuf.Empty>(service, METHODID_DELETE_COMPANY)))
        .addMethod(
            getListCompaniesMethod(),
            io.grpc.stub.ServerCalls.asyncUnaryCall(
                new MethodHandlers<
                    com.google.cloud.talent.v4beta1.ListCompaniesRequest,
                    com.google.cloud.talent.v4beta1.ListCompaniesResponse>(
                    service, METHODID_LIST_COMPANIES)))
        .build();
  }

  private abstract static class CompanyServiceBaseDescriptorSupplier
      implements io.grpc.protobuf.ProtoFileDescriptorSupplier,
          io.grpc.protobuf.ProtoServiceDescriptorSupplier {
    CompanyServiceBaseDescriptorSupplier() {}

    @java.lang.Override
    public com.google.protobuf.Descriptors.FileDescriptor getFileDescriptor() {
      return com.google.cloud.talent.v4beta1.CompanyServiceProto.getDescriptor();
    }

    @java.lang.Override
    public com.google.protobuf.Descriptors.ServiceDescriptor getServiceDescriptor() {
      return getFileDescriptor().findServiceByName("CompanyService");
    }
  }

  private static final class CompanyServiceFileDescriptorSupplier
      extends CompanyServiceBaseDescriptorSupplier {
    CompanyServiceFileDescriptorSupplier() {}
  }

  private static final class CompanyServiceMethodDescriptorSupplier
      extends CompanyServiceBaseDescriptorSupplier
      implements io.grpc.protobuf.ProtoMethodDescriptorSupplier {
    private final java.lang.String methodName;

    CompanyServiceMethodDescriptorSupplier(java.lang.String methodName) {
      this.methodName = methodName;
    }

    @java.lang.Override
    public com.google.protobuf.Descriptors.MethodDescriptor getMethodDescriptor() {
      return getServiceDescriptor().findMethodByName(methodName);
    }
  }

  private static volatile io.grpc.ServiceDescriptor serviceDescriptor;

  // Lazily builds the aggregate service descriptor (double-checked locking on the class object).
  public static io.grpc.ServiceDescriptor getServiceDescriptor() {
    io.grpc.ServiceDescriptor result = serviceDescriptor;
    if (result == null) {
      synchronized (CompanyServiceGrpc.class) {
        result = serviceDescriptor;
        if (result == null) {
          serviceDescriptor =
              result =
                  io.grpc.ServiceDescriptor.newBuilder(SERVICE_NAME)
                      .setSchemaDescriptor(new CompanyServiceFileDescriptorSupplier())
                      .addMethod(getCreateCompanyMethod())
                      .addMethod(getGetCompanyMethod())
                      .addMethod(getUpdateCompanyMethod())
                      .addMethod(getDeleteCompanyMethod())
                      .addMethod(getListCompaniesMethod())
                      .build();
        }
      }
    }
    return result;
  }
}
apache/maven-resolver
36,414
maven-resolver-api/src/main/java/org/eclipse/aether/DefaultRepositorySystemSession.java
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied.  See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
package org.eclipse.aether;

import java.util.Collections;
import java.util.HashMap;
import java.util.Map;
import java.util.function.Function;

import org.eclipse.aether.artifact.ArtifactType;
import org.eclipse.aether.artifact.ArtifactTypeRegistry;
import org.eclipse.aether.collection.DependencyGraphTransformer;
import org.eclipse.aether.collection.DependencyManager;
import org.eclipse.aether.collection.DependencySelector;
import org.eclipse.aether.collection.DependencyTraverser;
import org.eclipse.aether.collection.VersionFilter;
import org.eclipse.aether.repository.Authentication;
import org.eclipse.aether.repository.AuthenticationSelector;
import org.eclipse.aether.repository.LocalRepository;
import org.eclipse.aether.repository.LocalRepositoryManager;
import org.eclipse.aether.repository.MirrorSelector;
import org.eclipse.aether.repository.Proxy;
import org.eclipse.aether.repository.ProxySelector;
import org.eclipse.aether.repository.RemoteRepository;
import org.eclipse.aether.repository.RepositoryPolicy;
import org.eclipse.aether.repository.WorkspaceReader;
import org.eclipse.aether.resolution.ArtifactDescriptorPolicy;
import org.eclipse.aether.resolution.ResolutionErrorPolicy;
import org.eclipse.aether.scope.ScopeManager;
import org.eclipse.aether.scope.SystemDependencyScope;
import org.eclipse.aether.transfer.TransferListener;

import static java.util.Objects.requireNonNull;

/**
 * A legacy repository system session. It is usable to "derive" sessions from existing session instances (using
 * copy-constructor), but the recommended way to derive sessions is using
 * {@link org.eclipse.aether.RepositorySystemSession.SessionBuilder#withRepositorySystemSession(RepositorySystemSession)}
 * instead.
 * <p>
 * <em>Important: while the default constructor on this class is deprecated only, it is left only to guarantee
 * backward compatibility with legacy code, but the default constructor should not be used anymore. Using that
 * constructor will lead to resource leaks.</em>
 * <p>
 * <strong>Note:</strong> This class is not thread-safe. It is assumed that the mutators get only called during an
 * initialization phase and that the session itself is not changed once initialized and being used by the repository
 * system. It is recommended to call {@link #setReadOnly()} once the session has been fully initialized to prevent
 * accidental manipulation of it afterward.
 *
 * @see RepositorySystem#createSessionBuilder()
 * @see RepositorySystemSession.SessionBuilder
 * @see RepositorySystemSession.CloseableSession
 */
public final class DefaultRepositorySystemSession implements RepositorySystemSession {
    // Marks the session immutable once initialization is done; see the class javadoc
    // note about calling setReadOnly().
    private boolean readOnly;

    private boolean offline;

    private boolean ignoreArtifactDescriptorRepositories;

    private ResolutionErrorPolicy resolutionErrorPolicy;

    private ArtifactDescriptorPolicy artifactDescriptorPolicy;

    private String checksumPolicy;

    private String artifactUpdatePolicy;

    private String metadataUpdatePolicy;

    private LocalRepositoryManager localRepositoryManager;

    private WorkspaceReader workspaceReader;

    private RepositoryListener repositoryListener;

    private TransferListener transferListener;

    // Backing maps plus the unmodifiable views created over them in the constructor;
    // the views track the backing maps live (Collections.unmodifiableMap).
    private Map<String, String> systemProperties;

    private Map<String, String> systemPropertiesView;

    private Map<String, String> userProperties;

    private Map<String, String> userPropertiesView;

    private Map<String, Object> configProperties;

    private Map<String, Object> configPropertiesView;

    private MirrorSelector mirrorSelector;

    private ProxySelector proxySelector;

    private AuthenticationSelector authenticationSelector;

    private ArtifactTypeRegistry artifactTypeRegistry;

    private DependencyTraverser dependencyTraverser;

    private DependencyManager dependencyManager;

    private DependencySelector dependencySelector;

    private VersionFilter versionFilter;

    private DependencyGraphTransformer dependencyGraphTransformer;

    private SessionData data;

    private RepositoryCache cache;

    private ScopeManager scopeManager;

    // Registrar invoked to attach "session ended" callbacks; returns whether the
    // handler was accepted. Supplied at construction time and never null.
    private final Function<Runnable, Boolean> onSessionEndedRegistrar;

    /**
     * Creates an uninitialized session. <em>Note:</em> The new session is not ready to use, as a bare minimum,
     * {@link #setLocalRepositoryManager(LocalRepositoryManager)} needs to be called but usually other settings also
     * need to be customized to achieve meaningful behavior.
     *
     * @deprecated This way of creating session should be avoided, is in place just to offer backward binary
     * compatibility with Resolver 1.x using code, but offers reduced functionality.
     * Use {@link RepositorySystem#createSessionBuilder()} instead.
     */
    @Deprecated
    public DefaultRepositorySystemSession() {
        // Registrar that rejects all handlers: legacy sessions have no lifecycle owner.
        this(h -> false);
    }

    /**
     * Creates an uninitialized session. <em>Note:</em> The new session is not ready to use, as a bare minimum,
     * {@link #setLocalRepositoryManager(LocalRepositoryManager)} needs to be called but usually other settings also
     * need to be customized to achieve meaningful behavior.
     * <p>
     * Note: preferred way to create sessions is {@link RepositorySystem#createSessionBuilder()}, as then client code
     * does not have to fiddle with session close callbacks. This constructor is meant more for testing purposes.
     *
     * @since 2.0.0
     */
    public DefaultRepositorySystemSession(Function<Runnable, Boolean> onSessionEndedRegistrar) {
        systemProperties = new HashMap<>();
        systemPropertiesView = Collections.unmodifiableMap(systemProperties);
        userProperties = new HashMap<>();
        userPropertiesView = Collections.unmodifiableMap(userProperties);
        configProperties = new HashMap<>();
        configPropertiesView = Collections.unmodifiableMap(configProperties);
        // Default selector/registry singletons (Null*/Passthrough* implementations
        // declared elsewhere in this file).
        mirrorSelector = NullMirrorSelector.INSTANCE;
        proxySelector = PassthroughProxySelector.INSTANCE;
        authenticationSelector = PassthroughAuthenticationSelector.INSTANCE;
        artifactTypeRegistry = NullArtifactTypeRegistry.INSTANCE;
        data = new DefaultSessionData();
        this.onSessionEndedRegistrar = requireNonNull(onSessionEndedRegistrar, "onSessionEndedRegistrar");
    }

    /**
     * Creates a shallow copy of the specified session. Actually, the copy is not completely shallow, all maps holding
     * system/user/config properties are copied as well. In other words, invoking any mutator on the new session itself
     * has no effect on the original session. Other mutable objects like the session data and cache (if any) are not
     * copied and will be shared with the original session unless reconfigured.
     *
     * @param session The session to copy, must not be {@code null}.
     */
    public DefaultRepositorySystemSession(RepositorySystemSession session) {
        requireNonNull(session, "repository system session cannot be null");

        setOffline(session.isOffline());
        setIgnoreArtifactDescriptorRepositories(session.isIgnoreArtifactDescriptorRepositories());
        setResolutionErrorPolicy(session.getResolutionErrorPolicy());
        setArtifactDescriptorPolicy(session.getArtifactDescriptorPolicy());
        setChecksumPolicy(session.getChecksumPolicy());
        setUpdatePolicy(session.getUpdatePolicy());
        setMetadataUpdatePolicy(session.getMetadataUpdatePolicy());
        setLocalRepositoryManager(session.getLocalRepositoryManager());
        setWorkspaceReader(session.getWorkspaceReader());
        setRepositoryListener(session.getRepositoryListener());
        setTransferListener(session.getTransferListener());
        // The property setters copy the incoming maps, which is what makes this
        // copy "not completely shallow" (see constructor javadoc).
        setSystemProperties(session.getSystemProperties());
        setUserProperties(session.getUserProperties());
        setConfigProperties(session.getConfigProperties());
        setMirrorSelector(session.getMirrorSelector());
        setProxySelector(session.getProxySelector());
        setAuthenticationSelector(session.getAuthenticationSelector());
        setArtifactTypeRegistry(session.getArtifactTypeRegistry());
        setDependencyTraverser(session.getDependencyTraverser());
        setDependencyManager(session.getDependencyManager());
        setDependencySelector(session.getDependencySelector());
        setVersionFilter(session.getVersionFilter());
        setDependencyGraphTransformer(session.getDependencyGraphTransformer());
        setData(session.getData());
        setCache(session.getCache());
        setScopeManager(session.getScopeManager());
        // Derived sessions delegate end-of-session handler registration to the original.
        this.onSessionEndedRegistrar = session::addOnSessionEndedHandler;
    }

    @Override
    public boolean isOffline() {
        return offline;
    }

    /**
     * Controls whether the repository system operates in offline mode and avoids/refuses any access to remote
     * repositories.
* * @param offline {@code true} if the repository system is in offline mode, {@code false} otherwise. * @return This session for chaining, never {@code null}. */ public DefaultRepositorySystemSession setOffline(boolean offline) { verifyStateForMutation(); this.offline = offline; return this; } @Override public boolean isIgnoreArtifactDescriptorRepositories() { return ignoreArtifactDescriptorRepositories; } /** * Controls whether repositories declared in artifact descriptors should be ignored during transitive dependency * collection. If enabled, only the repositories originally provided with the collect request will be considered. * * @param ignoreArtifactDescriptorRepositories {@code true} to ignore additional repositories from artifact * descriptors, {@code false} to merge those with the originally * specified repositories. * @return This session for chaining, never {@code null}. */ public DefaultRepositorySystemSession setIgnoreArtifactDescriptorRepositories( boolean ignoreArtifactDescriptorRepositories) { verifyStateForMutation(); this.ignoreArtifactDescriptorRepositories = ignoreArtifactDescriptorRepositories; return this; } @Override public ResolutionErrorPolicy getResolutionErrorPolicy() { return resolutionErrorPolicy; } /** * Sets the policy which controls whether resolutions errors from remote repositories should be cached. * * @param resolutionErrorPolicy The resolution error policy for this session, may be {@code null} if resolution * errors should generally not be cached. * @return This session for chaining, never {@code null}. */ public DefaultRepositorySystemSession setResolutionErrorPolicy(ResolutionErrorPolicy resolutionErrorPolicy) { verifyStateForMutation(); this.resolutionErrorPolicy = resolutionErrorPolicy; return this; } @Override public ArtifactDescriptorPolicy getArtifactDescriptorPolicy() { return artifactDescriptorPolicy; } /** * Sets the policy which controls how errors related to reading artifact descriptors should be handled. 
* * @param artifactDescriptorPolicy The descriptor error policy for this session, may be {@code null} if descriptor * errors should generally not be tolerated. * @return This session for chaining, never {@code null}. */ public DefaultRepositorySystemSession setArtifactDescriptorPolicy( ArtifactDescriptorPolicy artifactDescriptorPolicy) { verifyStateForMutation(); this.artifactDescriptorPolicy = artifactDescriptorPolicy; return this; } @Override public String getChecksumPolicy() { return checksumPolicy; } /** * Sets the global checksum policy. If set, the global checksum policy overrides the checksum policies of the remote * repositories being used for resolution. * * @param checksumPolicy The global checksum policy, may be {@code null}/empty to apply the per-repository policies. * @return This session for chaining, never {@code null}. * @see RepositoryPolicy#CHECKSUM_POLICY_FAIL * @see RepositoryPolicy#CHECKSUM_POLICY_IGNORE * @see RepositoryPolicy#CHECKSUM_POLICY_WARN */ public DefaultRepositorySystemSession setChecksumPolicy(String checksumPolicy) { verifyStateForMutation(); this.checksumPolicy = checksumPolicy; return this; } @Override public String getUpdatePolicy() { return getArtifactUpdatePolicy(); } /** * Sets the global update policy. If set, the global update policy overrides the update policies of the remote * repositories being used for resolution. * <p> * This method is meant for code that does not want to distinguish between artifact and metadata policies. * Note: applications should either use get/set updatePolicy (this method and * {@link RepositorySystemSession#getUpdatePolicy()}) or also distinguish between artifact and * metadata update policies (and use other methods), but <em>should not mix the two!</em> * * @param updatePolicy The global update policy, may be {@code null}/empty to apply the per-repository policies. * @return This session for chaining, never {@code null}. 
* @see RepositoryPolicy#UPDATE_POLICY_ALWAYS * @see RepositoryPolicy#UPDATE_POLICY_DAILY * @see RepositoryPolicy#UPDATE_POLICY_NEVER * @see #setArtifactUpdatePolicy(String) * @see #setMetadataUpdatePolicy(String) */ public DefaultRepositorySystemSession setUpdatePolicy(String updatePolicy) { verifyStateForMutation(); setArtifactUpdatePolicy(updatePolicy); setMetadataUpdatePolicy(updatePolicy); return this; } @Override public String getArtifactUpdatePolicy() { return artifactUpdatePolicy; } /** * Sets the global artifact update policy. If set, the global update policy overrides the artifact update policies * of the remote repositories being used for resolution. * * @param artifactUpdatePolicy The global update policy, may be {@code null}/empty to apply the per-repository policies. * @return This session for chaining, never {@code null}. * @see RepositoryPolicy#UPDATE_POLICY_ALWAYS * @see RepositoryPolicy#UPDATE_POLICY_DAILY * @see RepositoryPolicy#UPDATE_POLICY_NEVER * @since 2.0.0 */ public DefaultRepositorySystemSession setArtifactUpdatePolicy(String artifactUpdatePolicy) { verifyStateForMutation(); this.artifactUpdatePolicy = artifactUpdatePolicy; return this; } @Override public String getMetadataUpdatePolicy() { return metadataUpdatePolicy; } /** * Sets the global metadata update policy. If set, the global update policy overrides the metadata update policies * of the remote repositories being used for resolution. * * @param metadataUpdatePolicy The global update policy, may be {@code null}/empty to apply the per-repository policies. * @return This session for chaining, never {@code null}. 
* @see RepositoryPolicy#UPDATE_POLICY_ALWAYS * @see RepositoryPolicy#UPDATE_POLICY_DAILY * @see RepositoryPolicy#UPDATE_POLICY_NEVER * @since 2.0.0 */ public DefaultRepositorySystemSession setMetadataUpdatePolicy(String metadataUpdatePolicy) { verifyStateForMutation(); this.metadataUpdatePolicy = metadataUpdatePolicy; return this; } @Override public LocalRepository getLocalRepository() { LocalRepositoryManager lrm = getLocalRepositoryManager(); return (lrm != null) ? lrm.getRepository() : null; } @Override public LocalRepositoryManager getLocalRepositoryManager() { return localRepositoryManager; } /** * Sets the local repository manager used during this session. <em>Note:</em> Eventually, a valid session must have * a local repository manager set. * * @param localRepositoryManager The local repository manager used during this session, may be {@code null}. * @return This session for chaining, never {@code null}. */ public DefaultRepositorySystemSession setLocalRepositoryManager(LocalRepositoryManager localRepositoryManager) { verifyStateForMutation(); this.localRepositoryManager = localRepositoryManager; return this; } @Override public WorkspaceReader getWorkspaceReader() { return workspaceReader; } /** * Sets the workspace reader used during this session. If set, the workspace reader will usually be consulted first * to resolve artifacts. * * @param workspaceReader The workspace reader for this session, may be {@code null} if none. * @return This session for chaining, never {@code null}. */ public DefaultRepositorySystemSession setWorkspaceReader(WorkspaceReader workspaceReader) { verifyStateForMutation(); this.workspaceReader = workspaceReader; return this; } @Override public RepositoryListener getRepositoryListener() { return repositoryListener; } /** * Sets the listener being notified of actions in the repository system. * * @param repositoryListener The repository listener, may be {@code null} if none. * @return This session for chaining, never {@code null}. 
*/ public DefaultRepositorySystemSession setRepositoryListener(RepositoryListener repositoryListener) { verifyStateForMutation(); this.repositoryListener = repositoryListener; return this; } @Override public TransferListener getTransferListener() { return transferListener; } /** * Sets the listener being notified of uploads/downloads by the repository system. * * @param transferListener The transfer listener, may be {@code null} if none. * @return This session for chaining, never {@code null}. */ public DefaultRepositorySystemSession setTransferListener(TransferListener transferListener) { verifyStateForMutation(); this.transferListener = transferListener; return this; } private <T> Map<String, T> copySafe(Map<?, ?> table, Class<T> valueType) { Map<String, T> map; if (table == null || table.isEmpty()) { map = new HashMap<>(); } else { map = new HashMap<>((int) (table.size() / 0.75f) + 1); for (Map.Entry<?, ?> entry : table.entrySet()) { Object key = entry.getKey(); if (key instanceof String) { Object value = entry.getValue(); if (valueType.isInstance(value)) { map.put(key.toString(), valueType.cast(value)); } } } } return map; } @Override public Map<String, String> getSystemProperties() { return systemPropertiesView; } /** * Sets the system properties to use, e.g. for processing of artifact descriptors. System properties are usually * collected from the runtime environment like {@link System#getProperties()} and environment variables. * <p> * <em>Note:</em> System properties are of type {@code Map<String, String>} and any key-value pair in the input map * that doesn't match this type will be silently ignored. * * @param systemProperties The system properties, may be {@code null} or empty if none. * @return This session for chaining, never {@code null}. 
*/ public DefaultRepositorySystemSession setSystemProperties(Map<?, ?> systemProperties) { verifyStateForMutation(); this.systemProperties = copySafe(systemProperties, String.class); systemPropertiesView = Collections.unmodifiableMap(this.systemProperties); return this; } /** * Sets the specified system property. * * @param key The property key, must not be {@code null}. * @param value The property value, may be {@code null} to remove/unset the property. * @return This session for chaining, never {@code null}. */ public DefaultRepositorySystemSession setSystemProperty(String key, String value) { verifyStateForMutation(); if (value != null) { systemProperties.put(key, value); } else { systemProperties.remove(key); } return this; } @Override public Map<String, String> getUserProperties() { return userPropertiesView; } /** * Sets the user properties to use, e.g. for processing of artifact descriptors. User properties are similar to * system properties but are set on the discretion of the user and hence are considered of higher priority than * system properties in case of conflicts. * <p> * <em>Note:</em> User properties are of type {@code Map<String, String>} and any key-value pair in the input map * that doesn't match this type will be silently ignored. * * @param userProperties The user properties, may be {@code null} or empty if none. * @return This session for chaining, never {@code null}. */ public DefaultRepositorySystemSession setUserProperties(Map<?, ?> userProperties) { verifyStateForMutation(); this.userProperties = copySafe(userProperties, String.class); userPropertiesView = Collections.unmodifiableMap(this.userProperties); return this; } /** * Sets the specified user property. * * @param key The property key, must not be {@code null}. * @param value The property value, may be {@code null} to remove/unset the property. * @return This session for chaining, never {@code null}. 
*/ public DefaultRepositorySystemSession setUserProperty(String key, String value) { verifyStateForMutation(); if (value != null) { userProperties.put(key, value); } else { userProperties.remove(key); } return this; } @Override public Map<String, Object> getConfigProperties() { return configPropertiesView; } /** * Sets the configuration properties used to tweak internal aspects of the repository system (e.g. thread pooling, * connector-specific behavior, etc.). * <p> * <em>Note:</em> Configuration properties are of type {@code Map<String, Object>} and any key-value pair in the * input map that doesn't match this type will be silently ignored. * * @param configProperties The configuration properties, may be {@code null} or empty if none. * @return This session for chaining, never {@code null}. */ public DefaultRepositorySystemSession setConfigProperties(Map<?, ?> configProperties) { verifyStateForMutation(); this.configProperties = copySafe(configProperties, Object.class); configPropertiesView = Collections.unmodifiableMap(this.configProperties); return this; } /** * Sets the specified configuration property. * * @param key The property key, must not be {@code null}. * @param value The property value, may be {@code null} to remove/unset the property. * @return This session for chaining, never {@code null}. */ public DefaultRepositorySystemSession setConfigProperty(String key, Object value) { verifyStateForMutation(); if (value != null) { configProperties.put(key, value); } else { configProperties.remove(key); } return this; } @Override public MirrorSelector getMirrorSelector() { return mirrorSelector; } /** * Sets the mirror selector to use for repositories discovered in artifact descriptors. Note that this selector is * not used for remote repositories which are passed as request parameters to the repository system, those * repositories are supposed to denote the effective repositories. * * @param mirrorSelector The mirror selector to use, may be {@code null}. 
* @return This session for chaining, never {@code null}. */ public DefaultRepositorySystemSession setMirrorSelector(MirrorSelector mirrorSelector) { verifyStateForMutation(); this.mirrorSelector = mirrorSelector; if (this.mirrorSelector == null) { this.mirrorSelector = NullMirrorSelector.INSTANCE; } return this; } @Override public ProxySelector getProxySelector() { return proxySelector; } /** * Sets the proxy selector to use for repositories discovered in artifact descriptors. Note that this selector is * not used for remote repositories which are passed as request parameters to the repository system, those * repositories are supposed to have their proxy (if any) already set. * * @param proxySelector The proxy selector to use, may be {@code null}. * @return This session for chaining, never {@code null}. * @see org.eclipse.aether.repository.RemoteRepository#getProxy() */ public DefaultRepositorySystemSession setProxySelector(ProxySelector proxySelector) { verifyStateForMutation(); this.proxySelector = proxySelector; if (this.proxySelector == null) { this.proxySelector = PassthroughProxySelector.INSTANCE; } return this; } @Override public AuthenticationSelector getAuthenticationSelector() { return authenticationSelector; } /** * Sets the authentication selector to use for repositories discovered in artifact descriptors. Note that this * selector is not used for remote repositories which are passed as request parameters to the repository system, * those repositories are supposed to have their authentication (if any) already set. * * @param authenticationSelector The authentication selector to use, may be {@code null}. * @return This session for chaining, never {@code null}. 
* @see org.eclipse.aether.repository.RemoteRepository#getAuthentication() */ public DefaultRepositorySystemSession setAuthenticationSelector(AuthenticationSelector authenticationSelector) { verifyStateForMutation(); this.authenticationSelector = authenticationSelector; if (this.authenticationSelector == null) { this.authenticationSelector = PassthroughAuthenticationSelector.INSTANCE; } return this; } @Override public ArtifactTypeRegistry getArtifactTypeRegistry() { return artifactTypeRegistry; } /** * Sets the registry of artifact types recognized by this session. * * @param artifactTypeRegistry The artifact type registry, may be {@code null}. * @return This session for chaining, never {@code null}. */ public DefaultRepositorySystemSession setArtifactTypeRegistry(ArtifactTypeRegistry artifactTypeRegistry) { verifyStateForMutation(); this.artifactTypeRegistry = artifactTypeRegistry; if (this.artifactTypeRegistry == null) { this.artifactTypeRegistry = NullArtifactTypeRegistry.INSTANCE; } return this; } @Override public DependencyTraverser getDependencyTraverser() { return dependencyTraverser; } /** * Sets the dependency traverser to use for building dependency graphs. * * @param dependencyTraverser The dependency traverser to use for building dependency graphs, may be {@code null}. * @return This session for chaining, never {@code null}. */ public DefaultRepositorySystemSession setDependencyTraverser(DependencyTraverser dependencyTraverser) { verifyStateForMutation(); this.dependencyTraverser = dependencyTraverser; return this; } @Override public DependencyManager getDependencyManager() { return dependencyManager; } /** * Sets the dependency manager to use for building dependency graphs. * * @param dependencyManager The dependency manager to use for building dependency graphs, may be {@code null}. * @return This session for chaining, never {@code null}. 
*/ public DefaultRepositorySystemSession setDependencyManager(DependencyManager dependencyManager) { verifyStateForMutation(); this.dependencyManager = dependencyManager; return this; } @Override public DependencySelector getDependencySelector() { return dependencySelector; } /** * Sets the dependency selector to use for building dependency graphs. * * @param dependencySelector The dependency selector to use for building dependency graphs, may be {@code null}. * @return This session for chaining, never {@code null}. */ public DefaultRepositorySystemSession setDependencySelector(DependencySelector dependencySelector) { verifyStateForMutation(); this.dependencySelector = dependencySelector; return this; } @Override public VersionFilter getVersionFilter() { return versionFilter; } /** * Sets the version filter to use for building dependency graphs. * * @param versionFilter The version filter to use for building dependency graphs, may be {@code null} to not filter * versions. * @return This session for chaining, never {@code null}. */ public DefaultRepositorySystemSession setVersionFilter(VersionFilter versionFilter) { verifyStateForMutation(); this.versionFilter = versionFilter; return this; } @Override public DependencyGraphTransformer getDependencyGraphTransformer() { return dependencyGraphTransformer; } /** * Sets the dependency graph transformer to use for building dependency graphs. * * @param dependencyGraphTransformer The dependency graph transformer to use for building dependency graphs, may be * {@code null}. * @return This session for chaining, never {@code null}. */ public DefaultRepositorySystemSession setDependencyGraphTransformer( DependencyGraphTransformer dependencyGraphTransformer) { verifyStateForMutation(); this.dependencyGraphTransformer = dependencyGraphTransformer; return this; } @Override public SessionData getData() { return data; } /** * Sets the custom data associated with this session. * * @param data The session data, may be {@code null}. 
* @return This session for chaining, never {@code null}. */ public DefaultRepositorySystemSession setData(SessionData data) { verifyStateForMutation(); this.data = data; if (this.data == null) { this.data = new DefaultSessionData(); } return this; } @Override public RepositoryCache getCache() { return cache; } /** * Sets the cache the repository system may use to save data for future reuse during the session. * * @param cache The repository cache, may be {@code null} if none. * @return This session for chaining, never {@code null}. */ public DefaultRepositorySystemSession setCache(RepositoryCache cache) { verifyStateForMutation(); this.cache = cache; return this; } @Override public ScopeManager getScopeManager() { return scopeManager; } /** * Sets the scope manager, may be {@code null}. * * @param scopeManager The scope manager, may be {@code null}. * @return The session for chaining, never {@code null}. * @since 2.0.0 */ public DefaultRepositorySystemSession setScopeManager(ScopeManager scopeManager) { verifyStateForMutation(); this.scopeManager = scopeManager; return this; } @Override public SystemDependencyScope getSystemDependencyScope() { if (scopeManager != null) { return scopeManager.getSystemDependencyScope().orElse(null); } else { return SystemDependencyScope.LEGACY; } } /** * Registers onSessionEnded handler, if able to. * * @param handler The handler to register * @return Return {@code true} if registration was possible, otherwise {@code false}. */ @Override public boolean addOnSessionEndedHandler(Runnable handler) { return onSessionEndedRegistrar.apply(handler); } /** * Marks this session as read-only such that any future attempts to call its mutators will fail with an exception. * Marking an already read-only session as read-only has no effect. The session's data and cache remain writable * though. 
*/ public void setReadOnly() { readOnly = true; } /** * Verifies this instance state for mutation operations: mutated instance must not be read-only or closed. */ private void verifyStateForMutation() { if (readOnly) { throw new IllegalStateException("repository system session is read-only"); } } /** * Simple "pass through" implementation of {@link ProxySelector} that simply returns what passed in * {@link RemoteRepository} have set already, may return {@code null}. */ static class PassthroughProxySelector implements ProxySelector { public static final ProxySelector INSTANCE = new PassthroughProxySelector(); @Override public Proxy getProxy(RemoteRepository repository) { requireNonNull(repository, "repository cannot be null"); return repository.getProxy(); } } /** * Simple "null" implementation of {@link MirrorSelector} that returns {@code null} for any passed * in {@link RemoteRepository}. */ static class NullMirrorSelector implements MirrorSelector { public static final MirrorSelector INSTANCE = new NullMirrorSelector(); @Override public RemoteRepository getMirror(RemoteRepository repository) { requireNonNull(repository, "repository cannot be null"); return null; } } /** * Simple "pass through" implementation of {@link AuthenticationSelector} that simply returns what passed in * {@link RemoteRepository} have set already, may return {@code null}. */ static class PassthroughAuthenticationSelector implements AuthenticationSelector { public static final AuthenticationSelector INSTANCE = new PassthroughAuthenticationSelector(); @Override public Authentication getAuthentication(RemoteRepository repository) { requireNonNull(repository, "repository cannot be null"); return repository.getAuthentication(); } } /** * Simple "null" implementation of {@link ArtifactTypeRegistry} that returns {@code null} for any type ID. 
*/ static final class NullArtifactTypeRegistry implements ArtifactTypeRegistry { public static final ArtifactTypeRegistry INSTANCE = new NullArtifactTypeRegistry(); @Override public ArtifactType get(String typeId) { return null; } } }
googleapis/google-cloud-java
36,029
java-texttospeech/proto-google-cloud-texttospeech-v1/src/main/java/com/google/cloud/texttospeech/v1/Voice.java
/* * Copyright 2025 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ // Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/cloud/texttospeech/v1/cloud_tts.proto // Protobuf Java Version: 3.25.8 package com.google.cloud.texttospeech.v1; /** * * * <pre> * Description of a voice supported by the TTS service. * </pre> * * Protobuf type {@code google.cloud.texttospeech.v1.Voice} */ public final class Voice extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.cloud.texttospeech.v1.Voice) VoiceOrBuilder { private static final long serialVersionUID = 0L; // Use Voice.newBuilder() to construct. 
private Voice(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private Voice() { languageCodes_ = com.google.protobuf.LazyStringArrayList.emptyList(); name_ = ""; ssmlGender_ = 0; } @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance(UnusedPrivateParameter unused) { return new Voice(); } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.texttospeech.v1.TextToSpeechProto .internal_static_google_cloud_texttospeech_v1_Voice_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.texttospeech.v1.TextToSpeechProto .internal_static_google_cloud_texttospeech_v1_Voice_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.texttospeech.v1.Voice.class, com.google.cloud.texttospeech.v1.Voice.Builder.class); } public static final int LANGUAGE_CODES_FIELD_NUMBER = 1; @SuppressWarnings("serial") private com.google.protobuf.LazyStringArrayList languageCodes_ = com.google.protobuf.LazyStringArrayList.emptyList(); /** * * * <pre> * The languages that this voice supports, expressed as * [BCP-47](https://www.rfc-editor.org/rfc/bcp/bcp47.txt) language tags (e.g. * "en-US", "es-419", "cmn-tw"). * </pre> * * <code>repeated string language_codes = 1;</code> * * @return A list containing the languageCodes. */ public com.google.protobuf.ProtocolStringList getLanguageCodesList() { return languageCodes_; } /** * * * <pre> * The languages that this voice supports, expressed as * [BCP-47](https://www.rfc-editor.org/rfc/bcp/bcp47.txt) language tags (e.g. * "en-US", "es-419", "cmn-tw"). * </pre> * * <code>repeated string language_codes = 1;</code> * * @return The count of languageCodes. 
*/ public int getLanguageCodesCount() { return languageCodes_.size(); } /** * * * <pre> * The languages that this voice supports, expressed as * [BCP-47](https://www.rfc-editor.org/rfc/bcp/bcp47.txt) language tags (e.g. * "en-US", "es-419", "cmn-tw"). * </pre> * * <code>repeated string language_codes = 1;</code> * * @param index The index of the element to return. * @return The languageCodes at the given index. */ public java.lang.String getLanguageCodes(int index) { return languageCodes_.get(index); } /** * * * <pre> * The languages that this voice supports, expressed as * [BCP-47](https://www.rfc-editor.org/rfc/bcp/bcp47.txt) language tags (e.g. * "en-US", "es-419", "cmn-tw"). * </pre> * * <code>repeated string language_codes = 1;</code> * * @param index The index of the value to return. * @return The bytes of the languageCodes at the given index. */ public com.google.protobuf.ByteString getLanguageCodesBytes(int index) { return languageCodes_.getByteString(index); } public static final int NAME_FIELD_NUMBER = 2; @SuppressWarnings("serial") private volatile java.lang.Object name_ = ""; /** * * * <pre> * The name of this voice. Each distinct voice has a unique name. * </pre> * * <code>string name = 2;</code> * * @return The name. */ @java.lang.Override public java.lang.String getName() { java.lang.Object ref = name_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); name_ = s; return s; } } /** * * * <pre> * The name of this voice. Each distinct voice has a unique name. * </pre> * * <code>string name = 2;</code> * * @return The bytes for name. 
*/ @java.lang.Override public com.google.protobuf.ByteString getNameBytes() { java.lang.Object ref = name_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); name_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } public static final int SSML_GENDER_FIELD_NUMBER = 3; private int ssmlGender_ = 0; /** * * * <pre> * The gender of this voice. * </pre> * * <code>.google.cloud.texttospeech.v1.SsmlVoiceGender ssml_gender = 3;</code> * * @return The enum numeric value on the wire for ssmlGender. */ @java.lang.Override public int getSsmlGenderValue() { return ssmlGender_; } /** * * * <pre> * The gender of this voice. * </pre> * * <code>.google.cloud.texttospeech.v1.SsmlVoiceGender ssml_gender = 3;</code> * * @return The ssmlGender. */ @java.lang.Override public com.google.cloud.texttospeech.v1.SsmlVoiceGender getSsmlGender() { com.google.cloud.texttospeech.v1.SsmlVoiceGender result = com.google.cloud.texttospeech.v1.SsmlVoiceGender.forNumber(ssmlGender_); return result == null ? com.google.cloud.texttospeech.v1.SsmlVoiceGender.UNRECOGNIZED : result; } public static final int NATURAL_SAMPLE_RATE_HERTZ_FIELD_NUMBER = 4; private int naturalSampleRateHertz_ = 0; /** * * * <pre> * The natural sample rate (in hertz) for this voice. * </pre> * * <code>int32 natural_sample_rate_hertz = 4;</code> * * @return The naturalSampleRateHertz. 
*/ @java.lang.Override public int getNaturalSampleRateHertz() { return naturalSampleRateHertz_; } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { for (int i = 0; i < languageCodes_.size(); i++) { com.google.protobuf.GeneratedMessageV3.writeString(output, 1, languageCodes_.getRaw(i)); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(name_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 2, name_); } if (ssmlGender_ != com.google.cloud.texttospeech.v1.SsmlVoiceGender.SSML_VOICE_GENDER_UNSPECIFIED .getNumber()) { output.writeEnum(3, ssmlGender_); } if (naturalSampleRateHertz_ != 0) { output.writeInt32(4, naturalSampleRateHertz_); } getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; { int dataSize = 0; for (int i = 0; i < languageCodes_.size(); i++) { dataSize += computeStringSizeNoTag(languageCodes_.getRaw(i)); } size += dataSize; size += 1 * getLanguageCodesList().size(); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(name_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, name_); } if (ssmlGender_ != com.google.cloud.texttospeech.v1.SsmlVoiceGender.SSML_VOICE_GENDER_UNSPECIFIED .getNumber()) { size += com.google.protobuf.CodedOutputStream.computeEnumSize(3, ssmlGender_); } if (naturalSampleRateHertz_ != 0) { size += com.google.protobuf.CodedOutputStream.computeInt32Size(4, naturalSampleRateHertz_); } size += getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return 
true; } if (!(obj instanceof com.google.cloud.texttospeech.v1.Voice)) { return super.equals(obj); } com.google.cloud.texttospeech.v1.Voice other = (com.google.cloud.texttospeech.v1.Voice) obj; if (!getLanguageCodesList().equals(other.getLanguageCodesList())) return false; if (!getName().equals(other.getName())) return false; if (ssmlGender_ != other.ssmlGender_) return false; if (getNaturalSampleRateHertz() != other.getNaturalSampleRateHertz()) return false; if (!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); if (getLanguageCodesCount() > 0) { hash = (37 * hash) + LANGUAGE_CODES_FIELD_NUMBER; hash = (53 * hash) + getLanguageCodesList().hashCode(); } hash = (37 * hash) + NAME_FIELD_NUMBER; hash = (53 * hash) + getName().hashCode(); hash = (37 * hash) + SSML_GENDER_FIELD_NUMBER; hash = (53 * hash) + ssmlGender_; hash = (37 * hash) + NATURAL_SAMPLE_RATE_HERTZ_FIELD_NUMBER; hash = (53 * hash) + getNaturalSampleRateHertz(); hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static com.google.cloud.texttospeech.v1.Voice parseFrom(java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.texttospeech.v1.Voice parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.texttospeech.v1.Voice parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.texttospeech.v1.Voice parseFrom( com.google.protobuf.ByteString data, 
com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.texttospeech.v1.Voice parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.texttospeech.v1.Voice parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.texttospeech.v1.Voice parseFrom(java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.texttospeech.v1.Voice parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.texttospeech.v1.Voice parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); } public static com.google.cloud.texttospeech.v1.Voice parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.texttospeech.v1.Voice parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.texttospeech.v1.Voice parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) 
throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder(com.google.cloud.texttospeech.v1.Voice prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * * * <pre> * Description of a voice supported by the TTS service. * </pre> * * Protobuf type {@code google.cloud.texttospeech.v1.Voice} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:google.cloud.texttospeech.v1.Voice) com.google.cloud.texttospeech.v1.VoiceOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.texttospeech.v1.TextToSpeechProto .internal_static_google_cloud_texttospeech_v1_Voice_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.texttospeech.v1.TextToSpeechProto .internal_static_google_cloud_texttospeech_v1_Voice_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.texttospeech.v1.Voice.class, com.google.cloud.texttospeech.v1.Voice.Builder.class); } // Construct using com.google.cloud.texttospeech.v1.Voice.newBuilder() private Builder() {} private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); } @java.lang.Override public Builder clear() { super.clear(); bitField0_ = 0; 
languageCodes_ = com.google.protobuf.LazyStringArrayList.emptyList(); name_ = ""; ssmlGender_ = 0; naturalSampleRateHertz_ = 0; return this; } @java.lang.Override public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return com.google.cloud.texttospeech.v1.TextToSpeechProto .internal_static_google_cloud_texttospeech_v1_Voice_descriptor; } @java.lang.Override public com.google.cloud.texttospeech.v1.Voice getDefaultInstanceForType() { return com.google.cloud.texttospeech.v1.Voice.getDefaultInstance(); } @java.lang.Override public com.google.cloud.texttospeech.v1.Voice build() { com.google.cloud.texttospeech.v1.Voice result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public com.google.cloud.texttospeech.v1.Voice buildPartial() { com.google.cloud.texttospeech.v1.Voice result = new com.google.cloud.texttospeech.v1.Voice(this); if (bitField0_ != 0) { buildPartial0(result); } onBuilt(); return result; } private void buildPartial0(com.google.cloud.texttospeech.v1.Voice result) { int from_bitField0_ = bitField0_; if (((from_bitField0_ & 0x00000001) != 0)) { languageCodes_.makeImmutable(); result.languageCodes_ = languageCodes_; } if (((from_bitField0_ & 0x00000002) != 0)) { result.name_ = name_; } if (((from_bitField0_ & 0x00000004) != 0)) { result.ssmlGender_ = ssmlGender_; } if (((from_bitField0_ & 0x00000008) != 0)) { result.naturalSampleRateHertz_ = naturalSampleRateHertz_; } } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) { return 
super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof com.google.cloud.texttospeech.v1.Voice) { return mergeFrom((com.google.cloud.texttospeech.v1.Voice) other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(com.google.cloud.texttospeech.v1.Voice other) { if (other == com.google.cloud.texttospeech.v1.Voice.getDefaultInstance()) return this; if (!other.languageCodes_.isEmpty()) { if (languageCodes_.isEmpty()) { languageCodes_ = other.languageCodes_; bitField0_ |= 0x00000001; } else { ensureLanguageCodesIsMutable(); languageCodes_.addAll(other.languageCodes_); } onChanged(); } if (!other.getName().isEmpty()) { name_ = other.name_; bitField0_ |= 0x00000002; onChanged(); } if (other.ssmlGender_ != 0) { setSsmlGenderValue(other.getSsmlGenderValue()); } if (other.getNaturalSampleRateHertz() != 0) { setNaturalSampleRateHertz(other.getNaturalSampleRateHertz()); } this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { java.lang.String s = input.readStringRequireUtf8(); ensureLanguageCodesIsMutable(); languageCodes_.add(s); 
break; } // case 10 case 18: { name_ = input.readStringRequireUtf8(); bitField0_ |= 0x00000002; break; } // case 18 case 24: { ssmlGender_ = input.readEnum(); bitField0_ |= 0x00000004; break; } // case 24 case 32: { naturalSampleRateHertz_ = input.readInt32(); bitField0_ |= 0x00000008; break; } // case 32 default: { if (!super.parseUnknownField(input, extensionRegistry, tag)) { done = true; // was an endgroup tag } break; } // default: } // switch (tag) } // while (!done) } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.unwrapIOException(); } finally { onChanged(); } // finally return this; } private int bitField0_; private com.google.protobuf.LazyStringArrayList languageCodes_ = com.google.protobuf.LazyStringArrayList.emptyList(); private void ensureLanguageCodesIsMutable() { if (!languageCodes_.isModifiable()) { languageCodes_ = new com.google.protobuf.LazyStringArrayList(languageCodes_); } bitField0_ |= 0x00000001; } /** * * * <pre> * The languages that this voice supports, expressed as * [BCP-47](https://www.rfc-editor.org/rfc/bcp/bcp47.txt) language tags (e.g. * "en-US", "es-419", "cmn-tw"). * </pre> * * <code>repeated string language_codes = 1;</code> * * @return A list containing the languageCodes. */ public com.google.protobuf.ProtocolStringList getLanguageCodesList() { languageCodes_.makeImmutable(); return languageCodes_; } /** * * * <pre> * The languages that this voice supports, expressed as * [BCP-47](https://www.rfc-editor.org/rfc/bcp/bcp47.txt) language tags (e.g. * "en-US", "es-419", "cmn-tw"). * </pre> * * <code>repeated string language_codes = 1;</code> * * @return The count of languageCodes. */ public int getLanguageCodesCount() { return languageCodes_.size(); } /** * * * <pre> * The languages that this voice supports, expressed as * [BCP-47](https://www.rfc-editor.org/rfc/bcp/bcp47.txt) language tags (e.g. * "en-US", "es-419", "cmn-tw"). 
* </pre> * * <code>repeated string language_codes = 1;</code> * * @param index The index of the element to return. * @return The languageCodes at the given index. */ public java.lang.String getLanguageCodes(int index) { return languageCodes_.get(index); } /** * * * <pre> * The languages that this voice supports, expressed as * [BCP-47](https://www.rfc-editor.org/rfc/bcp/bcp47.txt) language tags (e.g. * "en-US", "es-419", "cmn-tw"). * </pre> * * <code>repeated string language_codes = 1;</code> * * @param index The index of the value to return. * @return The bytes of the languageCodes at the given index. */ public com.google.protobuf.ByteString getLanguageCodesBytes(int index) { return languageCodes_.getByteString(index); } /** * * * <pre> * The languages that this voice supports, expressed as * [BCP-47](https://www.rfc-editor.org/rfc/bcp/bcp47.txt) language tags (e.g. * "en-US", "es-419", "cmn-tw"). * </pre> * * <code>repeated string language_codes = 1;</code> * * @param index The index to set the value at. * @param value The languageCodes to set. * @return This builder for chaining. */ public Builder setLanguageCodes(int index, java.lang.String value) { if (value == null) { throw new NullPointerException(); } ensureLanguageCodesIsMutable(); languageCodes_.set(index, value); bitField0_ |= 0x00000001; onChanged(); return this; } /** * * * <pre> * The languages that this voice supports, expressed as * [BCP-47](https://www.rfc-editor.org/rfc/bcp/bcp47.txt) language tags (e.g. * "en-US", "es-419", "cmn-tw"). * </pre> * * <code>repeated string language_codes = 1;</code> * * @param value The languageCodes to add. * @return This builder for chaining. 
*/ public Builder addLanguageCodes(java.lang.String value) { if (value == null) { throw new NullPointerException(); } ensureLanguageCodesIsMutable(); languageCodes_.add(value); bitField0_ |= 0x00000001; onChanged(); return this; } /** * * * <pre> * The languages that this voice supports, expressed as * [BCP-47](https://www.rfc-editor.org/rfc/bcp/bcp47.txt) language tags (e.g. * "en-US", "es-419", "cmn-tw"). * </pre> * * <code>repeated string language_codes = 1;</code> * * @param values The languageCodes to add. * @return This builder for chaining. */ public Builder addAllLanguageCodes(java.lang.Iterable<java.lang.String> values) { ensureLanguageCodesIsMutable(); com.google.protobuf.AbstractMessageLite.Builder.addAll(values, languageCodes_); bitField0_ |= 0x00000001; onChanged(); return this; } /** * * * <pre> * The languages that this voice supports, expressed as * [BCP-47](https://www.rfc-editor.org/rfc/bcp/bcp47.txt) language tags (e.g. * "en-US", "es-419", "cmn-tw"). * </pre> * * <code>repeated string language_codes = 1;</code> * * @return This builder for chaining. */ public Builder clearLanguageCodes() { languageCodes_ = com.google.protobuf.LazyStringArrayList.emptyList(); bitField0_ = (bitField0_ & ~0x00000001); ; onChanged(); return this; } /** * * * <pre> * The languages that this voice supports, expressed as * [BCP-47](https://www.rfc-editor.org/rfc/bcp/bcp47.txt) language tags (e.g. * "en-US", "es-419", "cmn-tw"). * </pre> * * <code>repeated string language_codes = 1;</code> * * @param value The bytes of the languageCodes to add. * @return This builder for chaining. */ public Builder addLanguageCodesBytes(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); ensureLanguageCodesIsMutable(); languageCodes_.add(value); bitField0_ |= 0x00000001; onChanged(); return this; } private java.lang.Object name_ = ""; /** * * * <pre> * The name of this voice. 
Each distinct voice has a unique name. * </pre> * * <code>string name = 2;</code> * * @return The name. */ public java.lang.String getName() { java.lang.Object ref = name_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); name_ = s; return s; } else { return (java.lang.String) ref; } } /** * * * <pre> * The name of this voice. Each distinct voice has a unique name. * </pre> * * <code>string name = 2;</code> * * @return The bytes for name. */ public com.google.protobuf.ByteString getNameBytes() { java.lang.Object ref = name_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); name_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * * * <pre> * The name of this voice. Each distinct voice has a unique name. * </pre> * * <code>string name = 2;</code> * * @param value The name to set. * @return This builder for chaining. */ public Builder setName(java.lang.String value) { if (value == null) { throw new NullPointerException(); } name_ = value; bitField0_ |= 0x00000002; onChanged(); return this; } /** * * * <pre> * The name of this voice. Each distinct voice has a unique name. * </pre> * * <code>string name = 2;</code> * * @return This builder for chaining. */ public Builder clearName() { name_ = getDefaultInstance().getName(); bitField0_ = (bitField0_ & ~0x00000002); onChanged(); return this; } /** * * * <pre> * The name of this voice. Each distinct voice has a unique name. * </pre> * * <code>string name = 2;</code> * * @param value The bytes for name to set. * @return This builder for chaining. 
*/ public Builder setNameBytes(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); name_ = value; bitField0_ |= 0x00000002; onChanged(); return this; } private int ssmlGender_ = 0; /** * * * <pre> * The gender of this voice. * </pre> * * <code>.google.cloud.texttospeech.v1.SsmlVoiceGender ssml_gender = 3;</code> * * @return The enum numeric value on the wire for ssmlGender. */ @java.lang.Override public int getSsmlGenderValue() { return ssmlGender_; } /** * * * <pre> * The gender of this voice. * </pre> * * <code>.google.cloud.texttospeech.v1.SsmlVoiceGender ssml_gender = 3;</code> * * @param value The enum numeric value on the wire for ssmlGender to set. * @return This builder for chaining. */ public Builder setSsmlGenderValue(int value) { ssmlGender_ = value; bitField0_ |= 0x00000004; onChanged(); return this; } /** * * * <pre> * The gender of this voice. * </pre> * * <code>.google.cloud.texttospeech.v1.SsmlVoiceGender ssml_gender = 3;</code> * * @return The ssmlGender. */ @java.lang.Override public com.google.cloud.texttospeech.v1.SsmlVoiceGender getSsmlGender() { com.google.cloud.texttospeech.v1.SsmlVoiceGender result = com.google.cloud.texttospeech.v1.SsmlVoiceGender.forNumber(ssmlGender_); return result == null ? com.google.cloud.texttospeech.v1.SsmlVoiceGender.UNRECOGNIZED : result; } /** * * * <pre> * The gender of this voice. * </pre> * * <code>.google.cloud.texttospeech.v1.SsmlVoiceGender ssml_gender = 3;</code> * * @param value The ssmlGender to set. * @return This builder for chaining. */ public Builder setSsmlGender(com.google.cloud.texttospeech.v1.SsmlVoiceGender value) { if (value == null) { throw new NullPointerException(); } bitField0_ |= 0x00000004; ssmlGender_ = value.getNumber(); onChanged(); return this; } /** * * * <pre> * The gender of this voice. 
* </pre> * * <code>.google.cloud.texttospeech.v1.SsmlVoiceGender ssml_gender = 3;</code> * * @return This builder for chaining. */ public Builder clearSsmlGender() { bitField0_ = (bitField0_ & ~0x00000004); ssmlGender_ = 0; onChanged(); return this; } private int naturalSampleRateHertz_; /** * * * <pre> * The natural sample rate (in hertz) for this voice. * </pre> * * <code>int32 natural_sample_rate_hertz = 4;</code> * * @return The naturalSampleRateHertz. */ @java.lang.Override public int getNaturalSampleRateHertz() { return naturalSampleRateHertz_; } /** * * * <pre> * The natural sample rate (in hertz) for this voice. * </pre> * * <code>int32 natural_sample_rate_hertz = 4;</code> * * @param value The naturalSampleRateHertz to set. * @return This builder for chaining. */ public Builder setNaturalSampleRateHertz(int value) { naturalSampleRateHertz_ = value; bitField0_ |= 0x00000008; onChanged(); return this; } /** * * * <pre> * The natural sample rate (in hertz) for this voice. * </pre> * * <code>int32 natural_sample_rate_hertz = 4;</code> * * @return This builder for chaining. 
*/ public Builder clearNaturalSampleRateHertz() { bitField0_ = (bitField0_ & ~0x00000008); naturalSampleRateHertz_ = 0; onChanged(); return this; } @java.lang.Override public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:google.cloud.texttospeech.v1.Voice) } // @@protoc_insertion_point(class_scope:google.cloud.texttospeech.v1.Voice) private static final com.google.cloud.texttospeech.v1.Voice DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new com.google.cloud.texttospeech.v1.Voice(); } public static com.google.cloud.texttospeech.v1.Voice getDefaultInstance() { return DEFAULT_INSTANCE; } private static final com.google.protobuf.Parser<Voice> PARSER = new com.google.protobuf.AbstractParser<Voice>() { @java.lang.Override public Voice parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { Builder builder = newBuilder(); try { builder.mergeFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(builder.buildPartial()); } catch (com.google.protobuf.UninitializedMessageException e) { throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException(e) .setUnfinishedMessage(builder.buildPartial()); } return builder.buildPartial(); } }; public static com.google.protobuf.Parser<Voice> parser() { return PARSER; } @java.lang.Override public com.google.protobuf.Parser<Voice> getParserForType() { return PARSER; } @java.lang.Override public 
com.google.cloud.texttospeech.v1.Voice getDefaultInstanceForType() { return DEFAULT_INSTANCE; } }
googleapis/google-cloud-java
36,135
java-recaptchaenterprise/proto-google-cloud-recaptchaenterprise-v1beta1/src/main/java/com/google/recaptchaenterprise/v1beta1/AccountDefenderAssessment.java
/* * Copyright 2025 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ // Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/cloud/recaptchaenterprise/v1beta1/recaptchaenterprise.proto // Protobuf Java Version: 3.25.8 package com.google.recaptchaenterprise.v1beta1; /** * * * <pre> * Account defender risk assessment. * </pre> * * Protobuf type {@code google.cloud.recaptchaenterprise.v1beta1.AccountDefenderAssessment} */ public final class AccountDefenderAssessment extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.cloud.recaptchaenterprise.v1beta1.AccountDefenderAssessment) AccountDefenderAssessmentOrBuilder { private static final long serialVersionUID = 0L; // Use AccountDefenderAssessment.newBuilder() to construct. 
private AccountDefenderAssessment(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private AccountDefenderAssessment() { labels_ = java.util.Collections.emptyList(); } @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance(UnusedPrivateParameter unused) { return new AccountDefenderAssessment(); } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.recaptchaenterprise.v1beta1.RecaptchaEnterpriseProto .internal_static_google_cloud_recaptchaenterprise_v1beta1_AccountDefenderAssessment_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.recaptchaenterprise.v1beta1.RecaptchaEnterpriseProto .internal_static_google_cloud_recaptchaenterprise_v1beta1_AccountDefenderAssessment_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.recaptchaenterprise.v1beta1.AccountDefenderAssessment.class, com.google.recaptchaenterprise.v1beta1.AccountDefenderAssessment.Builder.class); } /** * * * <pre> * Labels returned by account defender for this request. * </pre> * * Protobuf enum {@code * google.cloud.recaptchaenterprise.v1beta1.AccountDefenderAssessment.AccountDefenderLabel} */ public enum AccountDefenderLabel implements com.google.protobuf.ProtocolMessageEnum { /** * * * <pre> * Default unspecified type. * </pre> * * <code>ACCOUNT_DEFENDER_LABEL_UNSPECIFIED = 0;</code> */ ACCOUNT_DEFENDER_LABEL_UNSPECIFIED(0), /** * * * <pre> * The request matches a known good profile for the user. * </pre> * * <code>PROFILE_MATCH = 1;</code> */ PROFILE_MATCH(1), /** * * * <pre> * The request is potentially a suspicious login event and should be further * verified either via multi-factor authentication or another system. 
* </pre> * * <code>SUSPICIOUS_LOGIN_ACTIVITY = 2;</code> */ SUSPICIOUS_LOGIN_ACTIVITY(2), /** * * * <pre> * The request matched a profile that previously had suspicious account * creation behavior. This could mean this is a fake account. * </pre> * * <code>SUSPICIOUS_ACCOUNT_CREATION = 3;</code> */ SUSPICIOUS_ACCOUNT_CREATION(3), /** * * * <pre> * The account in the request has a high number of related accounts. It does * not necessarily imply that the account is bad but could require * investigating. * </pre> * * <code>RELATED_ACCOUNTS_NUMBER_HIGH = 4;</code> */ RELATED_ACCOUNTS_NUMBER_HIGH(4), UNRECOGNIZED(-1), ; /** * * * <pre> * Default unspecified type. * </pre> * * <code>ACCOUNT_DEFENDER_LABEL_UNSPECIFIED = 0;</code> */ public static final int ACCOUNT_DEFENDER_LABEL_UNSPECIFIED_VALUE = 0; /** * * * <pre> * The request matches a known good profile for the user. * </pre> * * <code>PROFILE_MATCH = 1;</code> */ public static final int PROFILE_MATCH_VALUE = 1; /** * * * <pre> * The request is potentially a suspicious login event and should be further * verified either via multi-factor authentication or another system. * </pre> * * <code>SUSPICIOUS_LOGIN_ACTIVITY = 2;</code> */ public static final int SUSPICIOUS_LOGIN_ACTIVITY_VALUE = 2; /** * * * <pre> * The request matched a profile that previously had suspicious account * creation behavior. This could mean this is a fake account. * </pre> * * <code>SUSPICIOUS_ACCOUNT_CREATION = 3;</code> */ public static final int SUSPICIOUS_ACCOUNT_CREATION_VALUE = 3; /** * * * <pre> * The account in the request has a high number of related accounts. It does * not necessarily imply that the account is bad but could require * investigating. 
* </pre> * * <code>RELATED_ACCOUNTS_NUMBER_HIGH = 4;</code> */ public static final int RELATED_ACCOUNTS_NUMBER_HIGH_VALUE = 4; public final int getNumber() { if (this == UNRECOGNIZED) { throw new java.lang.IllegalArgumentException( "Can't get the number of an unknown enum value."); } return value; } /** * @param value The numeric wire value of the corresponding enum entry. * @return The enum associated with the given numeric wire value. * @deprecated Use {@link #forNumber(int)} instead. */ @java.lang.Deprecated public static AccountDefenderLabel valueOf(int value) { return forNumber(value); } /** * @param value The numeric wire value of the corresponding enum entry. * @return The enum associated with the given numeric wire value. */ public static AccountDefenderLabel forNumber(int value) { switch (value) { case 0: return ACCOUNT_DEFENDER_LABEL_UNSPECIFIED; case 1: return PROFILE_MATCH; case 2: return SUSPICIOUS_LOGIN_ACTIVITY; case 3: return SUSPICIOUS_ACCOUNT_CREATION; case 4: return RELATED_ACCOUNTS_NUMBER_HIGH; default: return null; } } public static com.google.protobuf.Internal.EnumLiteMap<AccountDefenderLabel> internalGetValueMap() { return internalValueMap; } private static final com.google.protobuf.Internal.EnumLiteMap<AccountDefenderLabel> internalValueMap = new com.google.protobuf.Internal.EnumLiteMap<AccountDefenderLabel>() { public AccountDefenderLabel findValueByNumber(int number) { return AccountDefenderLabel.forNumber(number); } }; public final com.google.protobuf.Descriptors.EnumValueDescriptor getValueDescriptor() { if (this == UNRECOGNIZED) { throw new java.lang.IllegalStateException( "Can't get the descriptor of an unrecognized enum value."); } return getDescriptor().getValues().get(ordinal()); } public final com.google.protobuf.Descriptors.EnumDescriptor getDescriptorForType() { return getDescriptor(); } public static final com.google.protobuf.Descriptors.EnumDescriptor getDescriptor() { return 
com.google.recaptchaenterprise.v1beta1.AccountDefenderAssessment.getDescriptor() .getEnumTypes() .get(0); } private static final AccountDefenderLabel[] VALUES = values(); public static AccountDefenderLabel valueOf( com.google.protobuf.Descriptors.EnumValueDescriptor desc) { if (desc.getType() != getDescriptor()) { throw new java.lang.IllegalArgumentException("EnumValueDescriptor is not for this type."); } if (desc.getIndex() == -1) { return UNRECOGNIZED; } return VALUES[desc.getIndex()]; } private final int value; private AccountDefenderLabel(int value) { this.value = value; } // @@protoc_insertion_point(enum_scope:google.cloud.recaptchaenterprise.v1beta1.AccountDefenderAssessment.AccountDefenderLabel) } public static final int LABELS_FIELD_NUMBER = 1; @SuppressWarnings("serial") private java.util.List<java.lang.Integer> labels_; private static final com.google.protobuf.Internal.ListAdapter.Converter< java.lang.Integer, com.google.recaptchaenterprise.v1beta1.AccountDefenderAssessment.AccountDefenderLabel> labels_converter_ = new com.google.protobuf.Internal.ListAdapter.Converter< java.lang.Integer, com.google.recaptchaenterprise.v1beta1.AccountDefenderAssessment .AccountDefenderLabel>() { public com.google.recaptchaenterprise.v1beta1.AccountDefenderAssessment .AccountDefenderLabel convert(java.lang.Integer from) { com.google.recaptchaenterprise.v1beta1.AccountDefenderAssessment.AccountDefenderLabel result = com.google.recaptchaenterprise.v1beta1.AccountDefenderAssessment .AccountDefenderLabel.forNumber(from); return result == null ? com.google.recaptchaenterprise.v1beta1.AccountDefenderAssessment .AccountDefenderLabel.UNRECOGNIZED : result; } }; /** * * * <pre> * Labels for this request. * </pre> * * <code> * repeated .google.cloud.recaptchaenterprise.v1beta1.AccountDefenderAssessment.AccountDefenderLabel labels = 1; * </code> * * @return A list containing the labels. 
*/ @java.lang.Override public java.util.List< com.google.recaptchaenterprise.v1beta1.AccountDefenderAssessment.AccountDefenderLabel> getLabelsList() { return new com.google.protobuf.Internal.ListAdapter< java.lang.Integer, com.google.recaptchaenterprise.v1beta1.AccountDefenderAssessment.AccountDefenderLabel>( labels_, labels_converter_); } /** * * * <pre> * Labels for this request. * </pre> * * <code> * repeated .google.cloud.recaptchaenterprise.v1beta1.AccountDefenderAssessment.AccountDefenderLabel labels = 1; * </code> * * @return The count of labels. */ @java.lang.Override public int getLabelsCount() { return labels_.size(); } /** * * * <pre> * Labels for this request. * </pre> * * <code> * repeated .google.cloud.recaptchaenterprise.v1beta1.AccountDefenderAssessment.AccountDefenderLabel labels = 1; * </code> * * @param index The index of the element to return. * @return The labels at the given index. */ @java.lang.Override public com.google.recaptchaenterprise.v1beta1.AccountDefenderAssessment.AccountDefenderLabel getLabels(int index) { return labels_converter_.convert(labels_.get(index)); } /** * * * <pre> * Labels for this request. * </pre> * * <code> * repeated .google.cloud.recaptchaenterprise.v1beta1.AccountDefenderAssessment.AccountDefenderLabel labels = 1; * </code> * * @return A list containing the enum numeric values on the wire for labels. */ @java.lang.Override public java.util.List<java.lang.Integer> getLabelsValueList() { return labels_; } /** * * * <pre> * Labels for this request. * </pre> * * <code> * repeated .google.cloud.recaptchaenterprise.v1beta1.AccountDefenderAssessment.AccountDefenderLabel labels = 1; * </code> * * @param index The index of the value to return. * @return The enum numeric value on the wire of labels at the given index. 
*/ @java.lang.Override public int getLabelsValue(int index) { return labels_.get(index); } private int labelsMemoizedSerializedSize; private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { getSerializedSize(); if (getLabelsList().size() > 0) { output.writeUInt32NoTag(10); output.writeUInt32NoTag(labelsMemoizedSerializedSize); } for (int i = 0; i < labels_.size(); i++) { output.writeEnumNoTag(labels_.get(i)); } getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; { int dataSize = 0; for (int i = 0; i < labels_.size(); i++) { dataSize += com.google.protobuf.CodedOutputStream.computeEnumSizeNoTag(labels_.get(i)); } size += dataSize; if (!getLabelsList().isEmpty()) { size += 1; size += com.google.protobuf.CodedOutputStream.computeUInt32SizeNoTag(dataSize); } labelsMemoizedSerializedSize = dataSize; } size += getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof com.google.recaptchaenterprise.v1beta1.AccountDefenderAssessment)) { return super.equals(obj); } com.google.recaptchaenterprise.v1beta1.AccountDefenderAssessment other = (com.google.recaptchaenterprise.v1beta1.AccountDefenderAssessment) obj; if (!labels_.equals(other.labels_)) return false; if (!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); if 
(getLabelsCount() > 0) { hash = (37 * hash) + LABELS_FIELD_NUMBER; hash = (53 * hash) + labels_.hashCode(); } hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static com.google.recaptchaenterprise.v1beta1.AccountDefenderAssessment parseFrom( java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.recaptchaenterprise.v1beta1.AccountDefenderAssessment parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.recaptchaenterprise.v1beta1.AccountDefenderAssessment parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.recaptchaenterprise.v1beta1.AccountDefenderAssessment parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.recaptchaenterprise.v1beta1.AccountDefenderAssessment parseFrom( byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.recaptchaenterprise.v1beta1.AccountDefenderAssessment parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.recaptchaenterprise.v1beta1.AccountDefenderAssessment parseFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static 
com.google.recaptchaenterprise.v1beta1.AccountDefenderAssessment parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } public static com.google.recaptchaenterprise.v1beta1.AccountDefenderAssessment parseDelimitedFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); } public static com.google.recaptchaenterprise.v1beta1.AccountDefenderAssessment parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException( PARSER, input, extensionRegistry); } public static com.google.recaptchaenterprise.v1beta1.AccountDefenderAssessment parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.recaptchaenterprise.v1beta1.AccountDefenderAssessment parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder( com.google.recaptchaenterprise.v1beta1.AccountDefenderAssessment prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? 
new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * * * <pre> * Account defender risk assessment. * </pre> * * Protobuf type {@code google.cloud.recaptchaenterprise.v1beta1.AccountDefenderAssessment} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:google.cloud.recaptchaenterprise.v1beta1.AccountDefenderAssessment) com.google.recaptchaenterprise.v1beta1.AccountDefenderAssessmentOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.recaptchaenterprise.v1beta1.RecaptchaEnterpriseProto .internal_static_google_cloud_recaptchaenterprise_v1beta1_AccountDefenderAssessment_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.recaptchaenterprise.v1beta1.RecaptchaEnterpriseProto .internal_static_google_cloud_recaptchaenterprise_v1beta1_AccountDefenderAssessment_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.recaptchaenterprise.v1beta1.AccountDefenderAssessment.class, com.google.recaptchaenterprise.v1beta1.AccountDefenderAssessment.Builder.class); } // Construct using com.google.recaptchaenterprise.v1beta1.AccountDefenderAssessment.newBuilder() private Builder() {} private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); } @java.lang.Override public Builder clear() { super.clear(); bitField0_ = 0; labels_ = java.util.Collections.emptyList(); bitField0_ = (bitField0_ & ~0x00000001); return this; } @java.lang.Override public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return com.google.recaptchaenterprise.v1beta1.RecaptchaEnterpriseProto 
.internal_static_google_cloud_recaptchaenterprise_v1beta1_AccountDefenderAssessment_descriptor; } @java.lang.Override public com.google.recaptchaenterprise.v1beta1.AccountDefenderAssessment getDefaultInstanceForType() { return com.google.recaptchaenterprise.v1beta1.AccountDefenderAssessment.getDefaultInstance(); } @java.lang.Override public com.google.recaptchaenterprise.v1beta1.AccountDefenderAssessment build() { com.google.recaptchaenterprise.v1beta1.AccountDefenderAssessment result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public com.google.recaptchaenterprise.v1beta1.AccountDefenderAssessment buildPartial() { com.google.recaptchaenterprise.v1beta1.AccountDefenderAssessment result = new com.google.recaptchaenterprise.v1beta1.AccountDefenderAssessment(this); buildPartialRepeatedFields(result); if (bitField0_ != 0) { buildPartial0(result); } onBuilt(); return result; } private void buildPartialRepeatedFields( com.google.recaptchaenterprise.v1beta1.AccountDefenderAssessment result) { if (((bitField0_ & 0x00000001) != 0)) { labels_ = java.util.Collections.unmodifiableList(labels_); bitField0_ = (bitField0_ & ~0x00000001); } result.labels_ = labels_; } private void buildPartial0( com.google.recaptchaenterprise.v1beta1.AccountDefenderAssessment result) { int from_bitField0_ = bitField0_; } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( 
com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof com.google.recaptchaenterprise.v1beta1.AccountDefenderAssessment) { return mergeFrom((com.google.recaptchaenterprise.v1beta1.AccountDefenderAssessment) other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom( com.google.recaptchaenterprise.v1beta1.AccountDefenderAssessment other) { if (other == com.google.recaptchaenterprise.v1beta1.AccountDefenderAssessment.getDefaultInstance()) return this; if (!other.labels_.isEmpty()) { if (labels_.isEmpty()) { labels_ = other.labels_; bitField0_ = (bitField0_ & ~0x00000001); } else { ensureLabelsIsMutable(); labels_.addAll(other.labels_); } onChanged(); } this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 8: { int tmpRaw = input.readEnum(); ensureLabelsIsMutable(); labels_.add(tmpRaw); break; } // case 8 case 10: { int length = input.readRawVarint32(); int oldLimit = input.pushLimit(length); while (input.getBytesUntilLimit() > 0) { int tmpRaw = input.readEnum(); ensureLabelsIsMutable(); labels_.add(tmpRaw); } input.popLimit(oldLimit); break; } // case 10 default: { if (!super.parseUnknownField(input, 
extensionRegistry, tag)) { done = true; // was an endgroup tag } break; } // default: } // switch (tag) } // while (!done) } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.unwrapIOException(); } finally { onChanged(); } // finally return this; } private int bitField0_; private java.util.List<java.lang.Integer> labels_ = java.util.Collections.emptyList(); private void ensureLabelsIsMutable() { if (!((bitField0_ & 0x00000001) != 0)) { labels_ = new java.util.ArrayList<java.lang.Integer>(labels_); bitField0_ |= 0x00000001; } } /** * * * <pre> * Labels for this request. * </pre> * * <code> * repeated .google.cloud.recaptchaenterprise.v1beta1.AccountDefenderAssessment.AccountDefenderLabel labels = 1; * </code> * * @return A list containing the labels. */ public java.util.List< com.google.recaptchaenterprise.v1beta1.AccountDefenderAssessment.AccountDefenderLabel> getLabelsList() { return new com.google.protobuf.Internal.ListAdapter< java.lang.Integer, com.google.recaptchaenterprise.v1beta1.AccountDefenderAssessment.AccountDefenderLabel>( labels_, labels_converter_); } /** * * * <pre> * Labels for this request. * </pre> * * <code> * repeated .google.cloud.recaptchaenterprise.v1beta1.AccountDefenderAssessment.AccountDefenderLabel labels = 1; * </code> * * @return The count of labels. */ public int getLabelsCount() { return labels_.size(); } /** * * * <pre> * Labels for this request. * </pre> * * <code> * repeated .google.cloud.recaptchaenterprise.v1beta1.AccountDefenderAssessment.AccountDefenderLabel labels = 1; * </code> * * @param index The index of the element to return. * @return The labels at the given index. */ public com.google.recaptchaenterprise.v1beta1.AccountDefenderAssessment.AccountDefenderLabel getLabels(int index) { return labels_converter_.convert(labels_.get(index)); } /** * * * <pre> * Labels for this request. 
* </pre> * * <code> * repeated .google.cloud.recaptchaenterprise.v1beta1.AccountDefenderAssessment.AccountDefenderLabel labels = 1; * </code> * * @param index The index to set the value at. * @param value The labels to set. * @return This builder for chaining. */ public Builder setLabels( int index, com.google.recaptchaenterprise.v1beta1.AccountDefenderAssessment.AccountDefenderLabel value) { if (value == null) { throw new NullPointerException(); } ensureLabelsIsMutable(); labels_.set(index, value.getNumber()); onChanged(); return this; } /** * * * <pre> * Labels for this request. * </pre> * * <code> * repeated .google.cloud.recaptchaenterprise.v1beta1.AccountDefenderAssessment.AccountDefenderLabel labels = 1; * </code> * * @param value The labels to add. * @return This builder for chaining. */ public Builder addLabels( com.google.recaptchaenterprise.v1beta1.AccountDefenderAssessment.AccountDefenderLabel value) { if (value == null) { throw new NullPointerException(); } ensureLabelsIsMutable(); labels_.add(value.getNumber()); onChanged(); return this; } /** * * * <pre> * Labels for this request. * </pre> * * <code> * repeated .google.cloud.recaptchaenterprise.v1beta1.AccountDefenderAssessment.AccountDefenderLabel labels = 1; * </code> * * @param values The labels to add. * @return This builder for chaining. */ public Builder addAllLabels( java.lang.Iterable< ? extends com.google.recaptchaenterprise.v1beta1.AccountDefenderAssessment .AccountDefenderLabel> values) { ensureLabelsIsMutable(); for (com.google.recaptchaenterprise.v1beta1.AccountDefenderAssessment.AccountDefenderLabel value : values) { labels_.add(value.getNumber()); } onChanged(); return this; } /** * * * <pre> * Labels for this request. * </pre> * * <code> * repeated .google.cloud.recaptchaenterprise.v1beta1.AccountDefenderAssessment.AccountDefenderLabel labels = 1; * </code> * * @return This builder for chaining. 
*/ public Builder clearLabels() { labels_ = java.util.Collections.emptyList(); bitField0_ = (bitField0_ & ~0x00000001); onChanged(); return this; } /** * * * <pre> * Labels for this request. * </pre> * * <code> * repeated .google.cloud.recaptchaenterprise.v1beta1.AccountDefenderAssessment.AccountDefenderLabel labels = 1; * </code> * * @return A list containing the enum numeric values on the wire for labels. */ public java.util.List<java.lang.Integer> getLabelsValueList() { return java.util.Collections.unmodifiableList(labels_); } /** * * * <pre> * Labels for this request. * </pre> * * <code> * repeated .google.cloud.recaptchaenterprise.v1beta1.AccountDefenderAssessment.AccountDefenderLabel labels = 1; * </code> * * @param index The index of the value to return. * @return The enum numeric value on the wire of labels at the given index. */ public int getLabelsValue(int index) { return labels_.get(index); } /** * * * <pre> * Labels for this request. * </pre> * * <code> * repeated .google.cloud.recaptchaenterprise.v1beta1.AccountDefenderAssessment.AccountDefenderLabel labels = 1; * </code> * * @param index The index to set the value at. * @param value The enum numeric value on the wire for labels to set. * @return This builder for chaining. */ public Builder setLabelsValue(int index, int value) { ensureLabelsIsMutable(); labels_.set(index, value); onChanged(); return this; } /** * * * <pre> * Labels for this request. * </pre> * * <code> * repeated .google.cloud.recaptchaenterprise.v1beta1.AccountDefenderAssessment.AccountDefenderLabel labels = 1; * </code> * * @param value The enum numeric value on the wire for labels to add. * @return This builder for chaining. */ public Builder addLabelsValue(int value) { ensureLabelsIsMutable(); labels_.add(value); onChanged(); return this; } /** * * * <pre> * Labels for this request. 
* </pre> * * <code> * repeated .google.cloud.recaptchaenterprise.v1beta1.AccountDefenderAssessment.AccountDefenderLabel labels = 1; * </code> * * @param values The enum numeric values on the wire for labels to add. * @return This builder for chaining. */ public Builder addAllLabelsValue(java.lang.Iterable<java.lang.Integer> values) { ensureLabelsIsMutable(); for (int value : values) { labels_.add(value); } onChanged(); return this; } @java.lang.Override public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:google.cloud.recaptchaenterprise.v1beta1.AccountDefenderAssessment) } // @@protoc_insertion_point(class_scope:google.cloud.recaptchaenterprise.v1beta1.AccountDefenderAssessment) private static final com.google.recaptchaenterprise.v1beta1.AccountDefenderAssessment DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new com.google.recaptchaenterprise.v1beta1.AccountDefenderAssessment(); } public static com.google.recaptchaenterprise.v1beta1.AccountDefenderAssessment getDefaultInstance() { return DEFAULT_INSTANCE; } private static final com.google.protobuf.Parser<AccountDefenderAssessment> PARSER = new com.google.protobuf.AbstractParser<AccountDefenderAssessment>() { @java.lang.Override public AccountDefenderAssessment parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { Builder builder = newBuilder(); try { builder.mergeFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(builder.buildPartial()); } catch (com.google.protobuf.UninitializedMessageException e) { throw 
e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException(e) .setUnfinishedMessage(builder.buildPartial()); } return builder.buildPartial(); } }; public static com.google.protobuf.Parser<AccountDefenderAssessment> parser() { return PARSER; } @java.lang.Override public com.google.protobuf.Parser<AccountDefenderAssessment> getParserForType() { return PARSER; } @java.lang.Override public com.google.recaptchaenterprise.v1beta1.AccountDefenderAssessment getDefaultInstanceForType() { return DEFAULT_INSTANCE; } }
apache/jena
35,786
jena-arq/src/test/java/org/apache/jena/sparql/expr/TestExpressions.java
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.jena.sparql.expr; import static org.junit.jupiter.api.Assertions.assertEquals; import static org.junit.jupiter.api.Assertions.assertThrows; import static org.junit.jupiter.api.Assertions.assertTrue; import java.math.BigDecimal; import java.math.BigInteger; import org.junit.jupiter.api.AfterAll; import org.junit.jupiter.api.BeforeAll; import org.junit.jupiter.api.Test; import org.apache.jena.datatypes.xsd.XSDDatatype; import org.apache.jena.graph.NodeFactory; import org.apache.jena.query.Query; import org.apache.jena.query.QueryFactory; import org.apache.jena.query.QueryParseException; import org.apache.jena.sparql.core.Var; import org.apache.jena.sparql.engine.binding.Binding; import org.apache.jena.sparql.engine.binding.BindingFactory; import org.apache.jena.sparql.function.FunctionEnv; import org.apache.jena.sparql.function.FunctionEnvBase; import org.apache.jena.sparql.util.ExprUtils; import org.apache.jena.sys.JenaSystem; import org.apache.jena.vocabulary.RDF; import org.apache.jena.vocabulary.XSD; /** Break expression testing suite into parts * @see TestExpressions * @see TestExpressions2 * @see TestExpressions3 * @see TestExprLib * @see TestNodeValue */ public class 
TestExpressions { static { JenaSystem.init(); } public final static int NO_FAILURE = 100; public final static int PARSE_FAIL = 250; // Parser should catch it. public final static int EVAL_FAIL = 200; // Parser should pass it but eval should fail it static boolean flagVerboseWarning; @BeforeAll static public void beforeClass() { flagVerboseWarning = NodeValue.VerboseWarnings; NodeValue.VerboseWarnings = false; } @AfterAll static public void afterClass() { NodeValue.VerboseWarnings = flagVerboseWarning; } @Test public void var_1() { testVar("?x", "x"); } @Test public void var_2() { testVar("$x", "x"); } @Test public void var_3() { testVar("?name", "name"); } @Test public void var_4() { testVar("$name", "name"); } @Test public void var_5() { testVar("?x_", "x_"); } @Test public void var_6() { testVar("?x.", "x"); } @Test public void var_7() { testVar("?x.x", "x"); } @Test public void var_8() { testVar("?0", "0"); } @Test public void var_9() { testVar("?0x", "0x"); } @Test public void var_10() { testVar("?x0", "x0"); } @Test public void var_11() { testVar("?_", "_"); } @Test public void syntax_good_1() { testSyntax("?x11"); } @Test public void syntax_good_2() { testSyntax("1+2"); } @Test public void syntax_bad_2() { assertThrows(QueryParseException.class, ()-> testSyntax("1:b") ); } @Test public void syntax_bad_3() { assertThrows(QueryParseException.class, ()-> testSyntax("?") ); } @Test public void syntax_bad_4() { assertThrows(QueryParseException.class, ()-> testSyntax("??") ); } @Test public void syntax_bad_5() { assertThrows(QueryParseException.class, ()-> testSyntax("?.") ); } @Test public void syntax_bad_6() { assertThrows(QueryParseException.class, ()-> testSyntax("?#") ); } @Test public void syntax_bad_7() { assertThrows(QueryParseException.class, ()-> testSyntax("_:") ); } @Test public void syntax_bad_8() { assertThrows(QueryParseException.class, ()-> testSyntax("[]") ); } @Test public void numeric_1() { testNumeric("7", 7); } @Test public void numeric_2() { 
testNumeric("-3", -3); } @Test public void numeric_3() { testNumeric("+2", 2); } @Test public void numeric_4() { assertThrows(QueryParseException.class, ()-> testNumeric("0xF", 0xF) ); } @Test public void numeric_5() { assertThrows(QueryParseException.class, ()-> testNumeric("0x12", 0x12) ); } @Test public void numeric_6() { testNumeric("3--4", 3-(-4)); } @Test public void numeric_7() { testNumeric("3++4", 3+(+4)); } @Test public void numeric_8() { testNumeric("3-+4", 3-+4); } @Test public void numeric_9() { testNumeric("3+-4", 3+-4); } @Test public void numeric_10() { testNumeric("3-(-4)", 3-(-4)); } @Test public void numeric_11() { testNumeric("3+4+5", 3+4+5); } @Test public void numeric_12() { testNumeric("(3+4)+5", 3+4+5); } @Test public void numeric_13() { testNumeric("3+(4+5)", 3+4+5); } @Test public void numeric_14() { testNumeric("3*4+5", 3*4+5); } @Test public void numeric_15() { testNumeric("3*(4+5)", 3*(4+5)); } @Test public void numeric_16() { testNumeric("10-3-5", 10-3-5); } @Test public void numeric_17() { testNumeric("(10-3)-5", (10-3)-5); } @Test public void numeric_18() { testNumeric("10-(3-5)", 10-(3-5)); } @Test public void numeric_19() { testNumeric("10-3+5", 10-3+5); } @Test public void numeric_20() { testNumeric("10-(3+5)", 10-(3+5)); } @Test public void numeric_21() { assertThrows(QueryParseException.class, ()-> testNumeric("1<<2", 1<<2) ); } @Test public void numeric_22() { assertThrows(QueryParseException.class, ()-> testNumeric("1<<2<<2", 1<<2<<2) ); } @Test public void numeric_23() { assertThrows(QueryParseException.class, ()-> testNumeric("10000>>2", 10000>>2) ); } @Test public void numeric_24() { testNumeric("1.5 + 2.5", 1.5+2.5); } @Test public void numeric_25() { testNumeric("1.5 + 2", 1.5+2); } @Test public void numeric_26() { testNumeric("4111222333444", 4111222333444L); } @Test public void numeric_27() { testNumeric("1234 + 4111222333444", 1234 + 4111222333444L); } @Test public void numeric_28() { testNumeric("+2.5", new 
BigDecimal("+2.5")); } @Test public void numeric_29() { testNumeric("-2.5", new BigDecimal("-2.5")); } @Test public void numeric_30() { testNumeric("10000000000000000000000000000+1", new BigInteger("10000000000000000000000000001")); } @Test public void numeric_31() { testNumeric("-10000000000000000000000000000+1", new BigInteger("-9999999999999999999999999999")); } @Test public void boolean_1() { testBoolean("4111222333444 > 1234", 4111222333444L > 1234); } @Test public void boolean_2() { testBoolean("4111222333444 < 1234", 4111222333444L < 1234L); } @Test public void boolean_3() { testBoolean("1.5 < 2", 1.5 < 2 ); } @Test public void boolean_4() { testBoolean("1.5 > 2", 1.5 > 2 ); } @Test public void boolean_5() { testBoolean("1.5 < 2.3", 1.5 < 2.3 ); } @Test public void boolean_6() { testBoolean("1.5 > 2.3", 1.5 > 2.3 ); } @Test public void boolean_7() { testBoolean("'true'^^<"+XSDDatatype.XSDboolean.getURI()+">", true); } @Test public void boolean_8() { testBoolean("'1'^^<"+XSDDatatype.XSDboolean.getURI()+">", true); } @Test public void boolean_9() { testBoolean("'false'^^<"+XSDDatatype.XSDboolean.getURI()+">", false); } @Test public void boolean_10() { testBoolean("'0'^^<"+XSDDatatype.XSDboolean.getURI()+">", false); } @Test public void boolean_11() { testBoolean("1 || false", true); } @Test public void boolean_12() { testBoolean("'foo' || false", true); } @Test public void boolean_13() { testBoolean("0 || false", false); } @Test public void boolean_14() { testBoolean("'' || false", false); } @Test public void boolean_15() { assertThrows(ExprEvalException.class, ()-> testEval("!'junk'^^<urn:unknown:uri>") ); } @Test public void boolean_16() { testBoolean("2 < 3", 2 < 3); } @Test public void boolean_17() { testBoolean("2 > 3", 2 > 3); } @Test public void boolean_18() { testBoolean("(2 < 3) && (3<4)", (2 < 3) && (3<4)); } @Test public void boolean_19() { testBoolean("(2 < 3) && (3>=4)", (2 < 3) && (3>=4)); } @Test public void boolean_20() { testBoolean("(2 < 3) 
|| (3>=4)", (2 < 3) || (3>=4)); } // ?x is unbound in the next few tests @Test public void boolean_21() { testBoolean("(2 < 3) || ?x > 2", true); } @Test public void boolean_22() { assertThrows(ExprEvalException.class, ()-> testEval("(2 > 3) || ?x > 2") ); } @Test public void boolean_23() { testBoolean("(2 > 3) && ?x > 2", false); } @Test public void boolean_24() { assertThrows(ExprEvalException.class, ()-> testEval("(2 < 3) && ?x > 2") ); } @Test public void boolean_25() { testBoolean("?x > 2 || (2 < 3)", true); } @Test public void boolean_26() { assertThrows(ExprEvalException.class, ()-> testEval("?x > 2 || (2 > 3)") ); } @Test public void boolean_27() { assertThrows(ExprEvalException.class, ()-> testEval("?x > 2 && (2 < 3)") ); } @Test public void boolean_28() { testBoolean("?x > 2 && (2 > 3)", false); } @Test public void boolean_29() { assertThrows(ExprEvalException.class, ()-> testEval("! ?x ") ); } @Test public void boolean_30() { testBoolean("! true ", false); } @Test public void boolean_31() { testBoolean("! 
false ", true); } @Test public void boolean_32() { testBoolean("2 = 3", 2 == 3); } @Test public void boolean_33() { testBoolean("!(2 = 3)", !(2 == 3)); } @Test public void boolean_34() { testBoolean("'2' = 2", false); } @Test public void boolean_35() { testBoolean("2 = '2'", false); } @Test public void boolean_36() { assertThrows(ExprEvalException.class, ()-> testEval("2 < '3'") ); } @Test public void boolean_37() { assertThrows(ExprEvalException.class, ()-> testEval("'2' < 3") ); } @Test public void boolean_38() { testBoolean("\"fred\" != \"joe\"", true ); } @Test public void boolean_39() { testBoolean("\"fred\" = \"joe\"", false ); } @Test public void boolean_40() { testBoolean("\"fred\" = \"fred\"", true ); } @Test public void boolean_41() { testBoolean("\"fred\" = 'fred'", true ); } @Test public void boolean_42() { testBoolean("true = true", true); } @Test public void boolean_43() { testBoolean("false = false", true); } @Test public void boolean_44() { testBoolean("true = false", false); } @Test public void boolean_45() { testBoolean("true > true", false); } @Test public void boolean_46() { testBoolean("true >= false", true); } @Test public void boolean_47() { testBoolean("false > false", false); } @Test public void boolean_48() { testBoolean("false >= false", true); } @Test public void boolean_49() { testBoolean("true > false", true); } @Test public void boolean_50() { testBoolean("1 = true", false); } @Test public void boolean_51() { testBoolean("1 != true", true); } @Test public void boolean_52() { testBoolean("'a' != false", true); } @Test public void boolean_53() { testBoolean("0 != false", true); } @Test public void boolean_54() { testBoolean(dateTime1+" = "+dateTime2, true); } @Test public void boolean_55() { testBoolean(dateTime1+" <= "+dateTime2, true); } @Test public void boolean_56() { testBoolean(dateTime1+" >= "+dateTime2, true); } @Test public void boolean_57() { testBoolean(dateTime3+" < "+dateTime1, true); } @Test public void boolean_58() { 
testBoolean(dateTime3+" > "+dateTime1, false); } @Test public void boolean_59() { testBoolean(dateTime4+" < "+dateTime1, false); } @Test public void boolean_60() { testBoolean(dateTime4+" > "+dateTime1, true); } @Test public void boolean_61() { testBoolean(time1+" = "+time2, true); } @Test public void boolean_62() { testBoolean(time1+" <= "+time2, true); } @Test public void boolean_63() { testBoolean(time1+" >= "+time2, true); } @Test public void boolean_64() { testBoolean(time3+" < "+time2, false); } @Test public void boolean_65() { testBoolean(time3+" > "+time2, true); } @Test public void boolean_66() { testBoolean(time4+" < "+time2, true); } @Test public void boolean_67() { testBoolean(time4+" > "+time2, false); } // xsd:dateTimeStamp static String dateTimeStamp1 = "'1999-10-26T19:32:52+00:00'^^<"+XSDDatatype.XSDdateTimeStamp.getURI()+">"; static String dateTimeStamp2 = "'2000-01-01T00:00:00+00:00'^^<"+XSDDatatype.XSDdateTimeStamp.getURI()+">"; @Test public void boolean_68() { testBoolean(dateTimeStamp1+" < "+dateTimeStamp2, true); } @Test public void boolean_70() { testBoolean("isNumeric(12)", true); } @Test public void boolean_71() { testBoolean("isNumeric('12')", false); } @Test public void boolean_72() { testBoolean("isNumeric('12'^^<"+XSDDatatype.XSDbyte.getURI()+">)", true); } @Test public void boolean_73() { testBoolean("isNumeric('1200'^^<"+XSDDatatype.XSDbyte.getURI()+">)", false); } @Test public void boolean_74() { assertThrows(ExprEvalException.class, ()-> testBoolean("isNumeric(?x)", true) ); } // 24:00:00 // Equal static String dateTime1999_24 = "'1999-12-31T24:00:00Z'^^<"+XSDDatatype.XSDdateTime.getURI()+">"; static String dateTime2000_00 = "'2000-01-01T00:00:00Z'^^<"+XSDDatatype.XSDdateTime.getURI()+">"; static String time_24 = "'24:00:00'^^<"+XSDDatatype.XSDtime.getURI()+">"; static String time_00 = "'00:00:00'^^<"+XSDDatatype.XSDtime.getURI()+">"; @Test public void dateTime24_01() { testBoolean(dateTime1999_24+" = "+dateTime2000_00 , true); } 
@Test public void time24_01() { testBoolean(time_24+" = "+time_00 , true); } static String duration1 = "'P1Y1M1DT1H1M1S"+"'^^<"+XSDDatatype.XSDduration.getURI()+">"; static String duration2 = "'P2Y1M1DT1H1M1S"+"'^^<"+XSDDatatype.XSDduration.getURI()+">"; static String duration3 = "'P1Y1M1DT1H1M1S"+"'^^<"+XSDDatatype.XSDduration.getURI()+">"; static String duration4 = "'PT1H1M1S"+"'^^<"+XSDDatatype.XSDduration.getURI()+">"; static String duration5 = "'PT1H1M1.9S"+"'^^<"+XSDDatatype.XSDduration.getURI()+">"; static String duration5a = "'PT61M1.9S"+"'^^<"+XSDDatatype.XSDduration.getURI()+">"; static String duration5b = "'PT3661.9S"+"'^^<"+XSDDatatype.XSDduration.getURI()+">"; static String duration7 = "'-PT1H"+"'^^<"+XSDDatatype.XSDduration.getURI()+">"; static String duration8 = "'PT0H0M0S"+"'^^<"+XSDDatatype.XSDduration.getURI()+">"; @Test public void duration_01() { testBoolean(duration1+" = "+duration1, true); } // Extended - these are not dayTime nor yearMonth. @Test public void duration_02() { testBoolean(duration1+" < "+duration2, true); } @Test public void duration_03() { testBoolean(duration1+" > "+duration2, false); } @Test public void duration_04() { testBoolean(duration1+" < "+duration2, true); } @Test public void duration_05() { testBoolean(duration1+" = "+duration3, true); } @Test public void duration_06() { testBoolean(duration1+" <= "+duration3, true); } @Test public void duration_07() { testBoolean(duration1+" >= "+duration3, true); } @Test public void duration_08() { testBoolean(duration7+" < "+duration8, true); } // duration5* are the same duration length, written differently @Test public void duration_10() { testBoolean(duration5+" > "+duration4, true); } @Test public void duration_11() { testBoolean(duration5a+" = "+duration5, true); } @Test public void duration_12() { testBoolean(duration5a+" = "+duration5b, true); } @Test public void duration_13() { testBoolean(duration5b+" = "+duration5, true); } @Test public void duration_14() { 
testBoolean(duration5a+" > "+duration4, true); } @Test public void URI_1() { testURI("<a>", baseNS+"a" ); } @Test public void URI_2() { testURI("<a\\u00E9>", baseNS+"a\u00E9" ); } @Test public void URI_3() { testURI("ex:b", exNS+"b" ); } @Test public void URI_4() { testURI("ex:b_", exNS+"b_" ); } @Test public void URI_5() { testURI("ex:a_b", exNS+"a_b" ); } @Test public void URI_6() { testURI("ex:", exNS ); } @Test public void URI_7() { assertThrows(QueryParseException.class, ()-> testURI("x.:", xNS) ); } @Test public void URI_8() { testURI("rdf:_2", rdfNS+"_2" ); } @Test public void URI_9() { testURI("rdf:__2", rdfNS+"__2" ); } @Test public void URI_10() { testURI(":b", dftNS+"b" ); } @Test public void URI_11() { testURI(":", dftNS ); } // These assume \-u processing by JavaCC // Migration to processing like Turtle, in strings and URIs only. // @Test public void URI_12() { testURI(":\\u00E9", dftNS+"\u00E9" ); } // @Test public void URI_13() { testURI("\\u0065\\u0078:", exNS ); } @Test public void URI_14() { testURI("select:a", selNS+"a" ); } @Test public void URI_15() { testURI("ex:a.", exNS+"a"); } @Test public void URI_16() { testURI("ex:a.a", exNS+"a.a"); } @Test public void URI_17() { assertThrows(QueryParseException.class, ()-> testURI("x.:a.a", xNS+"a.a") ); } @Test public void URI_18() { testURI("ex:2", exNS+"2" ); } @Test public void URI_19() { testURI("ex:2ab_c", exNS+"2ab_c" ); } @Test public void boolean_76() { testBoolean("'fred'@en = 'fred'", false ); } @Test public void boolean_77() { testBoolean("'fred'@en = 'bert'", false ); } @Test public void boolean_78() { testBoolean("'fred'@en != 'fred'", true ); } @Test public void boolean_79() { testBoolean("'fred'@en != 'bert'", true ); } @Test public void boolean_80() { testBoolean("'chat'@en = 'chat'@fr", false ); } @Test public void boolean_81() { testBoolean("'chat'@en = 'maison'@fr", false ); } @Test public void boolean_82() { testBoolean("'chat'@en != 'chat'@fr", true ); } @Test public void 
boolean_83() { testBoolean("'chat'@en != 'maison'@fr", true ); } @Test public void boolean_84() { testBoolean("'chat'@en = 'chat'@EN", true ); } @Test public void boolean_85() { testBoolean("'chat'@en = 'chat'@en-uk", false ); } @Test public void boolean_86() { testBoolean("'chat'@en != 'chat'@EN", false ); } @Test public void boolean_87() { testBoolean("'chat'@en != 'chat'@en-uk", true ); } @Test public void boolean_88() { testBoolean("'chat'@en = <http://example/>", false ); } @Test public void URI_20() { assertThrows(QueryParseException.class, ()-> testURI("()", RDF.nil.getURI()) ); } @Test public void boolean_89() { testBoolean("'fred'^^<type1> = 'fred'^^<type1>", true ); } @Test public void boolean_90() { assertThrows(ExprEvalException.class, ()-> testEval("'fred'^^<type1> != 'joe'^^<type1>" ) ); } @Test public void boolean_91() { assertThrows(ExprEvalException.class, ()-> testEval("'fred'^^<type1> = 'fred'^^<type2>" ) ); } @Test public void boolean_92() { assertThrows(ExprEvalException.class, ()-> testEval("'fred'^^<type1> != 'joe'^^<type2>" ) ); } @Test public void boolean_93() { testBoolean("'fred'^^<"+XSDDatatype.XSDstring.getURI()+"> = 'fred'", true ); } @Test public void boolean_94() { assertThrows(ExprEvalException.class, ()-> testEval("'fred'^^<type1> = 'fred'" ) ); } @Test public void boolean_95() { assertThrows(ExprEvalException.class, ()-> testEval("'fred'^^<type1> != 'fred'" ) ); } @Test public void boolean_96() { assertThrows(ExprEvalException.class, ()-> testBoolean("'21'^^<int> = '21'", true ) ); } @Test public void numeric_51() { testNumeric("'21'^^<"+XSDDatatype.XSDinteger.getURI()+">", 21); } @Test public void boolean_97() { testBoolean("'21'^^<"+XSDDatatype.XSDinteger.getURI()+"> = 21", true); } @Test public void boolean_98() { testBoolean("'21'^^<"+XSDDatatype.XSDinteger.getURI()+"> = 22", false); } @Test public void boolean_99() { testBoolean("'21'^^<"+XSDDatatype.XSDinteger.getURI()+"> != 21", false); } @Test public void boolean_100() { 
testBoolean("'21'^^<"+XSDDatatype.XSDinteger.getURI()+"> != 22", true); } @Test public void boolean_101() { assertThrows(ExprEvalException.class, ()-> testEval("'x'^^<type1> = 21") ); } @Test public void boolean_102() { assertThrows(ExprEvalException.class, ()-> testEval("'x'^^<type1> != 21") ); } @Test public void boolean_103() { assertThrows(ExprEvalException.class, ()-> testEval("'x'^^<http://example/unknown> = true") ); } @Test public void boolean_104() { assertThrows(ExprEvalException.class, ()-> testEval("'x'^^<http://example/unknown> != true") ); } @Test public void boolean_105() { testBoolean("'x'^^<http://example/unknown> = 'x'^^<http://example/unknown>", true); } @Test public void boolean_106() { assertThrows(ExprEvalException.class, ()-> testEval("'x'^^<http://example/unknown> = 'y'^^<http://example/unknown>") ); } @Test public void boolean_107() { testBoolean("'x'^^<http://example/unknown> != 'x'^^<http://example/unknown>", false); } @Test public void boolean_108() { assertThrows(ExprEvalException.class, ()-> testEval("'x'^^<http://example/unknown> != 'y'^^<http://example/unknown>") ); } @Test public void string_1() { testString("'a\\nb'", "a\nb"); } @Test public void string_2() { testString("'a\\n'", "a\n"); } @Test public void string_3() { testString("'\\nb'", "\nb"); } @Test public void string_4() { testString("'a\\tb'", "a\tb"); } @Test public void string_5() { testString("'a\\bb'", "a\bb"); } @Test public void string_6() { testString("'a\\rb'", "a\rb"); } @Test public void string_7() { testString("'a\\fb'", "a\fb"); } @Test public void string_8() { testString("'a\\\\b'", "a\\b"); } @Test public void string_9() { testString("'a\\u0020a'", "a a"); } @Test public void string_10() { testString("'a\\uF021'", "a\uF021"); } @Test public void string_11() { testString("'a\\U0000F021'", "a\uF021"); } @Test public void string_bad_1() { assertThrows(QueryParseException.class, ()-> testString("'a\\X'") ); } @Test public void string_bad_2() { 
assertThrows(QueryParseException.class, ()-> testString("'aaa\\'") ); } @Test public void string_bad_3() { assertThrows(QueryParseException.class, ()-> testString("'\\u'") ); } @Test public void string_bad_4() { assertThrows(QueryParseException.class, ()-> testString("'\\u111'") ); } // @Test public void boolean_109() { testBoolean("\"fred\\1\" = 'fred1'", false ); } // @Test public void boolean_110() { testBoolean("\"fred2\" = 'fred\\2'", true ); } @Test public void boolean_111() { testBoolean("'fred\\\\3' != \"fred3\"", true ); } @Test public void boolean_112() { testBoolean("'urn:ex:fred' = <urn:ex:fred>" , false); } @Test public void boolean_113() { testBoolean("'urn:ex:fred' != <urn:ex:fred>" , true); } @Test public void boolean_114() { testBoolean("'urn:ex:fred' = <urn:ex:fred>", false ); } @Test public void boolean_115() { testBoolean("'urn:ex:fred' != <urn:ex:fred>", true ); } @Test public void boolean_116() { testBoolean("REGEX('aabbcc', 'abbc')", true ); } @Test public void boolean_117() { testBoolean("REGEX('aabbcc' , 'a..c')", true ); } @Test public void boolean_118() { testBoolean("REGEX('aabbcc' , '^aabb')", true ); } @Test public void boolean_119() { testBoolean("REGEX('aabbcc' , 'cc$')", true ); } @Test public void boolean_120() { testBoolean("! 
REGEX('aabbcc' , 'abbc')", false ); } @Test public void boolean_121() { testBoolean("REGEX('aa\\\\cc', '\\\\\\\\')", true ); } @Test public void boolean_122() { testBoolean("REGEX('aab*bcc', 'ab\\\\*bc')", true ); } @Test public void boolean_123() { testBoolean("REGEX('aabbcc', 'ab\\\\\\\\*bc')", true ); } @Test public void boolean_124() { testBoolean("REGEX('aabbcc', 'B.*B', 'i')", true ); } @Test public void boolean_125() { assertThrows(ExprEvalException.class, ()-> testEval("2 < 'fred'") ); } @Test public void boolean_126() { testBoolean("datatype('fred') = <"+XSD.xstring.getURI()+">", true); } @Test public void boolean_127() { testBoolean("datatype('fred'^^<urn:test:foo>) = <urn:test:foo>", true); } @Test public void boolean_128() { testBoolean("datatype('fred'^^<foo>) = <Foo>", false); } @Test public void lang_01() { testString("LANG('tea time'@en)", "en"); } // Aside For some strange reason, the language code is GB not UK. // "The United Kingdom of Great Britain and Norther Ireland." // The four countries England, Scotland, Wales and Northern Ireland (since 1922). 
// It's complicated: https://en.wikipedia.org/wiki/United_Kingdom @Test public void lang_02() { testString("LANG('tea time'@en-gb)", "en-GB"); } @Test public void lang_03() { testString("LANG('tea time')", ""); } @Test public void lang_04() { testBoolean("hasLANG('tea time'@en-gb)", true); } @Test public void lang_05() { testBoolean("hasLANG('tea time')", false); } // hasLANG hasLANGDIR LANG LANGDIR STRLANGDIR @Test public void langdir_01() { testBoolean("hasLANGDIR('coffee time')", false); } @Test public void langdir_02() { testString("LANGDIR('coffee time')", ""); } @Test public void langdir_03() { testSyntax("STRLANGDIR('abc', 'fr', 'ltr')"); } @Test public void langdir_04() { testBoolean("hasLANGDIR( STRLANGDIR('abc', 'fr', 'ltr') )", true); } @Test public void langdir_05() { testString("LANGDIR( STRLANGDIR('abc', 'fr', 'ltr') )", "ltr"); } @Test public void langdir_06() { testString("LANG( STRLANGDIR('abc', 'fr', 'ltr') )", "fr"); } @Test public void langdir_07() { testString("LANGDIR( STRLANG('abc', 'fr--ltr') )", ""); } @Test public void langmatches_01() { testBoolean("LANGMATCHES('EN', 'en')", true); } @Test public void langmatches_02() { testBoolean("LANGMATCHES('en', 'en')", true); } @Test public void langmatches_03() { testBoolean("LANGMATCHES('EN', 'EN')", true); } @Test public void langmatches_04() { testBoolean("LANGMATCHES('en', 'EN')", true); } @Test public void langmatches_05() { testBoolean("LANGMATCHES('fr', 'EN')", false); } @Test public void langmatches_06() { testBoolean("LANGMATCHES('en', 'en-gb')", false); } @Test public void langmatches_07() { testBoolean("LANGMATCHES('en-GB', 'en-GB')", true); } @Test public void langmatches_08() { testBoolean("LANGMATCHES('en-Latn-gb', 'en-Latn')", true); } @Test public void langmatches_09() { testBoolean("LANGMATCHES('en-gb', 'en-Latn')", false); } @Test public void langmatches_10() { testBoolean("LANGMATCHES('', '*')", false); } @Test public void langmatches_11() { testBoolean("LANGMATCHES('en-us', 
'*')", true); } // RDF 1.2: triple terms. @Test public void tripleterm_01() { testEval("TRIPLE(<x:s>, <x:p>, 123)"); } @Test public void tripleterm_02() { testURI("SUBJECT( TRIPLE(<x:s>, <x:p>, 123) )", "x:s"); } @Test public void tripleterm_03() { testURI("PREDICATE( TRIPLE(<x:s>, <x:p>, 123) )", "x:p"); } @Test public void tripleterm_03a() { assertThrows(QueryParseException.class, ()-> testURI("PROPERTY( TRIPLE(<x:s>, <x:p>, 123) )", "x:p") ); } @Test public void tripleterm_04() { testNumeric("OBJECT( TRIPLE(<x:s>, <x:p>, 123) )", 123); } @Test public void boolean_129() { testBoolean("isURI(?x)", true, env); } @Test public void boolean_130() { testBoolean("isURI(?a)", false, env); } @Test public void boolean_131() { testBoolean("isURI(?b)", false, env); } // ?y is unbound @Test public void boolean_132() { assertThrows(ExprEvalException.class, ()-> testBoolean("isURI(?y)", false, env) ); } @Test public void boolean_133() { testBoolean("isURI(<urn:test:foo>)", true, env); } @Test public void boolean_134() { testBoolean("isURI('bar')", false, env); } @Test public void boolean_135() { testBoolean("isLiteral(?x)", false, env); } @Test public void boolean_136() { testBoolean("isLiteral(?a)", true, env); } @Test public void boolean_137() { testBoolean("isLiteral(?b)", false, env); } @Test public void boolean_138() { assertThrows(ExprEvalException.class, ()-> testBoolean("isLiteral(?y)", false, env) ); } @Test public void boolean_139() { testBoolean("isBlank(?x)", false, env); } @Test public void boolean_140() { testBoolean("isBlank(?a)", false, env); } @Test public void boolean_141() { testBoolean("isBlank(?b)", true, env); } @Test public void boolean_142() { assertThrows(ExprEvalException.class, ()-> testBoolean("isBlank(?y)", false, env) ); } @Test public void boolean_143() { testBoolean("bound(?a)", true, env); } @Test public void boolean_144() { testBoolean("bound(?b)", true, env); } @Test public void boolean_145() { testBoolean("bound(?x)", true, env); } @Test 
public void boolean_146() { testBoolean("bound(?y)", false, env); } @Test public void string_18() { testString("str(<urn:ex:x>)", "urn:ex:x"); } @Test public void string_19() { testString("str('')", ""); } @Test public void string_20() { testString("str(15)", "15"); } @Test public void string_21() { testString("str('15.20'^^<"+XSDDatatype.XSDdouble.getURI()+">)", "15.20"); } @Test public void string_22() { testString("str('lex'^^<x:unknown>)", "lex"); } @Test public void boolean_147() { testBoolean("sameTerm(1, 1)", true, env); } @Test public void boolean_148() { testBoolean("sameTerm(1, 1.0)", false, env); } @Test public void numeric_52() { testNumeric("<"+xsd+"integer>('3')", 3); } @Test public void numeric_53() { testNumeric("<"+xsd+"byte>('3')", 3); } @Test public void numeric_54() { testNumeric("<"+xsd+"int>('3')", 3); } @Test public void boolean_149() { testBoolean("<"+xsd+"double>('3') = 3", true); } @Test public void boolean_150() { testBoolean("<"+xsd+"float>('3') = 3", true); } @Test public void boolean_151() { testBoolean("<"+xsd+"double>('3') = <"+xsd+"float>('3')", true); } @Test public void boolean_152() { testBoolean("<"+xsd+"double>(str('3')) = 3", true); } @Test public void string_23() { testString("'a'+'b'", "ab"); } // Not strict @Test public void string_24() { assertThrows(ExprEvalException.class, ()-> testString("'a'+12") ); } public void string_25() { testString("12+'a'"); } public void string_26() { testString("<uri>+'a'"); } static String dateTime1 = "'2005-02-25T12:03:34Z'^^<"+XSDDatatype.XSDdateTime.getURI()+">"; static String dateTime2 = "'2005-02-25T12:03:34Z'^^<"+XSDDatatype.XSDdateTime.getURI()+">"; // Earlier static String dateTime3 = "'2005-01-01T12:03:34Z'^^<"+XSDDatatype.XSDdateTime.getURI()+">"; // Later static String dateTime4 = "'2005-02-25T13:00:00Z'^^<"+XSDDatatype.XSDdateTime.getURI()+">"; static String time1 = "'12:03:34Z'^^<" + XSDDatatype.XSDtime.getURI() + ">"; static String time2 = "'12:03:34Z'^^<" + 
XSDDatatype.XSDtime.getURI() + ">"; static String time3 = "'13:00:00Z'^^<" + XSDDatatype.XSDtime.getURI() + ">"; static String time4 = "'11:03:34Z'^^<" + XSDDatatype.XSDtime.getURI() + ">"; static String exNS = "http://example.org/"; static String xNS = "http://example.org/dot#"; static String selNS = "http://select/"; static String dftNS = "http://default/"; static String baseNS = "http://base/"; static String rdfNS = RDF.getURI(); static Query query = QueryFactory.make(); static { query.setBaseURI(baseNS); query.setPrefix("ex", exNS); query.setPrefix("rdf", RDF.getURI()); query.setPrefix("x.", xNS); query.setPrefix("", dftNS); query.setPrefix("select", selNS); } static String xsd = XSDDatatype.XSD+"#"; static Binding env = BindingFactory.binding(Var.alloc("a"), NodeFactory.createLiteralString("A"), Var.alloc("b"), NodeFactory.createBlankNode(), Var.alloc("x"), NodeFactory.createURI("urn:ex:abcd")); // Parse and ensure the whole string was used. private static Expr parseToEnd(String exprString) { return ExprUtils.parse(query, exprString, true); } // Parse, stopping when the expression ends. private static Expr parseAny(String exprString) { return ExprUtils.parse(query, exprString, false); } private static void testVar(String string, String rightVarName) { Expr expr = parseAny(string); assertTrue(expr.isVariable(), ()->"Not a NodeVar: " + expr); ExprVar v = (ExprVar)expr; assertEquals(rightVarName, v.getVarName(), ()->"Different variable names"); } private static void testSyntax(String exprString) { parseToEnd(exprString); } // "should evaluate", don't care what the result is. 
private static void testEval(String string) { Expr expr = parseToEnd(string); Binding binding = BindingFactory.empty(); FunctionEnv env = new FunctionEnvBase(); NodeValue v = expr.eval(binding, env); } // All value testing should be parseToEnd private static void testNumeric(String string, int i) { Expr expr = parseToEnd(string); Binding binding = BindingFactory.empty(); FunctionEnv env = new FunctionEnvBase(); NodeValue v = expr.eval(binding, env); assertTrue(v.isInteger()); assertEquals(i, v.getInteger().intValue()); } private static void testNumeric(String string, BigDecimal decimal) { Expr expr = parseToEnd(string); Binding binding = BindingFactory.empty(); FunctionEnv env = new FunctionEnvBase(); NodeValue v = expr.eval(binding, env); assertTrue(v.isDecimal()); assertEquals(decimal, v.getDecimal()); } private static void testNumeric(String string, BigInteger integer) { Expr expr = parseToEnd(string); Binding binding = BindingFactory.empty(); FunctionEnv env = new FunctionEnvBase(); NodeValue v = expr.eval(binding, env); assertTrue(v.isInteger()); assertEquals(integer, v.getInteger()); } private static void testNumeric(String string, double d) { Expr expr = parseToEnd(string); Binding binding = BindingFactory.empty(); FunctionEnv env = new FunctionEnvBase(); NodeValue v = expr.eval(binding, env); assertTrue(v.isDouble()); assertEquals(d, v.getDouble(), 0); } private static void testBoolean(String string, boolean b) { testBoolean(string, b, BindingFactory.empty()); } private static void testBoolean(String string, boolean b, Binding binding) { Expr expr = parseToEnd(string); FunctionEnv env = new FunctionEnvBase(); NodeValue v = expr.eval(binding, env); assertTrue(v.isBoolean()); assertEquals(b, v.getBoolean()); } private static void testURI(String string, String uri) { // Exception to the rule - parseAny Expr expr = parseAny(string); NodeValue v = expr.eval(env, new FunctionEnvBase()); assertTrue(v.isIRI()); assertEquals(uri, v.getNode().getURI()); } private 
static void testString(String string, String string2) { Expr expr = parseToEnd(string); NodeValue v = expr.eval(env, new FunctionEnvBase()); assertTrue(v.isString()); assertEquals(string2, v.getString()); } private static void testString(String string) { Expr expr = parseToEnd(string); NodeValue v = expr.eval(env, new FunctionEnvBase()); assertTrue(v.isString()); } }
googleapis/google-cloud-java
36,036
java-geminidataanalytics/google-cloud-geminidataanalytics/src/test/java/com/google/cloud/geminidataanalytics/v1beta/DataAgentServiceClientHttpJsonTest.java
/* * Copyright 2025 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.google.cloud.geminidataanalytics.v1beta; import static com.google.cloud.geminidataanalytics.v1beta.DataAgentServiceClient.ListAccessibleDataAgentsPagedResponse; import static com.google.cloud.geminidataanalytics.v1beta.DataAgentServiceClient.ListDataAgentsPagedResponse; import static com.google.cloud.geminidataanalytics.v1beta.DataAgentServiceClient.ListLocationsPagedResponse; import com.google.api.gax.core.NoCredentialsProvider; import com.google.api.gax.httpjson.GaxHttpJsonProperties; import com.google.api.gax.httpjson.testing.MockHttpService; import com.google.api.gax.rpc.ApiClientHeaderProvider; import com.google.api.gax.rpc.ApiException; import com.google.api.gax.rpc.ApiExceptionFactory; import com.google.api.gax.rpc.InvalidArgumentException; import com.google.api.gax.rpc.StatusCode; import com.google.api.gax.rpc.testing.FakeStatusCode; import com.google.api.resourcenames.ResourceName; import com.google.cloud.geminidataanalytics.v1beta.stub.HttpJsonDataAgentServiceStub; import com.google.cloud.location.GetLocationRequest; import com.google.cloud.location.ListLocationsRequest; import com.google.cloud.location.ListLocationsResponse; import com.google.cloud.location.Location; import com.google.common.collect.Lists; import com.google.iam.v1.AuditConfig; import com.google.iam.v1.Binding; import com.google.iam.v1.Policy; import com.google.longrunning.Operation; import 
com.google.protobuf.Any; import com.google.protobuf.ByteString; import com.google.protobuf.Empty; import com.google.protobuf.FieldMask; import com.google.protobuf.Timestamp; import java.io.IOException; import java.util.ArrayList; import java.util.Arrays; import java.util.HashMap; import java.util.List; import java.util.concurrent.ExecutionException; import javax.annotation.Generated; import org.junit.After; import org.junit.AfterClass; import org.junit.Assert; import org.junit.Before; import org.junit.BeforeClass; import org.junit.Test; @Generated("by gapic-generator-java") public class DataAgentServiceClientHttpJsonTest { private static MockHttpService mockService; private static DataAgentServiceClient client; @BeforeClass public static void startStaticServer() throws IOException { mockService = new MockHttpService( HttpJsonDataAgentServiceStub.getMethodDescriptors(), DataAgentServiceSettings.getDefaultEndpoint()); DataAgentServiceSettings settings = DataAgentServiceSettings.newHttpJsonBuilder() .setTransportChannelProvider( DataAgentServiceSettings.defaultHttpJsonTransportProviderBuilder() .setHttpTransport(mockService) .build()) .setCredentialsProvider(NoCredentialsProvider.create()) .build(); client = DataAgentServiceClient.create(settings); } @AfterClass public static void stopServer() { client.close(); } @Before public void setUp() {} @After public void tearDown() throws Exception { mockService.reset(); } @Test public void listDataAgentsTest() throws Exception { DataAgent responsesElement = DataAgent.newBuilder().build(); ListDataAgentsResponse expectedResponse = ListDataAgentsResponse.newBuilder() .setNextPageToken("") .addAllDataAgents(Arrays.asList(responsesElement)) .build(); mockService.addResponse(expectedResponse); LocationName parent = LocationName.of("[PROJECT]", "[LOCATION]"); ListDataAgentsPagedResponse pagedListResponse = client.listDataAgents(parent); List<DataAgent> resources = Lists.newArrayList(pagedListResponse.iterateAll()); 
Assert.assertEquals(1, resources.size()); Assert.assertEquals(expectedResponse.getDataAgentsList().get(0), resources.get(0)); List<String> actualRequests = mockService.getRequestPaths(); Assert.assertEquals(1, actualRequests.size()); String apiClientHeaderKey = mockService .getRequestHeaders() .get(ApiClientHeaderProvider.getDefaultApiClientHeaderKey()) .iterator() .next(); Assert.assertTrue( GaxHttpJsonProperties.getDefaultApiClientHeaderPattern() .matcher(apiClientHeaderKey) .matches()); } @Test public void listDataAgentsExceptionTest() throws Exception { ApiException exception = ApiExceptionFactory.createException( new Exception(), FakeStatusCode.of(StatusCode.Code.INVALID_ARGUMENT), false); mockService.addException(exception); try { LocationName parent = LocationName.of("[PROJECT]", "[LOCATION]"); client.listDataAgents(parent); Assert.fail("No exception raised"); } catch (InvalidArgumentException e) { // Expected exception. } } @Test public void listDataAgentsTest2() throws Exception { DataAgent responsesElement = DataAgent.newBuilder().build(); ListDataAgentsResponse expectedResponse = ListDataAgentsResponse.newBuilder() .setNextPageToken("") .addAllDataAgents(Arrays.asList(responsesElement)) .build(); mockService.addResponse(expectedResponse); String parent = "projects/project-5833/locations/location-5833"; ListDataAgentsPagedResponse pagedListResponse = client.listDataAgents(parent); List<DataAgent> resources = Lists.newArrayList(pagedListResponse.iterateAll()); Assert.assertEquals(1, resources.size()); Assert.assertEquals(expectedResponse.getDataAgentsList().get(0), resources.get(0)); List<String> actualRequests = mockService.getRequestPaths(); Assert.assertEquals(1, actualRequests.size()); String apiClientHeaderKey = mockService .getRequestHeaders() .get(ApiClientHeaderProvider.getDefaultApiClientHeaderKey()) .iterator() .next(); Assert.assertTrue( GaxHttpJsonProperties.getDefaultApiClientHeaderPattern() .matcher(apiClientHeaderKey) .matches()); } @Test 
public void listDataAgentsExceptionTest2() throws Exception { ApiException exception = ApiExceptionFactory.createException( new Exception(), FakeStatusCode.of(StatusCode.Code.INVALID_ARGUMENT), false); mockService.addException(exception); try { String parent = "projects/project-5833/locations/location-5833"; client.listDataAgents(parent); Assert.fail("No exception raised"); } catch (InvalidArgumentException e) { // Expected exception. } } @Test public void listAccessibleDataAgentsTest() throws Exception { DataAgent responsesElement = DataAgent.newBuilder().build(); ListAccessibleDataAgentsResponse expectedResponse = ListAccessibleDataAgentsResponse.newBuilder() .setNextPageToken("") .addAllDataAgents(Arrays.asList(responsesElement)) .build(); mockService.addResponse(expectedResponse); LocationName parent = LocationName.of("[PROJECT]", "[LOCATION]"); ListAccessibleDataAgentsPagedResponse pagedListResponse = client.listAccessibleDataAgents(parent); List<DataAgent> resources = Lists.newArrayList(pagedListResponse.iterateAll()); Assert.assertEquals(1, resources.size()); Assert.assertEquals(expectedResponse.getDataAgentsList().get(0), resources.get(0)); List<String> actualRequests = mockService.getRequestPaths(); Assert.assertEquals(1, actualRequests.size()); String apiClientHeaderKey = mockService .getRequestHeaders() .get(ApiClientHeaderProvider.getDefaultApiClientHeaderKey()) .iterator() .next(); Assert.assertTrue( GaxHttpJsonProperties.getDefaultApiClientHeaderPattern() .matcher(apiClientHeaderKey) .matches()); } @Test public void listAccessibleDataAgentsExceptionTest() throws Exception { ApiException exception = ApiExceptionFactory.createException( new Exception(), FakeStatusCode.of(StatusCode.Code.INVALID_ARGUMENT), false); mockService.addException(exception); try { LocationName parent = LocationName.of("[PROJECT]", "[LOCATION]"); client.listAccessibleDataAgents(parent); Assert.fail("No exception raised"); } catch (InvalidArgumentException e) { // Expected 
exception. } } @Test public void listAccessibleDataAgentsTest2() throws Exception { DataAgent responsesElement = DataAgent.newBuilder().build(); ListAccessibleDataAgentsResponse expectedResponse = ListAccessibleDataAgentsResponse.newBuilder() .setNextPageToken("") .addAllDataAgents(Arrays.asList(responsesElement)) .build(); mockService.addResponse(expectedResponse); String parent = "projects/project-5833/locations/location-5833"; ListAccessibleDataAgentsPagedResponse pagedListResponse = client.listAccessibleDataAgents(parent); List<DataAgent> resources = Lists.newArrayList(pagedListResponse.iterateAll()); Assert.assertEquals(1, resources.size()); Assert.assertEquals(expectedResponse.getDataAgentsList().get(0), resources.get(0)); List<String> actualRequests = mockService.getRequestPaths(); Assert.assertEquals(1, actualRequests.size()); String apiClientHeaderKey = mockService .getRequestHeaders() .get(ApiClientHeaderProvider.getDefaultApiClientHeaderKey()) .iterator() .next(); Assert.assertTrue( GaxHttpJsonProperties.getDefaultApiClientHeaderPattern() .matcher(apiClientHeaderKey) .matches()); } @Test public void listAccessibleDataAgentsExceptionTest2() throws Exception { ApiException exception = ApiExceptionFactory.createException( new Exception(), FakeStatusCode.of(StatusCode.Code.INVALID_ARGUMENT), false); mockService.addException(exception); try { String parent = "projects/project-5833/locations/location-5833"; client.listAccessibleDataAgents(parent); Assert.fail("No exception raised"); } catch (InvalidArgumentException e) { // Expected exception. 
} } @Test public void getDataAgentTest() throws Exception { DataAgent expectedResponse = DataAgent.newBuilder() .setName(DataAgentName.of("[PROJECT]", "[LOCATION]", "[DATA_AGENT]").toString()) .setDisplayName("displayName1714148973") .setDescription("description-1724546052") .putAllLabels(new HashMap<String, String>()) .setCreateTime(Timestamp.newBuilder().build()) .setUpdateTime(Timestamp.newBuilder().build()) .setDeleteTime(Timestamp.newBuilder().build()) .setPurgeTime(Timestamp.newBuilder().build()) .build(); mockService.addResponse(expectedResponse); DataAgentName name = DataAgentName.of("[PROJECT]", "[LOCATION]", "[DATA_AGENT]"); DataAgent actualResponse = client.getDataAgent(name); Assert.assertEquals(expectedResponse, actualResponse); List<String> actualRequests = mockService.getRequestPaths(); Assert.assertEquals(1, actualRequests.size()); String apiClientHeaderKey = mockService .getRequestHeaders() .get(ApiClientHeaderProvider.getDefaultApiClientHeaderKey()) .iterator() .next(); Assert.assertTrue( GaxHttpJsonProperties.getDefaultApiClientHeaderPattern() .matcher(apiClientHeaderKey) .matches()); } @Test public void getDataAgentExceptionTest() throws Exception { ApiException exception = ApiExceptionFactory.createException( new Exception(), FakeStatusCode.of(StatusCode.Code.INVALID_ARGUMENT), false); mockService.addException(exception); try { DataAgentName name = DataAgentName.of("[PROJECT]", "[LOCATION]", "[DATA_AGENT]"); client.getDataAgent(name); Assert.fail("No exception raised"); } catch (InvalidArgumentException e) { // Expected exception. 
} } @Test public void getDataAgentTest2() throws Exception { DataAgent expectedResponse = DataAgent.newBuilder() .setName(DataAgentName.of("[PROJECT]", "[LOCATION]", "[DATA_AGENT]").toString()) .setDisplayName("displayName1714148973") .setDescription("description-1724546052") .putAllLabels(new HashMap<String, String>()) .setCreateTime(Timestamp.newBuilder().build()) .setUpdateTime(Timestamp.newBuilder().build()) .setDeleteTime(Timestamp.newBuilder().build()) .setPurgeTime(Timestamp.newBuilder().build()) .build(); mockService.addResponse(expectedResponse); String name = "projects/project-8616/locations/location-8616/dataAgents/dataAgent-8616"; DataAgent actualResponse = client.getDataAgent(name); Assert.assertEquals(expectedResponse, actualResponse); List<String> actualRequests = mockService.getRequestPaths(); Assert.assertEquals(1, actualRequests.size()); String apiClientHeaderKey = mockService .getRequestHeaders() .get(ApiClientHeaderProvider.getDefaultApiClientHeaderKey()) .iterator() .next(); Assert.assertTrue( GaxHttpJsonProperties.getDefaultApiClientHeaderPattern() .matcher(apiClientHeaderKey) .matches()); } @Test public void getDataAgentExceptionTest2() throws Exception { ApiException exception = ApiExceptionFactory.createException( new Exception(), FakeStatusCode.of(StatusCode.Code.INVALID_ARGUMENT), false); mockService.addException(exception); try { String name = "projects/project-8616/locations/location-8616/dataAgents/dataAgent-8616"; client.getDataAgent(name); Assert.fail("No exception raised"); } catch (InvalidArgumentException e) { // Expected exception. 
} } @Test public void createDataAgentTest() throws Exception { DataAgent expectedResponse = DataAgent.newBuilder() .setName(DataAgentName.of("[PROJECT]", "[LOCATION]", "[DATA_AGENT]").toString()) .setDisplayName("displayName1714148973") .setDescription("description-1724546052") .putAllLabels(new HashMap<String, String>()) .setCreateTime(Timestamp.newBuilder().build()) .setUpdateTime(Timestamp.newBuilder().build()) .setDeleteTime(Timestamp.newBuilder().build()) .setPurgeTime(Timestamp.newBuilder().build()) .build(); Operation resultOperation = Operation.newBuilder() .setName("createDataAgentTest") .setDone(true) .setResponse(Any.pack(expectedResponse)) .build(); mockService.addResponse(resultOperation); LocationName parent = LocationName.of("[PROJECT]", "[LOCATION]"); DataAgent dataAgent = DataAgent.newBuilder().build(); String dataAgentId = "dataAgentId1752773622"; DataAgent actualResponse = client.createDataAgentAsync(parent, dataAgent, dataAgentId).get(); Assert.assertEquals(expectedResponse, actualResponse); List<String> actualRequests = mockService.getRequestPaths(); Assert.assertEquals(1, actualRequests.size()); String apiClientHeaderKey = mockService .getRequestHeaders() .get(ApiClientHeaderProvider.getDefaultApiClientHeaderKey()) .iterator() .next(); Assert.assertTrue( GaxHttpJsonProperties.getDefaultApiClientHeaderPattern() .matcher(apiClientHeaderKey) .matches()); } @Test public void createDataAgentExceptionTest() throws Exception { ApiException exception = ApiExceptionFactory.createException( new Exception(), FakeStatusCode.of(StatusCode.Code.INVALID_ARGUMENT), false); mockService.addException(exception); try { LocationName parent = LocationName.of("[PROJECT]", "[LOCATION]"); DataAgent dataAgent = DataAgent.newBuilder().build(); String dataAgentId = "dataAgentId1752773622"; client.createDataAgentAsync(parent, dataAgent, dataAgentId).get(); Assert.fail("No exception raised"); } catch (ExecutionException e) { } } @Test public void createDataAgentTest2() 
throws Exception { DataAgent expectedResponse = DataAgent.newBuilder() .setName(DataAgentName.of("[PROJECT]", "[LOCATION]", "[DATA_AGENT]").toString()) .setDisplayName("displayName1714148973") .setDescription("description-1724546052") .putAllLabels(new HashMap<String, String>()) .setCreateTime(Timestamp.newBuilder().build()) .setUpdateTime(Timestamp.newBuilder().build()) .setDeleteTime(Timestamp.newBuilder().build()) .setPurgeTime(Timestamp.newBuilder().build()) .build(); Operation resultOperation = Operation.newBuilder() .setName("createDataAgentTest") .setDone(true) .setResponse(Any.pack(expectedResponse)) .build(); mockService.addResponse(resultOperation); String parent = "projects/project-5833/locations/location-5833"; DataAgent dataAgent = DataAgent.newBuilder().build(); String dataAgentId = "dataAgentId1752773622"; DataAgent actualResponse = client.createDataAgentAsync(parent, dataAgent, dataAgentId).get(); Assert.assertEquals(expectedResponse, actualResponse); List<String> actualRequests = mockService.getRequestPaths(); Assert.assertEquals(1, actualRequests.size()); String apiClientHeaderKey = mockService .getRequestHeaders() .get(ApiClientHeaderProvider.getDefaultApiClientHeaderKey()) .iterator() .next(); Assert.assertTrue( GaxHttpJsonProperties.getDefaultApiClientHeaderPattern() .matcher(apiClientHeaderKey) .matches()); } @Test public void createDataAgentExceptionTest2() throws Exception { ApiException exception = ApiExceptionFactory.createException( new Exception(), FakeStatusCode.of(StatusCode.Code.INVALID_ARGUMENT), false); mockService.addException(exception); try { String parent = "projects/project-5833/locations/location-5833"; DataAgent dataAgent = DataAgent.newBuilder().build(); String dataAgentId = "dataAgentId1752773622"; client.createDataAgentAsync(parent, dataAgent, dataAgentId).get(); Assert.fail("No exception raised"); } catch (ExecutionException e) { } } @Test public void updateDataAgentTest() throws Exception { DataAgent expectedResponse = 
DataAgent.newBuilder() .setName(DataAgentName.of("[PROJECT]", "[LOCATION]", "[DATA_AGENT]").toString()) .setDisplayName("displayName1714148973") .setDescription("description-1724546052") .putAllLabels(new HashMap<String, String>()) .setCreateTime(Timestamp.newBuilder().build()) .setUpdateTime(Timestamp.newBuilder().build()) .setDeleteTime(Timestamp.newBuilder().build()) .setPurgeTime(Timestamp.newBuilder().build()) .build(); Operation resultOperation = Operation.newBuilder() .setName("updateDataAgentTest") .setDone(true) .setResponse(Any.pack(expectedResponse)) .build(); mockService.addResponse(resultOperation); DataAgent dataAgent = DataAgent.newBuilder() .setName(DataAgentName.of("[PROJECT]", "[LOCATION]", "[DATA_AGENT]").toString()) .setDisplayName("displayName1714148973") .setDescription("description-1724546052") .putAllLabels(new HashMap<String, String>()) .setCreateTime(Timestamp.newBuilder().build()) .setUpdateTime(Timestamp.newBuilder().build()) .setDeleteTime(Timestamp.newBuilder().build()) .setPurgeTime(Timestamp.newBuilder().build()) .build(); FieldMask updateMask = FieldMask.newBuilder().build(); DataAgent actualResponse = client.updateDataAgentAsync(dataAgent, updateMask).get(); Assert.assertEquals(expectedResponse, actualResponse); List<String> actualRequests = mockService.getRequestPaths(); Assert.assertEquals(1, actualRequests.size()); String apiClientHeaderKey = mockService .getRequestHeaders() .get(ApiClientHeaderProvider.getDefaultApiClientHeaderKey()) .iterator() .next(); Assert.assertTrue( GaxHttpJsonProperties.getDefaultApiClientHeaderPattern() .matcher(apiClientHeaderKey) .matches()); } @Test public void updateDataAgentExceptionTest() throws Exception { ApiException exception = ApiExceptionFactory.createException( new Exception(), FakeStatusCode.of(StatusCode.Code.INVALID_ARGUMENT), false); mockService.addException(exception); try { DataAgent dataAgent = DataAgent.newBuilder() .setName(DataAgentName.of("[PROJECT]", "[LOCATION]", 
"[DATA_AGENT]").toString()) .setDisplayName("displayName1714148973") .setDescription("description-1724546052") .putAllLabels(new HashMap<String, String>()) .setCreateTime(Timestamp.newBuilder().build()) .setUpdateTime(Timestamp.newBuilder().build()) .setDeleteTime(Timestamp.newBuilder().build()) .setPurgeTime(Timestamp.newBuilder().build()) .build(); FieldMask updateMask = FieldMask.newBuilder().build(); client.updateDataAgentAsync(dataAgent, updateMask).get(); Assert.fail("No exception raised"); } catch (ExecutionException e) { } } @Test public void deleteDataAgentTest() throws Exception { Empty expectedResponse = Empty.newBuilder().build(); Operation resultOperation = Operation.newBuilder() .setName("deleteDataAgentTest") .setDone(true) .setResponse(Any.pack(expectedResponse)) .build(); mockService.addResponse(resultOperation); DataAgentName name = DataAgentName.of("[PROJECT]", "[LOCATION]", "[DATA_AGENT]"); client.deleteDataAgentAsync(name).get(); List<String> actualRequests = mockService.getRequestPaths(); Assert.assertEquals(1, actualRequests.size()); String apiClientHeaderKey = mockService .getRequestHeaders() .get(ApiClientHeaderProvider.getDefaultApiClientHeaderKey()) .iterator() .next(); Assert.assertTrue( GaxHttpJsonProperties.getDefaultApiClientHeaderPattern() .matcher(apiClientHeaderKey) .matches()); } @Test public void deleteDataAgentExceptionTest() throws Exception { ApiException exception = ApiExceptionFactory.createException( new Exception(), FakeStatusCode.of(StatusCode.Code.INVALID_ARGUMENT), false); mockService.addException(exception); try { DataAgentName name = DataAgentName.of("[PROJECT]", "[LOCATION]", "[DATA_AGENT]"); client.deleteDataAgentAsync(name).get(); Assert.fail("No exception raised"); } catch (ExecutionException e) { } } @Test public void deleteDataAgentTest2() throws Exception { Empty expectedResponse = Empty.newBuilder().build(); Operation resultOperation = Operation.newBuilder() .setName("deleteDataAgentTest") .setDone(true) 
.setResponse(Any.pack(expectedResponse)) .build(); mockService.addResponse(resultOperation); String name = "projects/project-8616/locations/location-8616/dataAgents/dataAgent-8616"; client.deleteDataAgentAsync(name).get(); List<String> actualRequests = mockService.getRequestPaths(); Assert.assertEquals(1, actualRequests.size()); String apiClientHeaderKey = mockService .getRequestHeaders() .get(ApiClientHeaderProvider.getDefaultApiClientHeaderKey()) .iterator() .next(); Assert.assertTrue( GaxHttpJsonProperties.getDefaultApiClientHeaderPattern() .matcher(apiClientHeaderKey) .matches()); } @Test public void deleteDataAgentExceptionTest2() throws Exception { ApiException exception = ApiExceptionFactory.createException( new Exception(), FakeStatusCode.of(StatusCode.Code.INVALID_ARGUMENT), false); mockService.addException(exception); try { String name = "projects/project-8616/locations/location-8616/dataAgents/dataAgent-8616"; client.deleteDataAgentAsync(name).get(); Assert.fail("No exception raised"); } catch (ExecutionException e) { } } @Test public void getIamPolicyTest() throws Exception { Policy expectedResponse = Policy.newBuilder() .setVersion(351608024) .addAllBindings(new ArrayList<Binding>()) .addAllAuditConfigs(new ArrayList<AuditConfig>()) .setEtag(ByteString.EMPTY) .build(); mockService.addResponse(expectedResponse); ResourceName resource = DataAgentName.of("[PROJECT]", "[LOCATION]", "[DATA_AGENT]"); Policy actualResponse = client.getIamPolicy(resource); Assert.assertEquals(expectedResponse, actualResponse); List<String> actualRequests = mockService.getRequestPaths(); Assert.assertEquals(1, actualRequests.size()); String apiClientHeaderKey = mockService .getRequestHeaders() .get(ApiClientHeaderProvider.getDefaultApiClientHeaderKey()) .iterator() .next(); Assert.assertTrue( GaxHttpJsonProperties.getDefaultApiClientHeaderPattern() .matcher(apiClientHeaderKey) .matches()); } @Test public void getIamPolicyExceptionTest() throws Exception { ApiException exception 
= ApiExceptionFactory.createException( new Exception(), FakeStatusCode.of(StatusCode.Code.INVALID_ARGUMENT), false); mockService.addException(exception); try { ResourceName resource = DataAgentName.of("[PROJECT]", "[LOCATION]", "[DATA_AGENT]"); client.getIamPolicy(resource); Assert.fail("No exception raised"); } catch (InvalidArgumentException e) { // Expected exception. } } @Test public void getIamPolicyTest2() throws Exception { Policy expectedResponse = Policy.newBuilder() .setVersion(351608024) .addAllBindings(new ArrayList<Binding>()) .addAllAuditConfigs(new ArrayList<AuditConfig>()) .setEtag(ByteString.EMPTY) .build(); mockService.addResponse(expectedResponse); String resource = "projects/project-3285/locations/location-3285/dataAgents/dataAgent-3285"; Policy actualResponse = client.getIamPolicy(resource); Assert.assertEquals(expectedResponse, actualResponse); List<String> actualRequests = mockService.getRequestPaths(); Assert.assertEquals(1, actualRequests.size()); String apiClientHeaderKey = mockService .getRequestHeaders() .get(ApiClientHeaderProvider.getDefaultApiClientHeaderKey()) .iterator() .next(); Assert.assertTrue( GaxHttpJsonProperties.getDefaultApiClientHeaderPattern() .matcher(apiClientHeaderKey) .matches()); } @Test public void getIamPolicyExceptionTest2() throws Exception { ApiException exception = ApiExceptionFactory.createException( new Exception(), FakeStatusCode.of(StatusCode.Code.INVALID_ARGUMENT), false); mockService.addException(exception); try { String resource = "projects/project-3285/locations/location-3285/dataAgents/dataAgent-3285"; client.getIamPolicy(resource); Assert.fail("No exception raised"); } catch (InvalidArgumentException e) { // Expected exception. 
} } @Test public void setIamPolicyTest() throws Exception { Policy expectedResponse = Policy.newBuilder() .setVersion(351608024) .addAllBindings(new ArrayList<Binding>()) .addAllAuditConfigs(new ArrayList<AuditConfig>()) .setEtag(ByteString.EMPTY) .build(); mockService.addResponse(expectedResponse); ResourceName resource = DataAgentName.of("[PROJECT]", "[LOCATION]", "[DATA_AGENT]"); Policy actualResponse = client.setIamPolicy(resource); Assert.assertEquals(expectedResponse, actualResponse); List<String> actualRequests = mockService.getRequestPaths(); Assert.assertEquals(1, actualRequests.size()); String apiClientHeaderKey = mockService .getRequestHeaders() .get(ApiClientHeaderProvider.getDefaultApiClientHeaderKey()) .iterator() .next(); Assert.assertTrue( GaxHttpJsonProperties.getDefaultApiClientHeaderPattern() .matcher(apiClientHeaderKey) .matches()); } @Test public void setIamPolicyExceptionTest() throws Exception { ApiException exception = ApiExceptionFactory.createException( new Exception(), FakeStatusCode.of(StatusCode.Code.INVALID_ARGUMENT), false); mockService.addException(exception); try { ResourceName resource = DataAgentName.of("[PROJECT]", "[LOCATION]", "[DATA_AGENT]"); client.setIamPolicy(resource); Assert.fail("No exception raised"); } catch (InvalidArgumentException e) { // Expected exception. 
} } @Test public void setIamPolicyTest2() throws Exception { Policy expectedResponse = Policy.newBuilder() .setVersion(351608024) .addAllBindings(new ArrayList<Binding>()) .addAllAuditConfigs(new ArrayList<AuditConfig>()) .setEtag(ByteString.EMPTY) .build(); mockService.addResponse(expectedResponse); String resource = "projects/project-3285/locations/location-3285/dataAgents/dataAgent-3285"; Policy actualResponse = client.setIamPolicy(resource); Assert.assertEquals(expectedResponse, actualResponse); List<String> actualRequests = mockService.getRequestPaths(); Assert.assertEquals(1, actualRequests.size()); String apiClientHeaderKey = mockService .getRequestHeaders() .get(ApiClientHeaderProvider.getDefaultApiClientHeaderKey()) .iterator() .next(); Assert.assertTrue( GaxHttpJsonProperties.getDefaultApiClientHeaderPattern() .matcher(apiClientHeaderKey) .matches()); } @Test public void setIamPolicyExceptionTest2() throws Exception { ApiException exception = ApiExceptionFactory.createException( new Exception(), FakeStatusCode.of(StatusCode.Code.INVALID_ARGUMENT), false); mockService.addException(exception); try { String resource = "projects/project-3285/locations/location-3285/dataAgents/dataAgent-3285"; client.setIamPolicy(resource); Assert.fail("No exception raised"); } catch (InvalidArgumentException e) { // Expected exception. 
} } @Test public void listLocationsTest() throws Exception { Location responsesElement = Location.newBuilder().build(); ListLocationsResponse expectedResponse = ListLocationsResponse.newBuilder() .setNextPageToken("") .addAllLocations(Arrays.asList(responsesElement)) .build(); mockService.addResponse(expectedResponse); ListLocationsRequest request = ListLocationsRequest.newBuilder() .setName("projects/project-3664") .setFilter("filter-1274492040") .setPageSize(883849137) .setPageToken("pageToken873572522") .build(); ListLocationsPagedResponse pagedListResponse = client.listLocations(request); List<Location> resources = Lists.newArrayList(pagedListResponse.iterateAll()); Assert.assertEquals(1, resources.size()); Assert.assertEquals(expectedResponse.getLocationsList().get(0), resources.get(0)); List<String> actualRequests = mockService.getRequestPaths(); Assert.assertEquals(1, actualRequests.size()); String apiClientHeaderKey = mockService .getRequestHeaders() .get(ApiClientHeaderProvider.getDefaultApiClientHeaderKey()) .iterator() .next(); Assert.assertTrue( GaxHttpJsonProperties.getDefaultApiClientHeaderPattern() .matcher(apiClientHeaderKey) .matches()); } @Test public void listLocationsExceptionTest() throws Exception { ApiException exception = ApiExceptionFactory.createException( new Exception(), FakeStatusCode.of(StatusCode.Code.INVALID_ARGUMENT), false); mockService.addException(exception); try { ListLocationsRequest request = ListLocationsRequest.newBuilder() .setName("projects/project-3664") .setFilter("filter-1274492040") .setPageSize(883849137) .setPageToken("pageToken873572522") .build(); client.listLocations(request); Assert.fail("No exception raised"); } catch (InvalidArgumentException e) { // Expected exception. 
} } @Test public void getLocationTest() throws Exception { Location expectedResponse = Location.newBuilder() .setName("name3373707") .setLocationId("locationId1541836720") .setDisplayName("displayName1714148973") .putAllLabels(new HashMap<String, String>()) .setMetadata(Any.newBuilder().build()) .build(); mockService.addResponse(expectedResponse); GetLocationRequest request = GetLocationRequest.newBuilder() .setName("projects/project-9062/locations/location-9062") .build(); Location actualResponse = client.getLocation(request); Assert.assertEquals(expectedResponse, actualResponse); List<String> actualRequests = mockService.getRequestPaths(); Assert.assertEquals(1, actualRequests.size()); String apiClientHeaderKey = mockService .getRequestHeaders() .get(ApiClientHeaderProvider.getDefaultApiClientHeaderKey()) .iterator() .next(); Assert.assertTrue( GaxHttpJsonProperties.getDefaultApiClientHeaderPattern() .matcher(apiClientHeaderKey) .matches()); } @Test public void getLocationExceptionTest() throws Exception { ApiException exception = ApiExceptionFactory.createException( new Exception(), FakeStatusCode.of(StatusCode.Code.INVALID_ARGUMENT), false); mockService.addException(exception); try { GetLocationRequest request = GetLocationRequest.newBuilder() .setName("projects/project-9062/locations/location-9062") .build(); client.getLocation(request); Assert.fail("No exception raised"); } catch (InvalidArgumentException e) { // Expected exception. } } }
apache/commons-math
36,331
commons-math-legacy/src/main/java/org/apache/commons/math4/legacy/linear/AbstractRealMatrix.java
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.commons.math4.legacy.linear; import java.util.ArrayList; import java.util.Locale; import org.apache.commons.math4.legacy.exception.DimensionMismatchException; import org.apache.commons.math4.legacy.exception.NoDataException; import org.apache.commons.math4.legacy.exception.NotPositiveException; import org.apache.commons.math4.legacy.exception.NotStrictlyPositiveException; import org.apache.commons.math4.legacy.exception.NullArgumentException; import org.apache.commons.math4.legacy.exception.NumberIsTooSmallException; import org.apache.commons.math4.legacy.exception.OutOfRangeException; import org.apache.commons.math4.legacy.exception.util.LocalizedFormats; import org.apache.commons.math4.core.jdkmath.JdkMath; /** * Basic implementation of RealMatrix methods regardless of the underlying storage. * <p>All the methods implemented here use {@link #getEntry(int, int)} to access * matrix elements. Derived class can provide faster implementations.</p> * * @since 2.0 */ public abstract class AbstractRealMatrix extends RealLinearOperator implements RealMatrix { /** Default format. 
*/ private static final RealMatrixFormat DEFAULT_FORMAT = RealMatrixFormat.getInstance(Locale.US); static { // set the minimum fraction digits to 1 to keep compatibility DEFAULT_FORMAT.getFormat().setMinimumFractionDigits(1); } /** * Creates a matrix with no data. */ protected AbstractRealMatrix() {} /** * Create a new RealMatrix with the supplied row and column dimensions. * * @param rowDimension the number of rows in the new matrix * @param columnDimension the number of columns in the new matrix * @throws NotStrictlyPositiveException if row or column dimension is not positive */ protected AbstractRealMatrix(final int rowDimension, final int columnDimension) throws NotStrictlyPositiveException { if (rowDimension < 1) { throw new NotStrictlyPositiveException(rowDimension); } if (columnDimension < 1) { throw new NotStrictlyPositiveException(columnDimension); } } /** {@inheritDoc} */ @Override public RealMatrix add(RealMatrix m) throws MatrixDimensionMismatchException { checkAdd(m); final int rowCount = getRowDimension(); final int columnCount = getColumnDimension(); final RealMatrix out = createMatrix(rowCount, columnCount); for (int row = 0; row < rowCount; ++row) { for (int col = 0; col < columnCount; ++col) { out.setEntry(row, col, getEntry(row, col) + m.getEntry(row, col)); } } return out; } /** {@inheritDoc} */ @Override public RealMatrix subtract(final RealMatrix m) throws MatrixDimensionMismatchException { checkAdd(m); final int rowCount = getRowDimension(); final int columnCount = getColumnDimension(); final RealMatrix out = createMatrix(rowCount, columnCount); for (int row = 0; row < rowCount; ++row) { for (int col = 0; col < columnCount; ++col) { out.setEntry(row, col, getEntry(row, col) - m.getEntry(row, col)); } } return out; } /** {@inheritDoc} */ @Override public RealMatrix scalarAdd(final double d) { final int rowCount = getRowDimension(); final int columnCount = getColumnDimension(); final RealMatrix out = createMatrix(rowCount, columnCount); for 
(int row = 0; row < rowCount; ++row) { for (int col = 0; col < columnCount; ++col) { out.setEntry(row, col, getEntry(row, col) + d); } } return out; } /** {@inheritDoc} */ @Override public RealMatrix scalarMultiply(final double d) { final int rowCount = getRowDimension(); final int columnCount = getColumnDimension(); final RealMatrix out = createMatrix(rowCount, columnCount); for (int row = 0; row < rowCount; ++row) { for (int col = 0; col < columnCount; ++col) { out.setEntry(row, col, getEntry(row, col) * d); } } return out; } /** {@inheritDoc} */ @Override public RealMatrix multiply(final RealMatrix m) throws DimensionMismatchException { checkMultiply(m); final int nRows = getRowDimension(); final int nCols = m.getColumnDimension(); final int nSum = getColumnDimension(); final RealMatrix out = createMatrix(nRows, nCols); for (int row = 0; row < nRows; ++row) { for (int col = 0; col < nCols; ++col) { double sum = 0; for (int i = 0; i < nSum; ++i) { sum += getEntry(row, i) * m.getEntry(i, col); } out.setEntry(row, col, sum); } } return out; } /** {@inheritDoc} */ @Override public RealMatrix preMultiply(final RealMatrix m) throws DimensionMismatchException { return m.multiply(this); } /** {@inheritDoc} */ @Override public RealMatrix power(final int p) throws NotPositiveException, NonSquareMatrixException { if (p < 0) { throw new NotPositiveException(LocalizedFormats.NOT_POSITIVE_EXPONENT, p); } if (!isSquare()) { throw new NonSquareMatrixException(getRowDimension(), getColumnDimension()); } if (p == 0) { return MatrixUtils.createRealIdentityMatrix(this.getRowDimension()); } if (p == 1) { return this.copy(); } final int power = p - 1; /* * Only log_2(p) operations is used by doing as follows: * 5^214 = 5^128 * 5^64 * 5^16 * 5^4 * 5^2 * * In general, the same approach is used for A^p. 
*/ final char[] binaryRepresentation = Integer.toBinaryString(power).toCharArray(); final ArrayList<Integer> nonZeroPositions = new ArrayList<>(); int maxI = -1; for (int i = 0; i < binaryRepresentation.length; ++i) { if (binaryRepresentation[i] == '1') { final int pos = binaryRepresentation.length - i - 1; nonZeroPositions.add(pos); // The positions are taken in turn, so maxI is only changed once if (maxI == -1) { maxI = pos; } } } RealMatrix[] results = new RealMatrix[maxI + 1]; results[0] = this.copy(); for (int i = 1; i <= maxI; ++i) { results[i] = results[i-1].multiply(results[i-1]); } RealMatrix result = this.copy(); for (Integer i : nonZeroPositions) { result = result.multiply(results[i]); } return result; } /** {@inheritDoc} */ @Override public double[][] getData() { final double[][] data = new double[getRowDimension()][getColumnDimension()]; for (int i = 0; i < data.length; ++i) { final double[] dataI = data[i]; for (int j = 0; j < dataI.length; ++j) { dataI[j] = getEntry(i, j); } } return data; } /** {@inheritDoc} */ @Override public double getNorm() { return walkInColumnOrder(new RealMatrixPreservingVisitor() { /** Last row index. */ private double endRow; /** Sum of absolute values on one column. */ private double columnSum; /** Maximal sum across all columns. 
*/ private double maxColSum; /** {@inheritDoc} */ @Override public void start(final int rows, final int columns, final int startRow, final int endRow, final int startColumn, final int endColumn) { this.endRow = endRow; columnSum = 0; maxColSum = 0; } /** {@inheritDoc} */ @Override public void visit(final int row, final int column, final double value) { columnSum += JdkMath.abs(value); if (row == endRow) { maxColSum = JdkMath.max(maxColSum, columnSum); columnSum = 0; } } /** {@inheritDoc} */ @Override public double end() { return maxColSum; } }); } /** {@inheritDoc} */ @Override public double getFrobeniusNorm() { return walkInOptimizedOrder(new RealMatrixPreservingVisitor() { /** Sum of squared entries. */ private double sum; /** {@inheritDoc} */ @Override public void start(final int rows, final int columns, final int startRow, final int endRow, final int startColumn, final int endColumn) { sum = 0; } /** {@inheritDoc} */ @Override public void visit(final int row, final int column, final double value) { sum += value * value; } /** {@inheritDoc} */ @Override public double end() { return JdkMath.sqrt(sum); } }); } /** {@inheritDoc} */ @Override public RealMatrix getSubMatrix(final int startRow, final int endRow, final int startColumn, final int endColumn) throws OutOfRangeException, NumberIsTooSmallException { MatrixUtils.checkSubMatrixIndex(this, startRow, endRow, startColumn, endColumn); final RealMatrix subMatrix = createMatrix(endRow - startRow + 1, endColumn - startColumn + 1); for (int i = startRow; i <= endRow; ++i) { for (int j = startColumn; j <= endColumn; ++j) { subMatrix.setEntry(i - startRow, j - startColumn, getEntry(i, j)); } } return subMatrix; } /** {@inheritDoc} */ @Override public RealMatrix getSubMatrix(final int[] selectedRows, final int[] selectedColumns) throws NullArgumentException, NoDataException, OutOfRangeException { MatrixUtils.checkSubMatrixIndex(this, selectedRows, selectedColumns); final RealMatrix subMatrix = 
createMatrix(selectedRows.length, selectedColumns.length); subMatrix.walkInOptimizedOrder(new DefaultRealMatrixChangingVisitor() { /** {@inheritDoc} */ @Override public double visit(final int row, final int column, final double value) { return getEntry(selectedRows[row], selectedColumns[column]); } }); return subMatrix; } /** {@inheritDoc} */ @Override public void copySubMatrix(final int startRow, final int endRow, final int startColumn, final int endColumn, final double[][] destination) throws OutOfRangeException, NumberIsTooSmallException, MatrixDimensionMismatchException { MatrixUtils.checkSubMatrixIndex(this, startRow, endRow, startColumn, endColumn); final int rowsCount = endRow + 1 - startRow; final int columnsCount = endColumn + 1 - startColumn; if (destination.length < rowsCount || destination[0].length < columnsCount) { throw new MatrixDimensionMismatchException(destination.length, destination[0].length, rowsCount, columnsCount); } for (int i = 1; i < rowsCount; i++) { if (destination[i].length < columnsCount) { throw new MatrixDimensionMismatchException(destination.length, destination[i].length, rowsCount, columnsCount); } } walkInOptimizedOrder(new DefaultRealMatrixPreservingVisitor() { /** Initial row index. */ private int startRow; /** Initial column index. 
*/ private int startColumn; /** {@inheritDoc} */ @Override public void start(final int rows, final int columns, final int startRow, final int endRow, final int startColumn, final int endColumn) { this.startRow = startRow; this.startColumn = startColumn; } /** {@inheritDoc} */ @Override public void visit(final int row, final int column, final double value) { destination[row - startRow][column - startColumn] = value; } }, startRow, endRow, startColumn, endColumn); } /** {@inheritDoc} */ @Override public void copySubMatrix(int[] selectedRows, int[] selectedColumns, double[][] destination) throws OutOfRangeException, NullArgumentException, NoDataException, MatrixDimensionMismatchException { MatrixUtils.checkSubMatrixIndex(this, selectedRows, selectedColumns); final int nCols = selectedColumns.length; if (destination.length < selectedRows.length || destination[0].length < nCols) { throw new MatrixDimensionMismatchException(destination.length, destination[0].length, selectedRows.length, selectedColumns.length); } for (int i = 0; i < selectedRows.length; i++) { final double[] destinationI = destination[i]; if (destinationI.length < nCols) { throw new MatrixDimensionMismatchException(destination.length, destinationI.length, selectedRows.length, selectedColumns.length); } for (int j = 0; j < selectedColumns.length; j++) { destinationI[j] = getEntry(selectedRows[i], selectedColumns[j]); } } } /** {@inheritDoc} */ @Override public void setSubMatrix(final double[][] subMatrix, final int row, final int column) throws NoDataException, OutOfRangeException, DimensionMismatchException, NullArgumentException { NullArgumentException.check(subMatrix); final int nRows = subMatrix.length; if (nRows == 0) { throw new NoDataException(LocalizedFormats.AT_LEAST_ONE_ROW); } final int nCols = subMatrix[0].length; if (nCols == 0) { throw new NoDataException(LocalizedFormats.AT_LEAST_ONE_COLUMN); } for (int r = 1; r < nRows; ++r) { if (subMatrix[r].length != nCols) { throw new 
DimensionMismatchException(nCols, subMatrix[r].length); } } MatrixUtils.checkRowIndex(this, row); MatrixUtils.checkColumnIndex(this, column); MatrixUtils.checkRowIndex(this, nRows + row - 1); MatrixUtils.checkColumnIndex(this, nCols + column - 1); for (int i = 0; i < nRows; ++i) { for (int j = 0; j < nCols; ++j) { setEntry(row + i, column + j, subMatrix[i][j]); } } } /** {@inheritDoc} */ @Override public RealMatrix getRowMatrix(final int row) throws OutOfRangeException { MatrixUtils.checkRowIndex(this, row); final int nCols = getColumnDimension(); final RealMatrix out = createMatrix(1, nCols); for (int i = 0; i < nCols; ++i) { out.setEntry(0, i, getEntry(row, i)); } return out; } /** {@inheritDoc} */ @Override public void setRowMatrix(final int row, final RealMatrix matrix) throws OutOfRangeException, MatrixDimensionMismatchException { MatrixUtils.checkRowIndex(this, row); final int nCols = getColumnDimension(); if (matrix.getRowDimension() != 1 || matrix.getColumnDimension() != nCols) { throw new MatrixDimensionMismatchException(matrix.getRowDimension(), matrix.getColumnDimension(), 1, nCols); } for (int i = 0; i < nCols; ++i) { setEntry(row, i, matrix.getEntry(0, i)); } } /** {@inheritDoc} */ @Override public RealMatrix getColumnMatrix(final int column) throws OutOfRangeException { MatrixUtils.checkColumnIndex(this, column); final int nRows = getRowDimension(); final RealMatrix out = createMatrix(nRows, 1); for (int i = 0; i < nRows; ++i) { out.setEntry(i, 0, getEntry(i, column)); } return out; } /** {@inheritDoc} */ @Override public void setColumnMatrix(final int column, final RealMatrix matrix) throws OutOfRangeException, MatrixDimensionMismatchException { MatrixUtils.checkColumnIndex(this, column); final int nRows = getRowDimension(); if (matrix.getRowDimension() != nRows || matrix.getColumnDimension() != 1) { throw new MatrixDimensionMismatchException(matrix.getRowDimension(), matrix.getColumnDimension(), nRows, 1); } for (int i = 0; i < nRows; ++i) { 
setEntry(i, column, matrix.getEntry(i, 0)); } } /** {@inheritDoc} */ @Override public RealVector getRowVector(final int row) throws OutOfRangeException { return new ArrayRealVector(getRow(row), false); } /** {@inheritDoc} */ @Override public void setRowVector(final int row, final RealVector vector) throws OutOfRangeException, MatrixDimensionMismatchException { MatrixUtils.checkRowIndex(this, row); final int nCols = getColumnDimension(); if (vector.getDimension() != nCols) { throw new MatrixDimensionMismatchException(1, vector.getDimension(), 1, nCols); } for (int i = 0; i < nCols; ++i) { setEntry(row, i, vector.getEntry(i)); } } /** {@inheritDoc} */ @Override public RealVector getColumnVector(final int column) throws OutOfRangeException { return new ArrayRealVector(getColumn(column), false); } /** {@inheritDoc} */ @Override public void setColumnVector(final int column, final RealVector vector) throws OutOfRangeException, MatrixDimensionMismatchException { MatrixUtils.checkColumnIndex(this, column); final int nRows = getRowDimension(); if (vector.getDimension() != nRows) { throw new MatrixDimensionMismatchException(vector.getDimension(), 1, nRows, 1); } for (int i = 0; i < nRows; ++i) { setEntry(i, column, vector.getEntry(i)); } } /** {@inheritDoc} */ @Override public double[] getRow(final int row) throws OutOfRangeException { MatrixUtils.checkRowIndex(this, row); final int nCols = getColumnDimension(); final double[] out = new double[nCols]; for (int i = 0; i < nCols; ++i) { out[i] = getEntry(row, i); } return out; } /** {@inheritDoc} */ @Override public void setRow(final int row, final double[] array) throws OutOfRangeException, MatrixDimensionMismatchException { MatrixUtils.checkRowIndex(this, row); final int nCols = getColumnDimension(); if (array.length != nCols) { throw new MatrixDimensionMismatchException(1, array.length, 1, nCols); } for (int i = 0; i < nCols; ++i) { setEntry(row, i, array[i]); } } /** {@inheritDoc} */ @Override public double[] 
getColumn(final int column) throws OutOfRangeException { MatrixUtils.checkColumnIndex(this, column); final int nRows = getRowDimension(); final double[] out = new double[nRows]; for (int i = 0; i < nRows; ++i) { out[i] = getEntry(i, column); } return out; } /** {@inheritDoc} */ @Override public void setColumn(final int column, final double[] array) throws OutOfRangeException, MatrixDimensionMismatchException { MatrixUtils.checkColumnIndex(this, column); final int nRows = getRowDimension(); if (array.length != nRows) { throw new MatrixDimensionMismatchException(array.length, 1, nRows, 1); } for (int i = 0; i < nRows; ++i) { setEntry(i, column, array[i]); } } /** {@inheritDoc} */ @Override public void addToEntry(int row, int column, double increment) throws OutOfRangeException { MatrixUtils.checkMatrixIndex(this, row, column); setEntry(row, column, getEntry(row, column) + increment); } /** {@inheritDoc} */ @Override public void multiplyEntry(int row, int column, double factor) throws OutOfRangeException { MatrixUtils.checkMatrixIndex(this, row, column); setEntry(row, column, getEntry(row, column) * factor); } /** {@inheritDoc} */ @Override public RealMatrix transpose() { final int nRows = getRowDimension(); final int nCols = getColumnDimension(); final RealMatrix out = createMatrix(nCols, nRows); walkInOptimizedOrder(new DefaultRealMatrixPreservingVisitor() { /** {@inheritDoc} */ @Override public void visit(final int row, final int column, final double value) { out.setEntry(column, row, value); } }); return out; } /** {@inheritDoc} */ @Override public double getTrace() throws NonSquareMatrixException { final int nRows = getRowDimension(); final int nCols = getColumnDimension(); if (nRows != nCols) { throw new NonSquareMatrixException(nRows, nCols); } double trace = 0; for (int i = 0; i < nRows; ++i) { trace += getEntry(i, i); } return trace; } /** {@inheritDoc} */ @Override public double[] operate(final double[] v) throws DimensionMismatchException { final int nRows 
= getRowDimension(); final int nCols = getColumnDimension(); if (v.length != nCols) { throw new DimensionMismatchException(v.length, nCols); } final double[] out = new double[nRows]; for (int row = 0; row < nRows; ++row) { double sum = 0; for (int i = 0; i < nCols; ++i) { sum += getEntry(row, i) * v[i]; } out[row] = sum; } return out; } /** {@inheritDoc} */ @Override public RealVector operate(final RealVector v) throws DimensionMismatchException { if (v instanceof ArrayRealVector) { return new ArrayRealVector(operate(((ArrayRealVector) v).getDataRef()), false); } final int nRows = getRowDimension(); final int nCols = getColumnDimension(); if (v.getDimension() != nCols) { throw new DimensionMismatchException(v.getDimension(), nCols); } final double[] out = new double[nRows]; for (int row = 0; row < nRows; ++row) { double sum = 0; for (int i = 0; i < nCols; ++i) { sum += getEntry(row, i) * v.getEntry(i); } out[row] = sum; } return new ArrayRealVector(out, false); } /** {@inheritDoc} */ @Override public double[] preMultiply(final double[] v) throws DimensionMismatchException { final int nRows = getRowDimension(); final int nCols = getColumnDimension(); if (v.length != nRows) { throw new DimensionMismatchException(v.length, nRows); } final double[] out = new double[nCols]; for (int col = 0; col < nCols; ++col) { double sum = 0; for (int i = 0; i < nRows; ++i) { sum += getEntry(i, col) * v[i]; } out[col] = sum; } return out; } /** {@inheritDoc} */ @Override public RealVector preMultiply(final RealVector v) throws DimensionMismatchException { if (v instanceof ArrayRealVector) { return new ArrayRealVector(preMultiply(((ArrayRealVector) v).getDataRef()), false); } final int nRows = getRowDimension(); final int nCols = getColumnDimension(); if (v.getDimension() != nRows) { throw new DimensionMismatchException(v.getDimension(), nRows); } final double[] out = new double[nCols]; for (int col = 0; col < nCols; ++col) { double sum = 0; for (int i = 0; i < nRows; ++i) { sum += 
getEntry(i, col) * v.getEntry(i); } out[col] = sum; } return new ArrayRealVector(out, false); } /** {@inheritDoc} */ @Override public double walkInRowOrder(final RealMatrixChangingVisitor visitor) { final int rows = getRowDimension(); final int columns = getColumnDimension(); visitor.start(rows, columns, 0, rows - 1, 0, columns - 1); for (int row = 0; row < rows; ++row) { for (int column = 0; column < columns; ++column) { final double oldValue = getEntry(row, column); final double newValue = visitor.visit(row, column, oldValue); setEntry(row, column, newValue); } } return visitor.end(); } /** {@inheritDoc} */ @Override public double walkInRowOrder(final RealMatrixPreservingVisitor visitor) { final int rows = getRowDimension(); final int columns = getColumnDimension(); visitor.start(rows, columns, 0, rows - 1, 0, columns - 1); for (int row = 0; row < rows; ++row) { for (int column = 0; column < columns; ++column) { visitor.visit(row, column, getEntry(row, column)); } } return visitor.end(); } /** {@inheritDoc} */ @Override public double walkInRowOrder(final RealMatrixChangingVisitor visitor, final int startRow, final int endRow, final int startColumn, final int endColumn) throws OutOfRangeException, NumberIsTooSmallException { MatrixUtils.checkSubMatrixIndex(this, startRow, endRow, startColumn, endColumn); visitor.start(getRowDimension(), getColumnDimension(), startRow, endRow, startColumn, endColumn); for (int row = startRow; row <= endRow; ++row) { for (int column = startColumn; column <= endColumn; ++column) { final double oldValue = getEntry(row, column); final double newValue = visitor.visit(row, column, oldValue); setEntry(row, column, newValue); } } return visitor.end(); } /** {@inheritDoc} */ @Override public double walkInRowOrder(final RealMatrixPreservingVisitor visitor, final int startRow, final int endRow, final int startColumn, final int endColumn) throws OutOfRangeException, NumberIsTooSmallException { MatrixUtils.checkSubMatrixIndex(this, startRow, 
endRow, startColumn, endColumn); visitor.start(getRowDimension(), getColumnDimension(), startRow, endRow, startColumn, endColumn); for (int row = startRow; row <= endRow; ++row) { for (int column = startColumn; column <= endColumn; ++column) { visitor.visit(row, column, getEntry(row, column)); } } return visitor.end(); } /** {@inheritDoc} */ @Override public double walkInColumnOrder(final RealMatrixChangingVisitor visitor) { final int rows = getRowDimension(); final int columns = getColumnDimension(); visitor.start(rows, columns, 0, rows - 1, 0, columns - 1); for (int column = 0; column < columns; ++column) { for (int row = 0; row < rows; ++row) { final double oldValue = getEntry(row, column); final double newValue = visitor.visit(row, column, oldValue); setEntry(row, column, newValue); } } return visitor.end(); } /** {@inheritDoc} */ @Override public double walkInColumnOrder(final RealMatrixPreservingVisitor visitor) { final int rows = getRowDimension(); final int columns = getColumnDimension(); visitor.start(rows, columns, 0, rows - 1, 0, columns - 1); for (int column = 0; column < columns; ++column) { for (int row = 0; row < rows; ++row) { visitor.visit(row, column, getEntry(row, column)); } } return visitor.end(); } /** {@inheritDoc} */ @Override public double walkInColumnOrder(final RealMatrixChangingVisitor visitor, final int startRow, final int endRow, final int startColumn, final int endColumn) throws OutOfRangeException, NumberIsTooSmallException { MatrixUtils.checkSubMatrixIndex(this, startRow, endRow, startColumn, endColumn); visitor.start(getRowDimension(), getColumnDimension(), startRow, endRow, startColumn, endColumn); for (int column = startColumn; column <= endColumn; ++column) { for (int row = startRow; row <= endRow; ++row) { final double oldValue = getEntry(row, column); final double newValue = visitor.visit(row, column, oldValue); setEntry(row, column, newValue); } } return visitor.end(); } /** {@inheritDoc} */ @Override public double 
walkInColumnOrder(final RealMatrixPreservingVisitor visitor, final int startRow, final int endRow, final int startColumn, final int endColumn) throws OutOfRangeException, NumberIsTooSmallException { MatrixUtils.checkSubMatrixIndex(this, startRow, endRow, startColumn, endColumn); visitor.start(getRowDimension(), getColumnDimension(), startRow, endRow, startColumn, endColumn); for (int column = startColumn; column <= endColumn; ++column) { for (int row = startRow; row <= endRow; ++row) { visitor.visit(row, column, getEntry(row, column)); } } return visitor.end(); } /** {@inheritDoc} */ @Override public double walkInOptimizedOrder(final RealMatrixChangingVisitor visitor) { return walkInRowOrder(visitor); } /** {@inheritDoc} */ @Override public double walkInOptimizedOrder(final RealMatrixPreservingVisitor visitor) { return walkInRowOrder(visitor); } /** {@inheritDoc} */ @Override public double walkInOptimizedOrder(final RealMatrixChangingVisitor visitor, final int startRow, final int endRow, final int startColumn, final int endColumn) throws OutOfRangeException, NumberIsTooSmallException { return walkInRowOrder(visitor, startRow, endRow, startColumn, endColumn); } /** {@inheritDoc} */ @Override public double walkInOptimizedOrder(final RealMatrixPreservingVisitor visitor, final int startRow, final int endRow, final int startColumn, final int endColumn) throws OutOfRangeException, NumberIsTooSmallException { return walkInRowOrder(visitor, startRow, endRow, startColumn, endColumn); } /** * Get a string representation for this matrix. 
* @return a string representation for this matrix */ @Override public String toString() { final StringBuilder res = new StringBuilder(); String fullClassName = getClass().getName(); String shortClassName = fullClassName.substring(fullClassName.lastIndexOf('.') + 1); res.append(shortClassName); res.append(DEFAULT_FORMAT.format(this)); return res.toString(); } /** * Returns true iff <code>object</code> is a * <code>RealMatrix</code> instance with the same dimensions as this * and all corresponding matrix entries are equal. * * @param object the object to test equality against. * @return true if object equals this */ @Override public boolean equals(final Object object) { if (object == this ) { return true; } if (!(object instanceof RealMatrix)) { return false; } RealMatrix m = (RealMatrix) object; final int nRows = getRowDimension(); final int nCols = getColumnDimension(); if (m.getColumnDimension() != nCols || m.getRowDimension() != nRows) { return false; } for (int row = 0; row < nRows; ++row) { for (int col = 0; col < nCols; ++col) { if (getEntry(row, col) != m.getEntry(row, col)) { return false; } } } return true; } /** * Computes a hash code for the matrix. * * @return hash code for matrix */ @Override public int hashCode() { int ret = 7; final int nRows = getRowDimension(); final int nCols = getColumnDimension(); ret = ret * 31 + nRows; ret = ret * 31 + nCols; for (int row = 0; row < nRows; ++row) { for (int col = 0; col < nCols; ++col) { ret = ret * 31 + (11 * (row+1) + 17 * (col+1)) * Double.hashCode(getEntry(row, col)); } } return ret; } /* * Empty implementations of these methods are provided in order to allow for * the use of the @Override tag with Java 1.5. 
*/ /** {@inheritDoc} */ @Override public abstract RealMatrix createMatrix(int rowDimension, int columnDimension) throws NotStrictlyPositiveException; /** {@inheritDoc} */ @Override public abstract RealMatrix copy(); /** {@inheritDoc} */ @Override public abstract double getEntry(int row, int column) throws OutOfRangeException; /** {@inheritDoc} */ @Override public abstract void setEntry(int row, int column, double value) throws OutOfRangeException; }
googleads/google-ads-java
36,415
google-ads-stubs-v19/src/main/java/com/google/ads/googleads/v19/services/GenerateTargetingSuggestionMetricsResponse.java
// Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/ads/googleads/v19/services/audience_insights_service.proto // Protobuf Java Version: 3.25.7 package com.google.ads.googleads.v19.services; /** * <pre> * Response message for * [AudienceInsightsService.GenerateTargetingSuggestionMetrics][google.ads.googleads.v19.services.AudienceInsightsService.GenerateTargetingSuggestionMetrics]. * </pre> * * Protobuf type {@code google.ads.googleads.v19.services.GenerateTargetingSuggestionMetricsResponse} */ public final class GenerateTargetingSuggestionMetricsResponse extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.ads.googleads.v19.services.GenerateTargetingSuggestionMetricsResponse) GenerateTargetingSuggestionMetricsResponseOrBuilder { private static final long serialVersionUID = 0L; // Use GenerateTargetingSuggestionMetricsResponse.newBuilder() to construct. private GenerateTargetingSuggestionMetricsResponse(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private GenerateTargetingSuggestionMetricsResponse() { suggestions_ = java.util.Collections.emptyList(); } @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance( UnusedPrivateParameter unused) { return new GenerateTargetingSuggestionMetricsResponse(); } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.ads.googleads.v19.services.AudienceInsightsServiceProto.internal_static_google_ads_googleads_v19_services_GenerateTargetingSuggestionMetricsResponse_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.ads.googleads.v19.services.AudienceInsightsServiceProto.internal_static_google_ads_googleads_v19_services_GenerateTargetingSuggestionMetricsResponse_fieldAccessorTable .ensureFieldAccessorsInitialized( 
com.google.ads.googleads.v19.services.GenerateTargetingSuggestionMetricsResponse.class, com.google.ads.googleads.v19.services.GenerateTargetingSuggestionMetricsResponse.Builder.class); } public static final int SUGGESTIONS_FIELD_NUMBER = 1; @SuppressWarnings("serial") private java.util.List<com.google.ads.googleads.v19.services.TargetingSuggestionMetrics> suggestions_; /** * <pre> * Suggested targetable audiences. There will be one suggestion for each * [GenerateTargetingSuggestionMetricsRequest.audiences] requested, matching * the order requested. * </pre> * * <code>repeated .google.ads.googleads.v19.services.TargetingSuggestionMetrics suggestions = 1;</code> */ @java.lang.Override public java.util.List<com.google.ads.googleads.v19.services.TargetingSuggestionMetrics> getSuggestionsList() { return suggestions_; } /** * <pre> * Suggested targetable audiences. There will be one suggestion for each * [GenerateTargetingSuggestionMetricsRequest.audiences] requested, matching * the order requested. * </pre> * * <code>repeated .google.ads.googleads.v19.services.TargetingSuggestionMetrics suggestions = 1;</code> */ @java.lang.Override public java.util.List<? extends com.google.ads.googleads.v19.services.TargetingSuggestionMetricsOrBuilder> getSuggestionsOrBuilderList() { return suggestions_; } /** * <pre> * Suggested targetable audiences. There will be one suggestion for each * [GenerateTargetingSuggestionMetricsRequest.audiences] requested, matching * the order requested. * </pre> * * <code>repeated .google.ads.googleads.v19.services.TargetingSuggestionMetrics suggestions = 1;</code> */ @java.lang.Override public int getSuggestionsCount() { return suggestions_.size(); } /** * <pre> * Suggested targetable audiences. There will be one suggestion for each * [GenerateTargetingSuggestionMetricsRequest.audiences] requested, matching * the order requested. 
* </pre> * * <code>repeated .google.ads.googleads.v19.services.TargetingSuggestionMetrics suggestions = 1;</code> */ @java.lang.Override public com.google.ads.googleads.v19.services.TargetingSuggestionMetrics getSuggestions(int index) { return suggestions_.get(index); } /** * <pre> * Suggested targetable audiences. There will be one suggestion for each * [GenerateTargetingSuggestionMetricsRequest.audiences] requested, matching * the order requested. * </pre> * * <code>repeated .google.ads.googleads.v19.services.TargetingSuggestionMetrics suggestions = 1;</code> */ @java.lang.Override public com.google.ads.googleads.v19.services.TargetingSuggestionMetricsOrBuilder getSuggestionsOrBuilder( int index) { return suggestions_.get(index); } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { for (int i = 0; i < suggestions_.size(); i++) { output.writeMessage(1, suggestions_.get(i)); } getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; for (int i = 0; i < suggestions_.size(); i++) { size += com.google.protobuf.CodedOutputStream .computeMessageSize(1, suggestions_.get(i)); } size += getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof com.google.ads.googleads.v19.services.GenerateTargetingSuggestionMetricsResponse)) { return super.equals(obj); } com.google.ads.googleads.v19.services.GenerateTargetingSuggestionMetricsResponse other = 
(com.google.ads.googleads.v19.services.GenerateTargetingSuggestionMetricsResponse) obj; if (!getSuggestionsList() .equals(other.getSuggestionsList())) return false; if (!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); if (getSuggestionsCount() > 0) { hash = (37 * hash) + SUGGESTIONS_FIELD_NUMBER; hash = (53 * hash) + getSuggestionsList().hashCode(); } hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static com.google.ads.googleads.v19.services.GenerateTargetingSuggestionMetricsResponse parseFrom( java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.ads.googleads.v19.services.GenerateTargetingSuggestionMetricsResponse parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.ads.googleads.v19.services.GenerateTargetingSuggestionMetricsResponse parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.ads.googleads.v19.services.GenerateTargetingSuggestionMetricsResponse parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.ads.googleads.v19.services.GenerateTargetingSuggestionMetricsResponse parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static 
com.google.ads.googleads.v19.services.GenerateTargetingSuggestionMetricsResponse parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.ads.googleads.v19.services.GenerateTargetingSuggestionMetricsResponse parseFrom(java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static com.google.ads.googleads.v19.services.GenerateTargetingSuggestionMetricsResponse parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } public static com.google.ads.googleads.v19.services.GenerateTargetingSuggestionMetricsResponse parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input); } public static com.google.ads.googleads.v19.services.GenerateTargetingSuggestionMetricsResponse parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static com.google.ads.googleads.v19.services.GenerateTargetingSuggestionMetricsResponse parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static com.google.ads.googleads.v19.services.GenerateTargetingSuggestionMetricsResponse parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return 
com.google.protobuf.GeneratedMessageV3
        .parseWithIOException(PARSER, input, extensionRegistry);
  }

  // NOTE(review): protoc-generated code (see the @@protoc_insertion_point markers below).
  // Do not hand-edit — regenerate from google/ads/googleads/v19/services/audience_insights_service.proto.

  // Standard generated builder-factory entry points.
  @java.lang.Override
  public Builder newBuilderForType() { return newBuilder(); }
  public static Builder newBuilder() {
    return DEFAULT_INSTANCE.toBuilder();
  }
  // Returns a builder pre-populated from {@code prototype}.
  public static Builder newBuilder(com.google.ads.googleads.v19.services.GenerateTargetingSuggestionMetricsResponse prototype) {
    return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
  }
  @java.lang.Override
  public Builder toBuilder() {
    return this == DEFAULT_INSTANCE
        ? new Builder() : new Builder().mergeFrom(this);
  }

  @java.lang.Override
  protected Builder newBuilderForType(
      com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
    Builder builder = new Builder(parent);
    return builder;
  }
  /**
   * <pre>
   * Response message for
   * [AudienceInsightsService.GenerateTargetingSuggestionMetrics][google.ads.googleads.v19.services.AudienceInsightsService.GenerateTargetingSuggestionMetrics].
   * </pre>
   *
   * Protobuf type {@code google.ads.googleads.v19.services.GenerateTargetingSuggestionMetricsResponse}
   */
  public static final class Builder extends
      com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements
      // @@protoc_insertion_point(builder_implements:google.ads.googleads.v19.services.GenerateTargetingSuggestionMetricsResponse)
      com.google.ads.googleads.v19.services.GenerateTargetingSuggestionMetricsResponseOrBuilder {
    public static final com.google.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return com.google.ads.googleads.v19.services.AudienceInsightsServiceProto.internal_static_google_ads_googleads_v19_services_GenerateTargetingSuggestionMetricsResponse_descriptor;
    }

    @java.lang.Override
    protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return com.google.ads.googleads.v19.services.AudienceInsightsServiceProto.internal_static_google_ads_googleads_v19_services_GenerateTargetingSuggestionMetricsResponse_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              com.google.ads.googleads.v19.services.GenerateTargetingSuggestionMetricsResponse.class, com.google.ads.googleads.v19.services.GenerateTargetingSuggestionMetricsResponse.Builder.class);
    }

    // Construct using com.google.ads.googleads.v19.services.GenerateTargetingSuggestionMetricsResponse.newBuilder()
    private Builder() {

    }

    private Builder(
        com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
      super(parent);

    }
    // Resets the repeated `suggestions` field; bit 0 of bitField0_ tracks whether
    // suggestions_ is a privately owned mutable list (see ensureSuggestionsIsMutable).
    @java.lang.Override
    public Builder clear() {
      super.clear();
      bitField0_ = 0;
      if (suggestionsBuilder_ == null) {
        suggestions_ = java.util.Collections.emptyList();
      } else {
        suggestions_ = null;
        suggestionsBuilder_.clear();
      }
      bitField0_ = (bitField0_ & ~0x00000001);
      return this;
    }

    @java.lang.Override
    public com.google.protobuf.Descriptors.Descriptor
        getDescriptorForType() {
      return com.google.ads.googleads.v19.services.AudienceInsightsServiceProto.internal_static_google_ads_googleads_v19_services_GenerateTargetingSuggestionMetricsResponse_descriptor;
    }

    @java.lang.Override
    public com.google.ads.googleads.v19.services.GenerateTargetingSuggestionMetricsResponse getDefaultInstanceForType() {
      return com.google.ads.googleads.v19.services.GenerateTargetingSuggestionMetricsResponse.getDefaultInstance();
    }

    @java.lang.Override
    public com.google.ads.googleads.v19.services.GenerateTargetingSuggestionMetricsResponse build() {
      com.google.ads.googleads.v19.services.GenerateTargetingSuggestionMetricsResponse result = buildPartial();
      if (!result.isInitialized()) {
        throw newUninitializedMessageException(result);
      }
      return result;
    }

    @java.lang.Override
    public com.google.ads.googleads.v19.services.GenerateTargetingSuggestionMetricsResponse buildPartial() {
      com.google.ads.googleads.v19.services.GenerateTargetingSuggestionMetricsResponse result = new com.google.ads.googleads.v19.services.GenerateTargetingSuggestionMetricsResponse(this);
      buildPartialRepeatedFields(result);
      if (bitField0_ != 0) { buildPartial0(result); }
      onBuilt();
      return result;
    }

    // Transfers the suggestions list into the built message; if the builder still
    // owns a mutable copy (bit 0 set), it is frozen via unmodifiableList first.
    private void buildPartialRepeatedFields(com.google.ads.googleads.v19.services.GenerateTargetingSuggestionMetricsResponse result) {
      if (suggestionsBuilder_ == null) {
        if (((bitField0_ & 0x00000001) != 0)) {
          suggestions_ = java.util.Collections.unmodifiableList(suggestions_);
          bitField0_ = (bitField0_ & ~0x00000001);
        }
        result.suggestions_ = suggestions_;
      } else {
        result.suggestions_ = suggestionsBuilder_.build();
      }
    }

    // No singular fields in this message, so this is intentionally a no-op.
    private void buildPartial0(com.google.ads.googleads.v19.services.GenerateTargetingSuggestionMetricsResponse result) {
      int from_bitField0_ = bitField0_;
    }

    @java.lang.Override
    public Builder clone() {
      return super.clone();
    }
    @java.lang.Override
    public Builder setField(
        com.google.protobuf.Descriptors.FieldDescriptor field,
        java.lang.Object value) {
      return super.setField(field, value);
    }
    @java.lang.Override
    public Builder clearField(
        com.google.protobuf.Descriptors.FieldDescriptor field) {
      return super.clearField(field);
    }
    @java.lang.Override
    public Builder clearOneof(
        com.google.protobuf.Descriptors.OneofDescriptor oneof) {
      return super.clearOneof(oneof);
    }
    @java.lang.Override
    public Builder setRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field,
        int index, java.lang.Object value) {
      return super.setRepeatedField(field, index, value);
    }
    @java.lang.Override
    public Builder addRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field,
        java.lang.Object value) {
      return super.addRepeatedField(field, value);
    }
    @java.lang.Override
    public Builder mergeFrom(com.google.protobuf.Message other) {
      if (other instanceof com.google.ads.googleads.v19.services.GenerateTargetingSuggestionMetricsResponse) {
        return mergeFrom((com.google.ads.googleads.v19.services.GenerateTargetingSuggestionMetricsResponse)other);
      } else {
        super.mergeFrom(other);
        return this;
      }
    }

    // Appends other's suggestions to this builder. When this builder's list is
    // still empty, the other message's (immutable) list is shared by reference
    // and copied only on a later mutation (ensureSuggestionsIsMutable).
    public Builder mergeFrom(com.google.ads.googleads.v19.services.GenerateTargetingSuggestionMetricsResponse other) {
      if (other == com.google.ads.googleads.v19.services.GenerateTargetingSuggestionMetricsResponse.getDefaultInstance()) return this;
      if (suggestionsBuilder_ == null) {
        if (!other.suggestions_.isEmpty()) {
          if (suggestions_.isEmpty()) {
            suggestions_ = other.suggestions_;
            bitField0_ = (bitField0_ & ~0x00000001);
          } else {
            ensureSuggestionsIsMutable();
            suggestions_.addAll(other.suggestions_);
          }
          onChanged();
        }
      } else {
        if (!other.suggestions_.isEmpty()) {
          if (suggestionsBuilder_.isEmpty()) {
            suggestionsBuilder_.dispose();
            suggestionsBuilder_ = null;
            suggestions_ = other.suggestions_;
            bitField0_ = (bitField0_ & ~0x00000001);
            suggestionsBuilder_ =
              com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ?
                 getSuggestionsFieldBuilder() : null;
          } else {
            suggestionsBuilder_.addAllMessages(other.suggestions_);
          }
        }
      }
      this.mergeUnknownFields(other.getUnknownFields());
      onChanged();
      return this;
    }

    @java.lang.Override
    public final boolean isInitialized() {
      return true;
    }

    // Streaming wire-format parse: tag 10 is field 1 (suggestions), length-delimited;
    // anything else is routed to parseUnknownField.
    @java.lang.Override
    public Builder mergeFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      if (extensionRegistry == null) {
        throw new java.lang.NullPointerException();
      }
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              done = true;
              break;
            case 10: {
              com.google.ads.googleads.v19.services.TargetingSuggestionMetrics m =
                  input.readMessage(
                      com.google.ads.googleads.v19.services.TargetingSuggestionMetrics.parser(),
                      extensionRegistry);
              if (suggestionsBuilder_ == null) {
                ensureSuggestionsIsMutable();
                suggestions_.add(m);
              } else {
                suggestionsBuilder_.addMessage(m);
              }
              break;
            } // case 10
            default: {
              if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                done = true; // was an endgroup tag
              }
              break;
            } // default:
          } // switch (tag)
        } // while (!done)
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.unwrapIOException();
      } finally {
        onChanged();
      } // finally
      return this;
    }
    private int bitField0_;

    private java.util.List<com.google.ads.googleads.v19.services.TargetingSuggestionMetrics> suggestions_ =
      java.util.Collections.emptyList();
    // Copies the (possibly shared/immutable) list into a private ArrayList before
    // the first mutation; bit 0 of bitField0_ records ownership.
    private void ensureSuggestionsIsMutable() {
      if (!((bitField0_ & 0x00000001) != 0)) {
        suggestions_ = new java.util.ArrayList<com.google.ads.googleads.v19.services.TargetingSuggestionMetrics>(suggestions_);
        bitField0_ |= 0x00000001;
      }
    }

    // Lazily created (see getSuggestionsFieldBuilder); once non-null it owns the
    // repeated-field state and suggestions_ is ignored.
    private com.google.protobuf.RepeatedFieldBuilderV3<
        com.google.ads.googleads.v19.services.TargetingSuggestionMetrics, com.google.ads.googleads.v19.services.TargetingSuggestionMetrics.Builder, com.google.ads.googleads.v19.services.TargetingSuggestionMetricsOrBuilder> suggestionsBuilder_;

    /**
     * <pre>
     * Suggested targetable audiences. There will be one suggestion for each
     * [GenerateTargetingSuggestionMetricsRequest.audiences] requested, matching
     * the order requested.
     * </pre>
     *
     * <code>repeated .google.ads.googleads.v19.services.TargetingSuggestionMetrics suggestions = 1;</code>
     */
    public java.util.List<com.google.ads.googleads.v19.services.TargetingSuggestionMetrics> getSuggestionsList() {
      if (suggestionsBuilder_ == null) {
        return java.util.Collections.unmodifiableList(suggestions_);
      } else {
        return suggestionsBuilder_.getMessageList();
      }
    }
    /**
     * <pre>
     * Suggested targetable audiences. There will be one suggestion for each
     * [GenerateTargetingSuggestionMetricsRequest.audiences] requested, matching
     * the order requested.
     * </pre>
     *
     * <code>repeated .google.ads.googleads.v19.services.TargetingSuggestionMetrics suggestions = 1;</code>
     */
    public int getSuggestionsCount() {
      if (suggestionsBuilder_ == null) {
        return suggestions_.size();
      } else {
        return suggestionsBuilder_.getCount();
      }
    }
    /**
     * <pre>
     * Suggested targetable audiences. There will be one suggestion for each
     * [GenerateTargetingSuggestionMetricsRequest.audiences] requested, matching
     * the order requested.
     * </pre>
     *
     * <code>repeated .google.ads.googleads.v19.services.TargetingSuggestionMetrics suggestions = 1;</code>
     */
    public com.google.ads.googleads.v19.services.TargetingSuggestionMetrics getSuggestions(int index) {
      if (suggestionsBuilder_ == null) {
        return suggestions_.get(index);
      } else {
        return suggestionsBuilder_.getMessage(index);
      }
    }
    /**
     * <pre>
     * Suggested targetable audiences. There will be one suggestion for each
     * [GenerateTargetingSuggestionMetricsRequest.audiences] requested, matching
     * the order requested.
     * </pre>
     *
     * <code>repeated .google.ads.googleads.v19.services.TargetingSuggestionMetrics suggestions = 1;</code>
     */
    public Builder setSuggestions(
        int index, com.google.ads.googleads.v19.services.TargetingSuggestionMetrics value) {
      if (suggestionsBuilder_ == null) {
        if (value == null) {
          throw new NullPointerException();
        }
        ensureSuggestionsIsMutable();
        suggestions_.set(index, value);
        onChanged();
      } else {
        suggestionsBuilder_.setMessage(index, value);
      }
      return this;
    }
    /**
     * <pre>
     * Suggested targetable audiences. There will be one suggestion for each
     * [GenerateTargetingSuggestionMetricsRequest.audiences] requested, matching
     * the order requested.
     * </pre>
     *
     * <code>repeated .google.ads.googleads.v19.services.TargetingSuggestionMetrics suggestions = 1;</code>
     */
    public Builder setSuggestions(
        int index, com.google.ads.googleads.v19.services.TargetingSuggestionMetrics.Builder builderForValue) {
      if (suggestionsBuilder_ == null) {
        ensureSuggestionsIsMutable();
        suggestions_.set(index, builderForValue.build());
        onChanged();
      } else {
        suggestionsBuilder_.setMessage(index, builderForValue.build());
      }
      return this;
    }
    /**
     * <pre>
     * Suggested targetable audiences. There will be one suggestion for each
     * [GenerateTargetingSuggestionMetricsRequest.audiences] requested, matching
     * the order requested.
     * </pre>
     *
     * <code>repeated .google.ads.googleads.v19.services.TargetingSuggestionMetrics suggestions = 1;</code>
     */
    public Builder addSuggestions(com.google.ads.googleads.v19.services.TargetingSuggestionMetrics value) {
      if (suggestionsBuilder_ == null) {
        if (value == null) {
          throw new NullPointerException();
        }
        ensureSuggestionsIsMutable();
        suggestions_.add(value);
        onChanged();
      } else {
        suggestionsBuilder_.addMessage(value);
      }
      return this;
    }
    /**
     * <pre>
     * Suggested targetable audiences. There will be one suggestion for each
     * [GenerateTargetingSuggestionMetricsRequest.audiences] requested, matching
     * the order requested.
     * </pre>
     *
     * <code>repeated .google.ads.googleads.v19.services.TargetingSuggestionMetrics suggestions = 1;</code>
     */
    public Builder addSuggestions(
        int index, com.google.ads.googleads.v19.services.TargetingSuggestionMetrics value) {
      if (suggestionsBuilder_ == null) {
        if (value == null) {
          throw new NullPointerException();
        }
        ensureSuggestionsIsMutable();
        suggestions_.add(index, value);
        onChanged();
      } else {
        suggestionsBuilder_.addMessage(index, value);
      }
      return this;
    }
    /**
     * <pre>
     * Suggested targetable audiences. There will be one suggestion for each
     * [GenerateTargetingSuggestionMetricsRequest.audiences] requested, matching
     * the order requested.
     * </pre>
     *
     * <code>repeated .google.ads.googleads.v19.services.TargetingSuggestionMetrics suggestions = 1;</code>
     */
    public Builder addSuggestions(
        com.google.ads.googleads.v19.services.TargetingSuggestionMetrics.Builder builderForValue) {
      if (suggestionsBuilder_ == null) {
        ensureSuggestionsIsMutable();
        suggestions_.add(builderForValue.build());
        onChanged();
      } else {
        suggestionsBuilder_.addMessage(builderForValue.build());
      }
      return this;
    }
    /**
     * <pre>
     * Suggested targetable audiences. There will be one suggestion for each
     * [GenerateTargetingSuggestionMetricsRequest.audiences] requested, matching
     * the order requested.
     * </pre>
     *
     * <code>repeated .google.ads.googleads.v19.services.TargetingSuggestionMetrics suggestions = 1;</code>
     */
    public Builder addSuggestions(
        int index, com.google.ads.googleads.v19.services.TargetingSuggestionMetrics.Builder builderForValue) {
      if (suggestionsBuilder_ == null) {
        ensureSuggestionsIsMutable();
        suggestions_.add(index, builderForValue.build());
        onChanged();
      } else {
        suggestionsBuilder_.addMessage(index, builderForValue.build());
      }
      return this;
    }
    /**
     * <pre>
     * Suggested targetable audiences. There will be one suggestion for each
     * [GenerateTargetingSuggestionMetricsRequest.audiences] requested, matching
     * the order requested.
     * </pre>
     *
     * <code>repeated .google.ads.googleads.v19.services.TargetingSuggestionMetrics suggestions = 1;</code>
     */
    public Builder addAllSuggestions(
        java.lang.Iterable<? extends com.google.ads.googleads.v19.services.TargetingSuggestionMetrics> values) {
      if (suggestionsBuilder_ == null) {
        ensureSuggestionsIsMutable();
        com.google.protobuf.AbstractMessageLite.Builder.addAll(
            values, suggestions_);
        onChanged();
      } else {
        suggestionsBuilder_.addAllMessages(values);
      }
      return this;
    }
    /**
     * <pre>
     * Suggested targetable audiences. There will be one suggestion for each
     * [GenerateTargetingSuggestionMetricsRequest.audiences] requested, matching
     * the order requested.
     * </pre>
     *
     * <code>repeated .google.ads.googleads.v19.services.TargetingSuggestionMetrics suggestions = 1;</code>
     */
    public Builder clearSuggestions() {
      if (suggestionsBuilder_ == null) {
        suggestions_ = java.util.Collections.emptyList();
        bitField0_ = (bitField0_ & ~0x00000001);
        onChanged();
      } else {
        suggestionsBuilder_.clear();
      }
      return this;
    }
    /**
     * <pre>
     * Suggested targetable audiences. There will be one suggestion for each
     * [GenerateTargetingSuggestionMetricsRequest.audiences] requested, matching
     * the order requested.
     * </pre>
     *
     * <code>repeated .google.ads.googleads.v19.services.TargetingSuggestionMetrics suggestions = 1;</code>
     */
    public Builder removeSuggestions(int index) {
      if (suggestionsBuilder_ == null) {
        ensureSuggestionsIsMutable();
        suggestions_.remove(index);
        onChanged();
      } else {
        suggestionsBuilder_.remove(index);
      }
      return this;
    }
    /**
     * <pre>
     * Suggested targetable audiences. There will be one suggestion for each
     * [GenerateTargetingSuggestionMetricsRequest.audiences] requested, matching
     * the order requested.
     * </pre>
     *
     * <code>repeated .google.ads.googleads.v19.services.TargetingSuggestionMetrics suggestions = 1;</code>
     */
    public com.google.ads.googleads.v19.services.TargetingSuggestionMetrics.Builder getSuggestionsBuilder(
        int index) {
      return getSuggestionsFieldBuilder().getBuilder(index);
    }
    /**
     * <pre>
     * Suggested targetable audiences. There will be one suggestion for each
     * [GenerateTargetingSuggestionMetricsRequest.audiences] requested, matching
     * the order requested.
     * </pre>
     *
     * <code>repeated .google.ads.googleads.v19.services.TargetingSuggestionMetrics suggestions = 1;</code>
     */
    public com.google.ads.googleads.v19.services.TargetingSuggestionMetricsOrBuilder getSuggestionsOrBuilder(
        int index) {
      if (suggestionsBuilder_ == null) {
        return suggestions_.get(index);  } else {
        return suggestionsBuilder_.getMessageOrBuilder(index);
      }
    }
    /**
     * <pre>
     * Suggested targetable audiences. There will be one suggestion for each
     * [GenerateTargetingSuggestionMetricsRequest.audiences] requested, matching
     * the order requested.
     * </pre>
     *
     * <code>repeated .google.ads.googleads.v19.services.TargetingSuggestionMetrics suggestions = 1;</code>
     */
    public java.util.List<? extends com.google.ads.googleads.v19.services.TargetingSuggestionMetricsOrBuilder>
         getSuggestionsOrBuilderList() {
      if (suggestionsBuilder_ != null) {
        return suggestionsBuilder_.getMessageOrBuilderList();
      } else {
        return java.util.Collections.unmodifiableList(suggestions_);
      }
    }
    /**
     * <pre>
     * Suggested targetable audiences. There will be one suggestion for each
     * [GenerateTargetingSuggestionMetricsRequest.audiences] requested, matching
     * the order requested.
     * </pre>
     *
     * <code>repeated .google.ads.googleads.v19.services.TargetingSuggestionMetrics suggestions = 1;</code>
     */
    public com.google.ads.googleads.v19.services.TargetingSuggestionMetrics.Builder addSuggestionsBuilder() {
      return getSuggestionsFieldBuilder().addBuilder(
          com.google.ads.googleads.v19.services.TargetingSuggestionMetrics.getDefaultInstance());
    }
    /**
     * <pre>
     * Suggested targetable audiences. There will be one suggestion for each
     * [GenerateTargetingSuggestionMetricsRequest.audiences] requested, matching
     * the order requested.
     * </pre>
     *
     * <code>repeated .google.ads.googleads.v19.services.TargetingSuggestionMetrics suggestions = 1;</code>
     */
    public com.google.ads.googleads.v19.services.TargetingSuggestionMetrics.Builder addSuggestionsBuilder(
        int index) {
      return getSuggestionsFieldBuilder().addBuilder(
          index, com.google.ads.googleads.v19.services.TargetingSuggestionMetrics.getDefaultInstance());
    }
    /**
     * <pre>
     * Suggested targetable audiences. There will be one suggestion for each
     * [GenerateTargetingSuggestionMetricsRequest.audiences] requested, matching
     * the order requested.
     * </pre>
     *
     * <code>repeated .google.ads.googleads.v19.services.TargetingSuggestionMetrics suggestions = 1;</code>
     */
    public java.util.List<com.google.ads.googleads.v19.services.TargetingSuggestionMetrics.Builder>
         getSuggestionsBuilderList() {
      return getSuggestionsFieldBuilder().getBuilderList();
    }
    // Lazy initializer: moves list-based state into a RepeatedFieldBuilderV3 on
    // first nested-builder access; suggestions_ is nulled out afterwards.
    private com.google.protobuf.RepeatedFieldBuilderV3<
        com.google.ads.googleads.v19.services.TargetingSuggestionMetrics, com.google.ads.googleads.v19.services.TargetingSuggestionMetrics.Builder, com.google.ads.googleads.v19.services.TargetingSuggestionMetricsOrBuilder>
        getSuggestionsFieldBuilder() {
      if (suggestionsBuilder_ == null) {
        suggestionsBuilder_ = new com.google.protobuf.RepeatedFieldBuilderV3<
            com.google.ads.googleads.v19.services.TargetingSuggestionMetrics, com.google.ads.googleads.v19.services.TargetingSuggestionMetrics.Builder, com.google.ads.googleads.v19.services.TargetingSuggestionMetricsOrBuilder>(
                suggestions_,
                ((bitField0_ & 0x00000001) != 0),
                getParentForChildren(),
                isClean());
        suggestions_ = null;
      }
      return suggestionsBuilder_;
    }
    @java.lang.Override
    public final Builder setUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.setUnknownFields(unknownFields);
    }

    @java.lang.Override
    public final Builder mergeUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.mergeUnknownFields(unknownFields);
    }


    // @@protoc_insertion_point(builder_scope:google.ads.googleads.v19.services.GenerateTargetingSuggestionMetricsResponse)
  }

  // @@protoc_insertion_point(class_scope:google.ads.googleads.v19.services.GenerateTargetingSuggestionMetricsResponse)
  // Singleton default instance shared by getDefaultInstance()/newBuilder().
  private static final com.google.ads.googleads.v19.services.GenerateTargetingSuggestionMetricsResponse DEFAULT_INSTANCE;
  static {
    DEFAULT_INSTANCE = new com.google.ads.googleads.v19.services.GenerateTargetingSuggestionMetricsResponse();
  }

  public static com.google.ads.googleads.v19.services.GenerateTargetingSuggestionMetricsResponse getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }

  // Parser delegates to Builder.mergeFrom and attaches the partially built
  // message to any parse failure for diagnostics.
  private static final com.google.protobuf.Parser<GenerateTargetingSuggestionMetricsResponse>
      PARSER = new com.google.protobuf.AbstractParser<GenerateTargetingSuggestionMetricsResponse>() {
    @java.lang.Override
    public GenerateTargetingSuggestionMetricsResponse parsePartialFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      Builder builder = newBuilder();
      try {
        builder.mergeFrom(input, extensionRegistry);
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.setUnfinishedMessage(builder.buildPartial());
      } catch (com.google.protobuf.UninitializedMessageException e) {
        throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
      } catch (java.io.IOException e) {
        throw new com.google.protobuf.InvalidProtocolBufferException(e)
            .setUnfinishedMessage(builder.buildPartial());
      }
      return builder.buildPartial();
    }
  };

  public static com.google.protobuf.Parser<GenerateTargetingSuggestionMetricsResponse> parser() {
    return PARSER;
  }

  @java.lang.Override
  public com.google.protobuf.Parser<GenerateTargetingSuggestionMetricsResponse> getParserForType() {
    return PARSER;
  }

  @java.lang.Override
  public com.google.ads.googleads.v19.services.GenerateTargetingSuggestionMetricsResponse getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }

}
// --- Concatenation artifact: NOT Java source. ---
// The three values that appeared here — repo id "googleads/google-ads-java",
// size "36,415", and file path
// "google-ads-stubs-v20/src/main/java/com/google/ads/googleads/v20/services/GenerateTargetingSuggestionMetricsResponse.java"
// — are catalog metadata left over from a file-concatenation step. They mark the
// seam between the end of the generated v19 file above and the start of the
// generated v20 file below; remove this marker when the two files are split apart.
// Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/ads/googleads/v20/services/audience_insights_service.proto // Protobuf Java Version: 3.25.7 package com.google.ads.googleads.v20.services; /** * <pre> * Response message for * [AudienceInsightsService.GenerateTargetingSuggestionMetrics][google.ads.googleads.v20.services.AudienceInsightsService.GenerateTargetingSuggestionMetrics]. * </pre> * * Protobuf type {@code google.ads.googleads.v20.services.GenerateTargetingSuggestionMetricsResponse} */ public final class GenerateTargetingSuggestionMetricsResponse extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.ads.googleads.v20.services.GenerateTargetingSuggestionMetricsResponse) GenerateTargetingSuggestionMetricsResponseOrBuilder { private static final long serialVersionUID = 0L; // Use GenerateTargetingSuggestionMetricsResponse.newBuilder() to construct. private GenerateTargetingSuggestionMetricsResponse(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private GenerateTargetingSuggestionMetricsResponse() { suggestions_ = java.util.Collections.emptyList(); } @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance( UnusedPrivateParameter unused) { return new GenerateTargetingSuggestionMetricsResponse(); } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.ads.googleads.v20.services.AudienceInsightsServiceProto.internal_static_google_ads_googleads_v20_services_GenerateTargetingSuggestionMetricsResponse_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.ads.googleads.v20.services.AudienceInsightsServiceProto.internal_static_google_ads_googleads_v20_services_GenerateTargetingSuggestionMetricsResponse_fieldAccessorTable .ensureFieldAccessorsInitialized( 
com.google.ads.googleads.v20.services.GenerateTargetingSuggestionMetricsResponse.class, com.google.ads.googleads.v20.services.GenerateTargetingSuggestionMetricsResponse.Builder.class); } public static final int SUGGESTIONS_FIELD_NUMBER = 1; @SuppressWarnings("serial") private java.util.List<com.google.ads.googleads.v20.services.TargetingSuggestionMetrics> suggestions_; /** * <pre> * Suggested targetable audiences. There will be one suggestion for each * [GenerateTargetingSuggestionMetricsRequest.audiences] requested, matching * the order requested. * </pre> * * <code>repeated .google.ads.googleads.v20.services.TargetingSuggestionMetrics suggestions = 1;</code> */ @java.lang.Override public java.util.List<com.google.ads.googleads.v20.services.TargetingSuggestionMetrics> getSuggestionsList() { return suggestions_; } /** * <pre> * Suggested targetable audiences. There will be one suggestion for each * [GenerateTargetingSuggestionMetricsRequest.audiences] requested, matching * the order requested. * </pre> * * <code>repeated .google.ads.googleads.v20.services.TargetingSuggestionMetrics suggestions = 1;</code> */ @java.lang.Override public java.util.List<? extends com.google.ads.googleads.v20.services.TargetingSuggestionMetricsOrBuilder> getSuggestionsOrBuilderList() { return suggestions_; } /** * <pre> * Suggested targetable audiences. There will be one suggestion for each * [GenerateTargetingSuggestionMetricsRequest.audiences] requested, matching * the order requested. * </pre> * * <code>repeated .google.ads.googleads.v20.services.TargetingSuggestionMetrics suggestions = 1;</code> */ @java.lang.Override public int getSuggestionsCount() { return suggestions_.size(); } /** * <pre> * Suggested targetable audiences. There will be one suggestion for each * [GenerateTargetingSuggestionMetricsRequest.audiences] requested, matching * the order requested. 
* </pre> * * <code>repeated .google.ads.googleads.v20.services.TargetingSuggestionMetrics suggestions = 1;</code> */ @java.lang.Override public com.google.ads.googleads.v20.services.TargetingSuggestionMetrics getSuggestions(int index) { return suggestions_.get(index); } /** * <pre> * Suggested targetable audiences. There will be one suggestion for each * [GenerateTargetingSuggestionMetricsRequest.audiences] requested, matching * the order requested. * </pre> * * <code>repeated .google.ads.googleads.v20.services.TargetingSuggestionMetrics suggestions = 1;</code> */ @java.lang.Override public com.google.ads.googleads.v20.services.TargetingSuggestionMetricsOrBuilder getSuggestionsOrBuilder( int index) { return suggestions_.get(index); } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { for (int i = 0; i < suggestions_.size(); i++) { output.writeMessage(1, suggestions_.get(i)); } getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; for (int i = 0; i < suggestions_.size(); i++) { size += com.google.protobuf.CodedOutputStream .computeMessageSize(1, suggestions_.get(i)); } size += getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof com.google.ads.googleads.v20.services.GenerateTargetingSuggestionMetricsResponse)) { return super.equals(obj); } com.google.ads.googleads.v20.services.GenerateTargetingSuggestionMetricsResponse other = 
(com.google.ads.googleads.v20.services.GenerateTargetingSuggestionMetricsResponse) obj; if (!getSuggestionsList() .equals(other.getSuggestionsList())) return false; if (!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); if (getSuggestionsCount() > 0) { hash = (37 * hash) + SUGGESTIONS_FIELD_NUMBER; hash = (53 * hash) + getSuggestionsList().hashCode(); } hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static com.google.ads.googleads.v20.services.GenerateTargetingSuggestionMetricsResponse parseFrom( java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.ads.googleads.v20.services.GenerateTargetingSuggestionMetricsResponse parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.ads.googleads.v20.services.GenerateTargetingSuggestionMetricsResponse parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.ads.googleads.v20.services.GenerateTargetingSuggestionMetricsResponse parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.ads.googleads.v20.services.GenerateTargetingSuggestionMetricsResponse parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static 
com.google.ads.googleads.v20.services.GenerateTargetingSuggestionMetricsResponse parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.ads.googleads.v20.services.GenerateTargetingSuggestionMetricsResponse parseFrom(java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static com.google.ads.googleads.v20.services.GenerateTargetingSuggestionMetricsResponse parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } public static com.google.ads.googleads.v20.services.GenerateTargetingSuggestionMetricsResponse parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input); } public static com.google.ads.googleads.v20.services.GenerateTargetingSuggestionMetricsResponse parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static com.google.ads.googleads.v20.services.GenerateTargetingSuggestionMetricsResponse parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static com.google.ads.googleads.v20.services.GenerateTargetingSuggestionMetricsResponse parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return 
com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder(com.google.ads.googleads.v20.services.GenerateTargetingSuggestionMetricsResponse prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType( com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * <pre> * Response message for * [AudienceInsightsService.GenerateTargetingSuggestionMetrics][google.ads.googleads.v20.services.AudienceInsightsService.GenerateTargetingSuggestionMetrics]. * </pre> * * Protobuf type {@code google.ads.googleads.v20.services.GenerateTargetingSuggestionMetricsResponse} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:google.ads.googleads.v20.services.GenerateTargetingSuggestionMetricsResponse) com.google.ads.googleads.v20.services.GenerateTargetingSuggestionMetricsResponseOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.ads.googleads.v20.services.AudienceInsightsServiceProto.internal_static_google_ads_googleads_v20_services_GenerateTargetingSuggestionMetricsResponse_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.ads.googleads.v20.services.AudienceInsightsServiceProto.internal_static_google_ads_googleads_v20_services_GenerateTargetingSuggestionMetricsResponse_fieldAccessorTable .ensureFieldAccessorsInitialized( 
com.google.ads.googleads.v20.services.GenerateTargetingSuggestionMetricsResponse.class, com.google.ads.googleads.v20.services.GenerateTargetingSuggestionMetricsResponse.Builder.class); } // Construct using com.google.ads.googleads.v20.services.GenerateTargetingSuggestionMetricsResponse.newBuilder() private Builder() { } private Builder( com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); } @java.lang.Override public Builder clear() { super.clear(); bitField0_ = 0; if (suggestionsBuilder_ == null) { suggestions_ = java.util.Collections.emptyList(); } else { suggestions_ = null; suggestionsBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000001); return this; } @java.lang.Override public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return com.google.ads.googleads.v20.services.AudienceInsightsServiceProto.internal_static_google_ads_googleads_v20_services_GenerateTargetingSuggestionMetricsResponse_descriptor; } @java.lang.Override public com.google.ads.googleads.v20.services.GenerateTargetingSuggestionMetricsResponse getDefaultInstanceForType() { return com.google.ads.googleads.v20.services.GenerateTargetingSuggestionMetricsResponse.getDefaultInstance(); } @java.lang.Override public com.google.ads.googleads.v20.services.GenerateTargetingSuggestionMetricsResponse build() { com.google.ads.googleads.v20.services.GenerateTargetingSuggestionMetricsResponse result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public com.google.ads.googleads.v20.services.GenerateTargetingSuggestionMetricsResponse buildPartial() { com.google.ads.googleads.v20.services.GenerateTargetingSuggestionMetricsResponse result = new com.google.ads.googleads.v20.services.GenerateTargetingSuggestionMetricsResponse(this); buildPartialRepeatedFields(result); if (bitField0_ != 0) { buildPartial0(result); } onBuilt(); return result; } private void 
buildPartialRepeatedFields(com.google.ads.googleads.v20.services.GenerateTargetingSuggestionMetricsResponse result) { if (suggestionsBuilder_ == null) { if (((bitField0_ & 0x00000001) != 0)) { suggestions_ = java.util.Collections.unmodifiableList(suggestions_); bitField0_ = (bitField0_ & ~0x00000001); } result.suggestions_ = suggestions_; } else { result.suggestions_ = suggestionsBuilder_.build(); } } private void buildPartial0(com.google.ads.googleads.v20.services.GenerateTargetingSuggestionMetricsResponse result) { int from_bitField0_ = bitField0_; } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField( com.google.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof( com.google.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof com.google.ads.googleads.v20.services.GenerateTargetingSuggestionMetricsResponse) { return mergeFrom((com.google.ads.googleads.v20.services.GenerateTargetingSuggestionMetricsResponse)other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(com.google.ads.googleads.v20.services.GenerateTargetingSuggestionMetricsResponse other) { if (other == 
com.google.ads.googleads.v20.services.GenerateTargetingSuggestionMetricsResponse.getDefaultInstance()) return this; if (suggestionsBuilder_ == null) { if (!other.suggestions_.isEmpty()) { if (suggestions_.isEmpty()) { suggestions_ = other.suggestions_; bitField0_ = (bitField0_ & ~0x00000001); } else { ensureSuggestionsIsMutable(); suggestions_.addAll(other.suggestions_); } onChanged(); } } else { if (!other.suggestions_.isEmpty()) { if (suggestionsBuilder_.isEmpty()) { suggestionsBuilder_.dispose(); suggestionsBuilder_ = null; suggestions_ = other.suggestions_; bitField0_ = (bitField0_ & ~0x00000001); suggestionsBuilder_ = com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ? getSuggestionsFieldBuilder() : null; } else { suggestionsBuilder_.addAllMessages(other.suggestions_); } } } this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { com.google.ads.googleads.v20.services.TargetingSuggestionMetrics m = input.readMessage( com.google.ads.googleads.v20.services.TargetingSuggestionMetrics.parser(), extensionRegistry); if (suggestionsBuilder_ == null) { ensureSuggestionsIsMutable(); suggestions_.add(m); } else { suggestionsBuilder_.addMessage(m); } break; } // case 10 default: { if (!super.parseUnknownField(input, extensionRegistry, tag)) { done = true; // was an endgroup tag } break; } // default: } // switch (tag) } // while (!done) } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.unwrapIOException(); } finally { onChanged(); } // finally return this; } private 
int bitField0_; private java.util.List<com.google.ads.googleads.v20.services.TargetingSuggestionMetrics> suggestions_ = java.util.Collections.emptyList(); private void ensureSuggestionsIsMutable() { if (!((bitField0_ & 0x00000001) != 0)) { suggestions_ = new java.util.ArrayList<com.google.ads.googleads.v20.services.TargetingSuggestionMetrics>(suggestions_); bitField0_ |= 0x00000001; } } private com.google.protobuf.RepeatedFieldBuilderV3< com.google.ads.googleads.v20.services.TargetingSuggestionMetrics, com.google.ads.googleads.v20.services.TargetingSuggestionMetrics.Builder, com.google.ads.googleads.v20.services.TargetingSuggestionMetricsOrBuilder> suggestionsBuilder_; /** * <pre> * Suggested targetable audiences. There will be one suggestion for each * [GenerateTargetingSuggestionMetricsRequest.audiences] requested, matching * the order requested. * </pre> * * <code>repeated .google.ads.googleads.v20.services.TargetingSuggestionMetrics suggestions = 1;</code> */ public java.util.List<com.google.ads.googleads.v20.services.TargetingSuggestionMetrics> getSuggestionsList() { if (suggestionsBuilder_ == null) { return java.util.Collections.unmodifiableList(suggestions_); } else { return suggestionsBuilder_.getMessageList(); } } /** * <pre> * Suggested targetable audiences. There will be one suggestion for each * [GenerateTargetingSuggestionMetricsRequest.audiences] requested, matching * the order requested. * </pre> * * <code>repeated .google.ads.googleads.v20.services.TargetingSuggestionMetrics suggestions = 1;</code> */ public int getSuggestionsCount() { if (suggestionsBuilder_ == null) { return suggestions_.size(); } else { return suggestionsBuilder_.getCount(); } } /** * <pre> * Suggested targetable audiences. There will be one suggestion for each * [GenerateTargetingSuggestionMetricsRequest.audiences] requested, matching * the order requested. 
* </pre> * * <code>repeated .google.ads.googleads.v20.services.TargetingSuggestionMetrics suggestions = 1;</code> */ public com.google.ads.googleads.v20.services.TargetingSuggestionMetrics getSuggestions(int index) { if (suggestionsBuilder_ == null) { return suggestions_.get(index); } else { return suggestionsBuilder_.getMessage(index); } } /** * <pre> * Suggested targetable audiences. There will be one suggestion for each * [GenerateTargetingSuggestionMetricsRequest.audiences] requested, matching * the order requested. * </pre> * * <code>repeated .google.ads.googleads.v20.services.TargetingSuggestionMetrics suggestions = 1;</code> */ public Builder setSuggestions( int index, com.google.ads.googleads.v20.services.TargetingSuggestionMetrics value) { if (suggestionsBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureSuggestionsIsMutable(); suggestions_.set(index, value); onChanged(); } else { suggestionsBuilder_.setMessage(index, value); } return this; } /** * <pre> * Suggested targetable audiences. There will be one suggestion for each * [GenerateTargetingSuggestionMetricsRequest.audiences] requested, matching * the order requested. * </pre> * * <code>repeated .google.ads.googleads.v20.services.TargetingSuggestionMetrics suggestions = 1;</code> */ public Builder setSuggestions( int index, com.google.ads.googleads.v20.services.TargetingSuggestionMetrics.Builder builderForValue) { if (suggestionsBuilder_ == null) { ensureSuggestionsIsMutable(); suggestions_.set(index, builderForValue.build()); onChanged(); } else { suggestionsBuilder_.setMessage(index, builderForValue.build()); } return this; } /** * <pre> * Suggested targetable audiences. There will be one suggestion for each * [GenerateTargetingSuggestionMetricsRequest.audiences] requested, matching * the order requested. 
* </pre> * * <code>repeated .google.ads.googleads.v20.services.TargetingSuggestionMetrics suggestions = 1;</code> */ public Builder addSuggestions(com.google.ads.googleads.v20.services.TargetingSuggestionMetrics value) { if (suggestionsBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureSuggestionsIsMutable(); suggestions_.add(value); onChanged(); } else { suggestionsBuilder_.addMessage(value); } return this; } /** * <pre> * Suggested targetable audiences. There will be one suggestion for each * [GenerateTargetingSuggestionMetricsRequest.audiences] requested, matching * the order requested. * </pre> * * <code>repeated .google.ads.googleads.v20.services.TargetingSuggestionMetrics suggestions = 1;</code> */ public Builder addSuggestions( int index, com.google.ads.googleads.v20.services.TargetingSuggestionMetrics value) { if (suggestionsBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureSuggestionsIsMutable(); suggestions_.add(index, value); onChanged(); } else { suggestionsBuilder_.addMessage(index, value); } return this; } /** * <pre> * Suggested targetable audiences. There will be one suggestion for each * [GenerateTargetingSuggestionMetricsRequest.audiences] requested, matching * the order requested. * </pre> * * <code>repeated .google.ads.googleads.v20.services.TargetingSuggestionMetrics suggestions = 1;</code> */ public Builder addSuggestions( com.google.ads.googleads.v20.services.TargetingSuggestionMetrics.Builder builderForValue) { if (suggestionsBuilder_ == null) { ensureSuggestionsIsMutable(); suggestions_.add(builderForValue.build()); onChanged(); } else { suggestionsBuilder_.addMessage(builderForValue.build()); } return this; } /** * <pre> * Suggested targetable audiences. There will be one suggestion for each * [GenerateTargetingSuggestionMetricsRequest.audiences] requested, matching * the order requested. 
* </pre> * * <code>repeated .google.ads.googleads.v20.services.TargetingSuggestionMetrics suggestions = 1;</code> */ public Builder addSuggestions( int index, com.google.ads.googleads.v20.services.TargetingSuggestionMetrics.Builder builderForValue) { if (suggestionsBuilder_ == null) { ensureSuggestionsIsMutable(); suggestions_.add(index, builderForValue.build()); onChanged(); } else { suggestionsBuilder_.addMessage(index, builderForValue.build()); } return this; } /** * <pre> * Suggested targetable audiences. There will be one suggestion for each * [GenerateTargetingSuggestionMetricsRequest.audiences] requested, matching * the order requested. * </pre> * * <code>repeated .google.ads.googleads.v20.services.TargetingSuggestionMetrics suggestions = 1;</code> */ public Builder addAllSuggestions( java.lang.Iterable<? extends com.google.ads.googleads.v20.services.TargetingSuggestionMetrics> values) { if (suggestionsBuilder_ == null) { ensureSuggestionsIsMutable(); com.google.protobuf.AbstractMessageLite.Builder.addAll( values, suggestions_); onChanged(); } else { suggestionsBuilder_.addAllMessages(values); } return this; } /** * <pre> * Suggested targetable audiences. There will be one suggestion for each * [GenerateTargetingSuggestionMetricsRequest.audiences] requested, matching * the order requested. * </pre> * * <code>repeated .google.ads.googleads.v20.services.TargetingSuggestionMetrics suggestions = 1;</code> */ public Builder clearSuggestions() { if (suggestionsBuilder_ == null) { suggestions_ = java.util.Collections.emptyList(); bitField0_ = (bitField0_ & ~0x00000001); onChanged(); } else { suggestionsBuilder_.clear(); } return this; } /** * <pre> * Suggested targetable audiences. There will be one suggestion for each * [GenerateTargetingSuggestionMetricsRequest.audiences] requested, matching * the order requested. 
* </pre> * * <code>repeated .google.ads.googleads.v20.services.TargetingSuggestionMetrics suggestions = 1;</code> */ public Builder removeSuggestions(int index) { if (suggestionsBuilder_ == null) { ensureSuggestionsIsMutable(); suggestions_.remove(index); onChanged(); } else { suggestionsBuilder_.remove(index); } return this; } /** * <pre> * Suggested targetable audiences. There will be one suggestion for each * [GenerateTargetingSuggestionMetricsRequest.audiences] requested, matching * the order requested. * </pre> * * <code>repeated .google.ads.googleads.v20.services.TargetingSuggestionMetrics suggestions = 1;</code> */ public com.google.ads.googleads.v20.services.TargetingSuggestionMetrics.Builder getSuggestionsBuilder( int index) { return getSuggestionsFieldBuilder().getBuilder(index); } /** * <pre> * Suggested targetable audiences. There will be one suggestion for each * [GenerateTargetingSuggestionMetricsRequest.audiences] requested, matching * the order requested. * </pre> * * <code>repeated .google.ads.googleads.v20.services.TargetingSuggestionMetrics suggestions = 1;</code> */ public com.google.ads.googleads.v20.services.TargetingSuggestionMetricsOrBuilder getSuggestionsOrBuilder( int index) { if (suggestionsBuilder_ == null) { return suggestions_.get(index); } else { return suggestionsBuilder_.getMessageOrBuilder(index); } } /** * <pre> * Suggested targetable audiences. There will be one suggestion for each * [GenerateTargetingSuggestionMetricsRequest.audiences] requested, matching * the order requested. * </pre> * * <code>repeated .google.ads.googleads.v20.services.TargetingSuggestionMetrics suggestions = 1;</code> */ public java.util.List<? 
extends com.google.ads.googleads.v20.services.TargetingSuggestionMetricsOrBuilder> getSuggestionsOrBuilderList() { if (suggestionsBuilder_ != null) { return suggestionsBuilder_.getMessageOrBuilderList(); } else { return java.util.Collections.unmodifiableList(suggestions_); } } /** * <pre> * Suggested targetable audiences. There will be one suggestion for each * [GenerateTargetingSuggestionMetricsRequest.audiences] requested, matching * the order requested. * </pre> * * <code>repeated .google.ads.googleads.v20.services.TargetingSuggestionMetrics suggestions = 1;</code> */ public com.google.ads.googleads.v20.services.TargetingSuggestionMetrics.Builder addSuggestionsBuilder() { return getSuggestionsFieldBuilder().addBuilder( com.google.ads.googleads.v20.services.TargetingSuggestionMetrics.getDefaultInstance()); } /** * <pre> * Suggested targetable audiences. There will be one suggestion for each * [GenerateTargetingSuggestionMetricsRequest.audiences] requested, matching * the order requested. * </pre> * * <code>repeated .google.ads.googleads.v20.services.TargetingSuggestionMetrics suggestions = 1;</code> */ public com.google.ads.googleads.v20.services.TargetingSuggestionMetrics.Builder addSuggestionsBuilder( int index) { return getSuggestionsFieldBuilder().addBuilder( index, com.google.ads.googleads.v20.services.TargetingSuggestionMetrics.getDefaultInstance()); } /** * <pre> * Suggested targetable audiences. There will be one suggestion for each * [GenerateTargetingSuggestionMetricsRequest.audiences] requested, matching * the order requested. 
* </pre> * * <code>repeated .google.ads.googleads.v20.services.TargetingSuggestionMetrics suggestions = 1;</code> */ public java.util.List<com.google.ads.googleads.v20.services.TargetingSuggestionMetrics.Builder> getSuggestionsBuilderList() { return getSuggestionsFieldBuilder().getBuilderList(); } private com.google.protobuf.RepeatedFieldBuilderV3< com.google.ads.googleads.v20.services.TargetingSuggestionMetrics, com.google.ads.googleads.v20.services.TargetingSuggestionMetrics.Builder, com.google.ads.googleads.v20.services.TargetingSuggestionMetricsOrBuilder> getSuggestionsFieldBuilder() { if (suggestionsBuilder_ == null) { suggestionsBuilder_ = new com.google.protobuf.RepeatedFieldBuilderV3< com.google.ads.googleads.v20.services.TargetingSuggestionMetrics, com.google.ads.googleads.v20.services.TargetingSuggestionMetrics.Builder, com.google.ads.googleads.v20.services.TargetingSuggestionMetricsOrBuilder>( suggestions_, ((bitField0_ & 0x00000001) != 0), getParentForChildren(), isClean()); suggestions_ = null; } return suggestionsBuilder_; } @java.lang.Override public final Builder setUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:google.ads.googleads.v20.services.GenerateTargetingSuggestionMetricsResponse) } // @@protoc_insertion_point(class_scope:google.ads.googleads.v20.services.GenerateTargetingSuggestionMetricsResponse) private static final com.google.ads.googleads.v20.services.GenerateTargetingSuggestionMetricsResponse DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new com.google.ads.googleads.v20.services.GenerateTargetingSuggestionMetricsResponse(); } public static com.google.ads.googleads.v20.services.GenerateTargetingSuggestionMetricsResponse getDefaultInstance() { return 
DEFAULT_INSTANCE; } private static final com.google.protobuf.Parser<GenerateTargetingSuggestionMetricsResponse> PARSER = new com.google.protobuf.AbstractParser<GenerateTargetingSuggestionMetricsResponse>() { @java.lang.Override public GenerateTargetingSuggestionMetricsResponse parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { Builder builder = newBuilder(); try { builder.mergeFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(builder.buildPartial()); } catch (com.google.protobuf.UninitializedMessageException e) { throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException(e) .setUnfinishedMessage(builder.buildPartial()); } return builder.buildPartial(); } }; public static com.google.protobuf.Parser<GenerateTargetingSuggestionMetricsResponse> parser() { return PARSER; } @java.lang.Override public com.google.protobuf.Parser<GenerateTargetingSuggestionMetricsResponse> getParserForType() { return PARSER; } @java.lang.Override public com.google.ads.googleads.v20.services.GenerateTargetingSuggestionMetricsResponse getDefaultInstanceForType() { return DEFAULT_INSTANCE; } }
googleads/google-ads-java
36,415
google-ads-stubs-v21/src/main/java/com/google/ads/googleads/v21/services/GenerateTargetingSuggestionMetricsResponse.java
// Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/ads/googleads/v21/services/audience_insights_service.proto // Protobuf Java Version: 3.25.7 package com.google.ads.googleads.v21.services; /** * <pre> * Response message for * [AudienceInsightsService.GenerateTargetingSuggestionMetrics][google.ads.googleads.v21.services.AudienceInsightsService.GenerateTargetingSuggestionMetrics]. * </pre> * * Protobuf type {@code google.ads.googleads.v21.services.GenerateTargetingSuggestionMetricsResponse} */ public final class GenerateTargetingSuggestionMetricsResponse extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.ads.googleads.v21.services.GenerateTargetingSuggestionMetricsResponse) GenerateTargetingSuggestionMetricsResponseOrBuilder { private static final long serialVersionUID = 0L; // Use GenerateTargetingSuggestionMetricsResponse.newBuilder() to construct. private GenerateTargetingSuggestionMetricsResponse(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private GenerateTargetingSuggestionMetricsResponse() { suggestions_ = java.util.Collections.emptyList(); } @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance( UnusedPrivateParameter unused) { return new GenerateTargetingSuggestionMetricsResponse(); } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.ads.googleads.v21.services.AudienceInsightsServiceProto.internal_static_google_ads_googleads_v21_services_GenerateTargetingSuggestionMetricsResponse_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.ads.googleads.v21.services.AudienceInsightsServiceProto.internal_static_google_ads_googleads_v21_services_GenerateTargetingSuggestionMetricsResponse_fieldAccessorTable .ensureFieldAccessorsInitialized( 
com.google.ads.googleads.v21.services.GenerateTargetingSuggestionMetricsResponse.class, com.google.ads.googleads.v21.services.GenerateTargetingSuggestionMetricsResponse.Builder.class); } public static final int SUGGESTIONS_FIELD_NUMBER = 1; @SuppressWarnings("serial") private java.util.List<com.google.ads.googleads.v21.services.TargetingSuggestionMetrics> suggestions_; /** * <pre> * Suggested targetable audiences. There will be one suggestion for each * [GenerateTargetingSuggestionMetricsRequest.audiences] requested, matching * the order requested. * </pre> * * <code>repeated .google.ads.googleads.v21.services.TargetingSuggestionMetrics suggestions = 1;</code> */ @java.lang.Override public java.util.List<com.google.ads.googleads.v21.services.TargetingSuggestionMetrics> getSuggestionsList() { return suggestions_; } /** * <pre> * Suggested targetable audiences. There will be one suggestion for each * [GenerateTargetingSuggestionMetricsRequest.audiences] requested, matching * the order requested. * </pre> * * <code>repeated .google.ads.googleads.v21.services.TargetingSuggestionMetrics suggestions = 1;</code> */ @java.lang.Override public java.util.List<? extends com.google.ads.googleads.v21.services.TargetingSuggestionMetricsOrBuilder> getSuggestionsOrBuilderList() { return suggestions_; } /** * <pre> * Suggested targetable audiences. There will be one suggestion for each * [GenerateTargetingSuggestionMetricsRequest.audiences] requested, matching * the order requested. * </pre> * * <code>repeated .google.ads.googleads.v21.services.TargetingSuggestionMetrics suggestions = 1;</code> */ @java.lang.Override public int getSuggestionsCount() { return suggestions_.size(); } /** * <pre> * Suggested targetable audiences. There will be one suggestion for each * [GenerateTargetingSuggestionMetricsRequest.audiences] requested, matching * the order requested. 
* </pre> * * <code>repeated .google.ads.googleads.v21.services.TargetingSuggestionMetrics suggestions = 1;</code> */ @java.lang.Override public com.google.ads.googleads.v21.services.TargetingSuggestionMetrics getSuggestions(int index) { return suggestions_.get(index); } /** * <pre> * Suggested targetable audiences. There will be one suggestion for each * [GenerateTargetingSuggestionMetricsRequest.audiences] requested, matching * the order requested. * </pre> * * <code>repeated .google.ads.googleads.v21.services.TargetingSuggestionMetrics suggestions = 1;</code> */ @java.lang.Override public com.google.ads.googleads.v21.services.TargetingSuggestionMetricsOrBuilder getSuggestionsOrBuilder( int index) { return suggestions_.get(index); } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { for (int i = 0; i < suggestions_.size(); i++) { output.writeMessage(1, suggestions_.get(i)); } getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; for (int i = 0; i < suggestions_.size(); i++) { size += com.google.protobuf.CodedOutputStream .computeMessageSize(1, suggestions_.get(i)); } size += getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof com.google.ads.googleads.v21.services.GenerateTargetingSuggestionMetricsResponse)) { return super.equals(obj); } com.google.ads.googleads.v21.services.GenerateTargetingSuggestionMetricsResponse other = 
(com.google.ads.googleads.v21.services.GenerateTargetingSuggestionMetricsResponse) obj; if (!getSuggestionsList() .equals(other.getSuggestionsList())) return false; if (!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); if (getSuggestionsCount() > 0) { hash = (37 * hash) + SUGGESTIONS_FIELD_NUMBER; hash = (53 * hash) + getSuggestionsList().hashCode(); } hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static com.google.ads.googleads.v21.services.GenerateTargetingSuggestionMetricsResponse parseFrom( java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.ads.googleads.v21.services.GenerateTargetingSuggestionMetricsResponse parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.ads.googleads.v21.services.GenerateTargetingSuggestionMetricsResponse parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.ads.googleads.v21.services.GenerateTargetingSuggestionMetricsResponse parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.ads.googleads.v21.services.GenerateTargetingSuggestionMetricsResponse parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static 
com.google.ads.googleads.v21.services.GenerateTargetingSuggestionMetricsResponse parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.ads.googleads.v21.services.GenerateTargetingSuggestionMetricsResponse parseFrom(java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static com.google.ads.googleads.v21.services.GenerateTargetingSuggestionMetricsResponse parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } public static com.google.ads.googleads.v21.services.GenerateTargetingSuggestionMetricsResponse parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input); } public static com.google.ads.googleads.v21.services.GenerateTargetingSuggestionMetricsResponse parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static com.google.ads.googleads.v21.services.GenerateTargetingSuggestionMetricsResponse parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static com.google.ads.googleads.v21.services.GenerateTargetingSuggestionMetricsResponse parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return 
com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder(com.google.ads.googleads.v21.services.GenerateTargetingSuggestionMetricsResponse prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType( com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * <pre> * Response message for * [AudienceInsightsService.GenerateTargetingSuggestionMetrics][google.ads.googleads.v21.services.AudienceInsightsService.GenerateTargetingSuggestionMetrics]. * </pre> * * Protobuf type {@code google.ads.googleads.v21.services.GenerateTargetingSuggestionMetricsResponse} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:google.ads.googleads.v21.services.GenerateTargetingSuggestionMetricsResponse) com.google.ads.googleads.v21.services.GenerateTargetingSuggestionMetricsResponseOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.ads.googleads.v21.services.AudienceInsightsServiceProto.internal_static_google_ads_googleads_v21_services_GenerateTargetingSuggestionMetricsResponse_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.ads.googleads.v21.services.AudienceInsightsServiceProto.internal_static_google_ads_googleads_v21_services_GenerateTargetingSuggestionMetricsResponse_fieldAccessorTable .ensureFieldAccessorsInitialized( 
com.google.ads.googleads.v21.services.GenerateTargetingSuggestionMetricsResponse.class, com.google.ads.googleads.v21.services.GenerateTargetingSuggestionMetricsResponse.Builder.class); } // Construct using com.google.ads.googleads.v21.services.GenerateTargetingSuggestionMetricsResponse.newBuilder() private Builder() { } private Builder( com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); } @java.lang.Override public Builder clear() { super.clear(); bitField0_ = 0; if (suggestionsBuilder_ == null) { suggestions_ = java.util.Collections.emptyList(); } else { suggestions_ = null; suggestionsBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000001); return this; } @java.lang.Override public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return com.google.ads.googleads.v21.services.AudienceInsightsServiceProto.internal_static_google_ads_googleads_v21_services_GenerateTargetingSuggestionMetricsResponse_descriptor; } @java.lang.Override public com.google.ads.googleads.v21.services.GenerateTargetingSuggestionMetricsResponse getDefaultInstanceForType() { return com.google.ads.googleads.v21.services.GenerateTargetingSuggestionMetricsResponse.getDefaultInstance(); } @java.lang.Override public com.google.ads.googleads.v21.services.GenerateTargetingSuggestionMetricsResponse build() { com.google.ads.googleads.v21.services.GenerateTargetingSuggestionMetricsResponse result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public com.google.ads.googleads.v21.services.GenerateTargetingSuggestionMetricsResponse buildPartial() { com.google.ads.googleads.v21.services.GenerateTargetingSuggestionMetricsResponse result = new com.google.ads.googleads.v21.services.GenerateTargetingSuggestionMetricsResponse(this); buildPartialRepeatedFields(result); if (bitField0_ != 0) { buildPartial0(result); } onBuilt(); return result; } private void 
buildPartialRepeatedFields(com.google.ads.googleads.v21.services.GenerateTargetingSuggestionMetricsResponse result) { if (suggestionsBuilder_ == null) { if (((bitField0_ & 0x00000001) != 0)) { suggestions_ = java.util.Collections.unmodifiableList(suggestions_); bitField0_ = (bitField0_ & ~0x00000001); } result.suggestions_ = suggestions_; } else { result.suggestions_ = suggestionsBuilder_.build(); } } private void buildPartial0(com.google.ads.googleads.v21.services.GenerateTargetingSuggestionMetricsResponse result) { int from_bitField0_ = bitField0_; } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField( com.google.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof( com.google.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof com.google.ads.googleads.v21.services.GenerateTargetingSuggestionMetricsResponse) { return mergeFrom((com.google.ads.googleads.v21.services.GenerateTargetingSuggestionMetricsResponse)other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(com.google.ads.googleads.v21.services.GenerateTargetingSuggestionMetricsResponse other) { if (other == 
com.google.ads.googleads.v21.services.GenerateTargetingSuggestionMetricsResponse.getDefaultInstance()) return this; if (suggestionsBuilder_ == null) { if (!other.suggestions_.isEmpty()) { if (suggestions_.isEmpty()) { suggestions_ = other.suggestions_; bitField0_ = (bitField0_ & ~0x00000001); } else { ensureSuggestionsIsMutable(); suggestions_.addAll(other.suggestions_); } onChanged(); } } else { if (!other.suggestions_.isEmpty()) { if (suggestionsBuilder_.isEmpty()) { suggestionsBuilder_.dispose(); suggestionsBuilder_ = null; suggestions_ = other.suggestions_; bitField0_ = (bitField0_ & ~0x00000001); suggestionsBuilder_ = com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ? getSuggestionsFieldBuilder() : null; } else { suggestionsBuilder_.addAllMessages(other.suggestions_); } } } this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { com.google.ads.googleads.v21.services.TargetingSuggestionMetrics m = input.readMessage( com.google.ads.googleads.v21.services.TargetingSuggestionMetrics.parser(), extensionRegistry); if (suggestionsBuilder_ == null) { ensureSuggestionsIsMutable(); suggestions_.add(m); } else { suggestionsBuilder_.addMessage(m); } break; } // case 10 default: { if (!super.parseUnknownField(input, extensionRegistry, tag)) { done = true; // was an endgroup tag } break; } // default: } // switch (tag) } // while (!done) } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.unwrapIOException(); } finally { onChanged(); } // finally return this; } private 
int bitField0_; private java.util.List<com.google.ads.googleads.v21.services.TargetingSuggestionMetrics> suggestions_ = java.util.Collections.emptyList(); private void ensureSuggestionsIsMutable() { if (!((bitField0_ & 0x00000001) != 0)) { suggestions_ = new java.util.ArrayList<com.google.ads.googleads.v21.services.TargetingSuggestionMetrics>(suggestions_); bitField0_ |= 0x00000001; } } private com.google.protobuf.RepeatedFieldBuilderV3< com.google.ads.googleads.v21.services.TargetingSuggestionMetrics, com.google.ads.googleads.v21.services.TargetingSuggestionMetrics.Builder, com.google.ads.googleads.v21.services.TargetingSuggestionMetricsOrBuilder> suggestionsBuilder_; /** * <pre> * Suggested targetable audiences. There will be one suggestion for each * [GenerateTargetingSuggestionMetricsRequest.audiences] requested, matching * the order requested. * </pre> * * <code>repeated .google.ads.googleads.v21.services.TargetingSuggestionMetrics suggestions = 1;</code> */ public java.util.List<com.google.ads.googleads.v21.services.TargetingSuggestionMetrics> getSuggestionsList() { if (suggestionsBuilder_ == null) { return java.util.Collections.unmodifiableList(suggestions_); } else { return suggestionsBuilder_.getMessageList(); } } /** * <pre> * Suggested targetable audiences. There will be one suggestion for each * [GenerateTargetingSuggestionMetricsRequest.audiences] requested, matching * the order requested. * </pre> * * <code>repeated .google.ads.googleads.v21.services.TargetingSuggestionMetrics suggestions = 1;</code> */ public int getSuggestionsCount() { if (suggestionsBuilder_ == null) { return suggestions_.size(); } else { return suggestionsBuilder_.getCount(); } } /** * <pre> * Suggested targetable audiences. There will be one suggestion for each * [GenerateTargetingSuggestionMetricsRequest.audiences] requested, matching * the order requested. 
* </pre> * * <code>repeated .google.ads.googleads.v21.services.TargetingSuggestionMetrics suggestions = 1;</code> */ public com.google.ads.googleads.v21.services.TargetingSuggestionMetrics getSuggestions(int index) { if (suggestionsBuilder_ == null) { return suggestions_.get(index); } else { return suggestionsBuilder_.getMessage(index); } } /** * <pre> * Suggested targetable audiences. There will be one suggestion for each * [GenerateTargetingSuggestionMetricsRequest.audiences] requested, matching * the order requested. * </pre> * * <code>repeated .google.ads.googleads.v21.services.TargetingSuggestionMetrics suggestions = 1;</code> */ public Builder setSuggestions( int index, com.google.ads.googleads.v21.services.TargetingSuggestionMetrics value) { if (suggestionsBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureSuggestionsIsMutable(); suggestions_.set(index, value); onChanged(); } else { suggestionsBuilder_.setMessage(index, value); } return this; } /** * <pre> * Suggested targetable audiences. There will be one suggestion for each * [GenerateTargetingSuggestionMetricsRequest.audiences] requested, matching * the order requested. * </pre> * * <code>repeated .google.ads.googleads.v21.services.TargetingSuggestionMetrics suggestions = 1;</code> */ public Builder setSuggestions( int index, com.google.ads.googleads.v21.services.TargetingSuggestionMetrics.Builder builderForValue) { if (suggestionsBuilder_ == null) { ensureSuggestionsIsMutable(); suggestions_.set(index, builderForValue.build()); onChanged(); } else { suggestionsBuilder_.setMessage(index, builderForValue.build()); } return this; } /** * <pre> * Suggested targetable audiences. There will be one suggestion for each * [GenerateTargetingSuggestionMetricsRequest.audiences] requested, matching * the order requested. 
* </pre> * * <code>repeated .google.ads.googleads.v21.services.TargetingSuggestionMetrics suggestions = 1;</code> */ public Builder addSuggestions(com.google.ads.googleads.v21.services.TargetingSuggestionMetrics value) { if (suggestionsBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureSuggestionsIsMutable(); suggestions_.add(value); onChanged(); } else { suggestionsBuilder_.addMessage(value); } return this; } /** * <pre> * Suggested targetable audiences. There will be one suggestion for each * [GenerateTargetingSuggestionMetricsRequest.audiences] requested, matching * the order requested. * </pre> * * <code>repeated .google.ads.googleads.v21.services.TargetingSuggestionMetrics suggestions = 1;</code> */ public Builder addSuggestions( int index, com.google.ads.googleads.v21.services.TargetingSuggestionMetrics value) { if (suggestionsBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureSuggestionsIsMutable(); suggestions_.add(index, value); onChanged(); } else { suggestionsBuilder_.addMessage(index, value); } return this; } /** * <pre> * Suggested targetable audiences. There will be one suggestion for each * [GenerateTargetingSuggestionMetricsRequest.audiences] requested, matching * the order requested. * </pre> * * <code>repeated .google.ads.googleads.v21.services.TargetingSuggestionMetrics suggestions = 1;</code> */ public Builder addSuggestions( com.google.ads.googleads.v21.services.TargetingSuggestionMetrics.Builder builderForValue) { if (suggestionsBuilder_ == null) { ensureSuggestionsIsMutable(); suggestions_.add(builderForValue.build()); onChanged(); } else { suggestionsBuilder_.addMessage(builderForValue.build()); } return this; } /** * <pre> * Suggested targetable audiences. There will be one suggestion for each * [GenerateTargetingSuggestionMetricsRequest.audiences] requested, matching * the order requested. 
* </pre> * * <code>repeated .google.ads.googleads.v21.services.TargetingSuggestionMetrics suggestions = 1;</code> */ public Builder addSuggestions( int index, com.google.ads.googleads.v21.services.TargetingSuggestionMetrics.Builder builderForValue) { if (suggestionsBuilder_ == null) { ensureSuggestionsIsMutable(); suggestions_.add(index, builderForValue.build()); onChanged(); } else { suggestionsBuilder_.addMessage(index, builderForValue.build()); } return this; } /** * <pre> * Suggested targetable audiences. There will be one suggestion for each * [GenerateTargetingSuggestionMetricsRequest.audiences] requested, matching * the order requested. * </pre> * * <code>repeated .google.ads.googleads.v21.services.TargetingSuggestionMetrics suggestions = 1;</code> */ public Builder addAllSuggestions( java.lang.Iterable<? extends com.google.ads.googleads.v21.services.TargetingSuggestionMetrics> values) { if (suggestionsBuilder_ == null) { ensureSuggestionsIsMutable(); com.google.protobuf.AbstractMessageLite.Builder.addAll( values, suggestions_); onChanged(); } else { suggestionsBuilder_.addAllMessages(values); } return this; } /** * <pre> * Suggested targetable audiences. There will be one suggestion for each * [GenerateTargetingSuggestionMetricsRequest.audiences] requested, matching * the order requested. * </pre> * * <code>repeated .google.ads.googleads.v21.services.TargetingSuggestionMetrics suggestions = 1;</code> */ public Builder clearSuggestions() { if (suggestionsBuilder_ == null) { suggestions_ = java.util.Collections.emptyList(); bitField0_ = (bitField0_ & ~0x00000001); onChanged(); } else { suggestionsBuilder_.clear(); } return this; } /** * <pre> * Suggested targetable audiences. There will be one suggestion for each * [GenerateTargetingSuggestionMetricsRequest.audiences] requested, matching * the order requested. 
* </pre> * * <code>repeated .google.ads.googleads.v21.services.TargetingSuggestionMetrics suggestions = 1;</code> */ public Builder removeSuggestions(int index) { if (suggestionsBuilder_ == null) { ensureSuggestionsIsMutable(); suggestions_.remove(index); onChanged(); } else { suggestionsBuilder_.remove(index); } return this; } /** * <pre> * Suggested targetable audiences. There will be one suggestion for each * [GenerateTargetingSuggestionMetricsRequest.audiences] requested, matching * the order requested. * </pre> * * <code>repeated .google.ads.googleads.v21.services.TargetingSuggestionMetrics suggestions = 1;</code> */ public com.google.ads.googleads.v21.services.TargetingSuggestionMetrics.Builder getSuggestionsBuilder( int index) { return getSuggestionsFieldBuilder().getBuilder(index); } /** * <pre> * Suggested targetable audiences. There will be one suggestion for each * [GenerateTargetingSuggestionMetricsRequest.audiences] requested, matching * the order requested. * </pre> * * <code>repeated .google.ads.googleads.v21.services.TargetingSuggestionMetrics suggestions = 1;</code> */ public com.google.ads.googleads.v21.services.TargetingSuggestionMetricsOrBuilder getSuggestionsOrBuilder( int index) { if (suggestionsBuilder_ == null) { return suggestions_.get(index); } else { return suggestionsBuilder_.getMessageOrBuilder(index); } } /** * <pre> * Suggested targetable audiences. There will be one suggestion for each * [GenerateTargetingSuggestionMetricsRequest.audiences] requested, matching * the order requested. * </pre> * * <code>repeated .google.ads.googleads.v21.services.TargetingSuggestionMetrics suggestions = 1;</code> */ public java.util.List<? 
extends com.google.ads.googleads.v21.services.TargetingSuggestionMetricsOrBuilder> getSuggestionsOrBuilderList() { if (suggestionsBuilder_ != null) { return suggestionsBuilder_.getMessageOrBuilderList(); } else { return java.util.Collections.unmodifiableList(suggestions_); } } /** * <pre> * Suggested targetable audiences. There will be one suggestion for each * [GenerateTargetingSuggestionMetricsRequest.audiences] requested, matching * the order requested. * </pre> * * <code>repeated .google.ads.googleads.v21.services.TargetingSuggestionMetrics suggestions = 1;</code> */ public com.google.ads.googleads.v21.services.TargetingSuggestionMetrics.Builder addSuggestionsBuilder() { return getSuggestionsFieldBuilder().addBuilder( com.google.ads.googleads.v21.services.TargetingSuggestionMetrics.getDefaultInstance()); } /** * <pre> * Suggested targetable audiences. There will be one suggestion for each * [GenerateTargetingSuggestionMetricsRequest.audiences] requested, matching * the order requested. * </pre> * * <code>repeated .google.ads.googleads.v21.services.TargetingSuggestionMetrics suggestions = 1;</code> */ public com.google.ads.googleads.v21.services.TargetingSuggestionMetrics.Builder addSuggestionsBuilder( int index) { return getSuggestionsFieldBuilder().addBuilder( index, com.google.ads.googleads.v21.services.TargetingSuggestionMetrics.getDefaultInstance()); } /** * <pre> * Suggested targetable audiences. There will be one suggestion for each * [GenerateTargetingSuggestionMetricsRequest.audiences] requested, matching * the order requested. 
* </pre> * * <code>repeated .google.ads.googleads.v21.services.TargetingSuggestionMetrics suggestions = 1;</code> */ public java.util.List<com.google.ads.googleads.v21.services.TargetingSuggestionMetrics.Builder> getSuggestionsBuilderList() { return getSuggestionsFieldBuilder().getBuilderList(); } private com.google.protobuf.RepeatedFieldBuilderV3< com.google.ads.googleads.v21.services.TargetingSuggestionMetrics, com.google.ads.googleads.v21.services.TargetingSuggestionMetrics.Builder, com.google.ads.googleads.v21.services.TargetingSuggestionMetricsOrBuilder> getSuggestionsFieldBuilder() { if (suggestionsBuilder_ == null) { suggestionsBuilder_ = new com.google.protobuf.RepeatedFieldBuilderV3< com.google.ads.googleads.v21.services.TargetingSuggestionMetrics, com.google.ads.googleads.v21.services.TargetingSuggestionMetrics.Builder, com.google.ads.googleads.v21.services.TargetingSuggestionMetricsOrBuilder>( suggestions_, ((bitField0_ & 0x00000001) != 0), getParentForChildren(), isClean()); suggestions_ = null; } return suggestionsBuilder_; } @java.lang.Override public final Builder setUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:google.ads.googleads.v21.services.GenerateTargetingSuggestionMetricsResponse) } // @@protoc_insertion_point(class_scope:google.ads.googleads.v21.services.GenerateTargetingSuggestionMetricsResponse) private static final com.google.ads.googleads.v21.services.GenerateTargetingSuggestionMetricsResponse DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new com.google.ads.googleads.v21.services.GenerateTargetingSuggestionMetricsResponse(); } public static com.google.ads.googleads.v21.services.GenerateTargetingSuggestionMetricsResponse getDefaultInstance() { return 
DEFAULT_INSTANCE; } private static final com.google.protobuf.Parser<GenerateTargetingSuggestionMetricsResponse> PARSER = new com.google.protobuf.AbstractParser<GenerateTargetingSuggestionMetricsResponse>() { @java.lang.Override public GenerateTargetingSuggestionMetricsResponse parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { Builder builder = newBuilder(); try { builder.mergeFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(builder.buildPartial()); } catch (com.google.protobuf.UninitializedMessageException e) { throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException(e) .setUnfinishedMessage(builder.buildPartial()); } return builder.buildPartial(); } }; public static com.google.protobuf.Parser<GenerateTargetingSuggestionMetricsResponse> parser() { return PARSER; } @java.lang.Override public com.google.protobuf.Parser<GenerateTargetingSuggestionMetricsResponse> getParserForType() { return PARSER; } @java.lang.Override public com.google.ads.googleads.v21.services.GenerateTargetingSuggestionMetricsResponse getDefaultInstanceForType() { return DEFAULT_INSTANCE; } }
googleapis/google-cloud-java
36,182
java-translate/proto-google-cloud-translate-v3beta1/src/main/java/com/google/cloud/translate/v3beta1/DocumentTranslation.java
/* * Copyright 2025 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ // Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/cloud/translate/v3beta1/translation_service.proto // Protobuf Java Version: 3.25.8 package com.google.cloud.translate.v3beta1; /** * * * <pre> * A translated document message. * </pre> * * Protobuf type {@code google.cloud.translation.v3beta1.DocumentTranslation} */ public final class DocumentTranslation extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.cloud.translation.v3beta1.DocumentTranslation) DocumentTranslationOrBuilder { private static final long serialVersionUID = 0L; // Use DocumentTranslation.newBuilder() to construct. 
private DocumentTranslation(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private DocumentTranslation() { byteStreamOutputs_ = emptyList(com.google.protobuf.ByteString.class); mimeType_ = ""; detectedLanguageCode_ = ""; } @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance(UnusedPrivateParameter unused) { return new DocumentTranslation(); } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.translate.v3beta1.TranslationServiceProto .internal_static_google_cloud_translation_v3beta1_DocumentTranslation_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.translate.v3beta1.TranslationServiceProto .internal_static_google_cloud_translation_v3beta1_DocumentTranslation_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.translate.v3beta1.DocumentTranslation.class, com.google.cloud.translate.v3beta1.DocumentTranslation.Builder.class); } public static final int BYTE_STREAM_OUTPUTS_FIELD_NUMBER = 1; @SuppressWarnings("serial") private com.google.protobuf.Internal.ProtobufList<com.google.protobuf.ByteString> byteStreamOutputs_ = emptyList(com.google.protobuf.ByteString.class); /** * * * <pre> * The array of translated documents. It is expected to be size 1 for now. We * may produce multiple translated documents in the future for other type of * file formats. * </pre> * * <code>repeated bytes byte_stream_outputs = 1;</code> * * @return A list containing the byteStreamOutputs. */ @java.lang.Override public java.util.List<com.google.protobuf.ByteString> getByteStreamOutputsList() { return byteStreamOutputs_; } /** * * * <pre> * The array of translated documents. It is expected to be size 1 for now. We * may produce multiple translated documents in the future for other type of * file formats. 
* </pre> * * <code>repeated bytes byte_stream_outputs = 1;</code> * * @return The count of byteStreamOutputs. */ public int getByteStreamOutputsCount() { return byteStreamOutputs_.size(); } /** * * * <pre> * The array of translated documents. It is expected to be size 1 for now. We * may produce multiple translated documents in the future for other type of * file formats. * </pre> * * <code>repeated bytes byte_stream_outputs = 1;</code> * * @param index The index of the element to return. * @return The byteStreamOutputs at the given index. */ public com.google.protobuf.ByteString getByteStreamOutputs(int index) { return byteStreamOutputs_.get(index); } public static final int MIME_TYPE_FIELD_NUMBER = 2; @SuppressWarnings("serial") private volatile java.lang.Object mimeType_ = ""; /** * * * <pre> * The translated document's mime type. * </pre> * * <code>string mime_type = 2;</code> * * @return The mimeType. */ @java.lang.Override public java.lang.String getMimeType() { java.lang.Object ref = mimeType_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); mimeType_ = s; return s; } } /** * * * <pre> * The translated document's mime type. * </pre> * * <code>string mime_type = 2;</code> * * @return The bytes for mimeType. */ @java.lang.Override public com.google.protobuf.ByteString getMimeTypeBytes() { java.lang.Object ref = mimeType_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); mimeType_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } public static final int DETECTED_LANGUAGE_CODE_FIELD_NUMBER = 3; @SuppressWarnings("serial") private volatile java.lang.Object detectedLanguageCode_ = ""; /** * * * <pre> * The detected language for the input document. 
* If the user did not provide the source language for the input document, * this field will have the language code automatically detected. If the * source language was passed, auto-detection of the language does not occur * and this field is empty. * </pre> * * <code>string detected_language_code = 3;</code> * * @return The detectedLanguageCode. */ @java.lang.Override public java.lang.String getDetectedLanguageCode() { java.lang.Object ref = detectedLanguageCode_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); detectedLanguageCode_ = s; return s; } } /** * * * <pre> * The detected language for the input document. * If the user did not provide the source language for the input document, * this field will have the language code automatically detected. If the * source language was passed, auto-detection of the language does not occur * and this field is empty. * </pre> * * <code>string detected_language_code = 3;</code> * * @return The bytes for detectedLanguageCode. 
*/ @java.lang.Override public com.google.protobuf.ByteString getDetectedLanguageCodeBytes() { java.lang.Object ref = detectedLanguageCode_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); detectedLanguageCode_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { for (int i = 0; i < byteStreamOutputs_.size(); i++) { output.writeBytes(1, byteStreamOutputs_.get(i)); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(mimeType_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 2, mimeType_); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(detectedLanguageCode_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 3, detectedLanguageCode_); } getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; { int dataSize = 0; for (int i = 0; i < byteStreamOutputs_.size(); i++) { dataSize += com.google.protobuf.CodedOutputStream.computeBytesSizeNoTag(byteStreamOutputs_.get(i)); } size += dataSize; size += 1 * getByteStreamOutputsList().size(); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(mimeType_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, mimeType_); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(detectedLanguageCode_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(3, detectedLanguageCode_); } size += getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } 
@java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof com.google.cloud.translate.v3beta1.DocumentTranslation)) { return super.equals(obj); } com.google.cloud.translate.v3beta1.DocumentTranslation other = (com.google.cloud.translate.v3beta1.DocumentTranslation) obj; if (!getByteStreamOutputsList().equals(other.getByteStreamOutputsList())) return false; if (!getMimeType().equals(other.getMimeType())) return false; if (!getDetectedLanguageCode().equals(other.getDetectedLanguageCode())) return false; if (!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); if (getByteStreamOutputsCount() > 0) { hash = (37 * hash) + BYTE_STREAM_OUTPUTS_FIELD_NUMBER; hash = (53 * hash) + getByteStreamOutputsList().hashCode(); } hash = (37 * hash) + MIME_TYPE_FIELD_NUMBER; hash = (53 * hash) + getMimeType().hashCode(); hash = (37 * hash) + DETECTED_LANGUAGE_CODE_FIELD_NUMBER; hash = (53 * hash) + getDetectedLanguageCode().hashCode(); hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static com.google.cloud.translate.v3beta1.DocumentTranslation parseFrom( java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.translate.v3beta1.DocumentTranslation parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.translate.v3beta1.DocumentTranslation parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static 
com.google.cloud.translate.v3beta1.DocumentTranslation parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.translate.v3beta1.DocumentTranslation parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.translate.v3beta1.DocumentTranslation parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.translate.v3beta1.DocumentTranslation parseFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.translate.v3beta1.DocumentTranslation parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.translate.v3beta1.DocumentTranslation parseDelimitedFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); } public static com.google.cloud.translate.v3beta1.DocumentTranslation parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.translate.v3beta1.DocumentTranslation parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return 
com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.translate.v3beta1.DocumentTranslation parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder( com.google.cloud.translate.v3beta1.DocumentTranslation prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * * * <pre> * A translated document message. 
* </pre> * * Protobuf type {@code google.cloud.translation.v3beta1.DocumentTranslation} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:google.cloud.translation.v3beta1.DocumentTranslation) com.google.cloud.translate.v3beta1.DocumentTranslationOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.translate.v3beta1.TranslationServiceProto .internal_static_google_cloud_translation_v3beta1_DocumentTranslation_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.translate.v3beta1.TranslationServiceProto .internal_static_google_cloud_translation_v3beta1_DocumentTranslation_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.translate.v3beta1.DocumentTranslation.class, com.google.cloud.translate.v3beta1.DocumentTranslation.Builder.class); } // Construct using com.google.cloud.translate.v3beta1.DocumentTranslation.newBuilder() private Builder() {} private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); } @java.lang.Override public Builder clear() { super.clear(); bitField0_ = 0; byteStreamOutputs_ = emptyList(com.google.protobuf.ByteString.class); mimeType_ = ""; detectedLanguageCode_ = ""; return this; } @java.lang.Override public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return com.google.cloud.translate.v3beta1.TranslationServiceProto .internal_static_google_cloud_translation_v3beta1_DocumentTranslation_descriptor; } @java.lang.Override public com.google.cloud.translate.v3beta1.DocumentTranslation getDefaultInstanceForType() { return com.google.cloud.translate.v3beta1.DocumentTranslation.getDefaultInstance(); } @java.lang.Override public com.google.cloud.translate.v3beta1.DocumentTranslation build() { 
com.google.cloud.translate.v3beta1.DocumentTranslation result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public com.google.cloud.translate.v3beta1.DocumentTranslation buildPartial() { com.google.cloud.translate.v3beta1.DocumentTranslation result = new com.google.cloud.translate.v3beta1.DocumentTranslation(this); if (bitField0_ != 0) { buildPartial0(result); } onBuilt(); return result; } private void buildPartial0(com.google.cloud.translate.v3beta1.DocumentTranslation result) { int from_bitField0_ = bitField0_; if (((from_bitField0_ & 0x00000001) != 0)) { byteStreamOutputs_.makeImmutable(); result.byteStreamOutputs_ = byteStreamOutputs_; } if (((from_bitField0_ & 0x00000002) != 0)) { result.mimeType_ = mimeType_; } if (((from_bitField0_ & 0x00000004) != 0)) { result.detectedLanguageCode_ = detectedLanguageCode_; } } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof com.google.cloud.translate.v3beta1.DocumentTranslation) { return 
mergeFrom((com.google.cloud.translate.v3beta1.DocumentTranslation) other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(com.google.cloud.translate.v3beta1.DocumentTranslation other) { if (other == com.google.cloud.translate.v3beta1.DocumentTranslation.getDefaultInstance()) return this; if (!other.byteStreamOutputs_.isEmpty()) { if (byteStreamOutputs_.isEmpty()) { byteStreamOutputs_ = other.byteStreamOutputs_; byteStreamOutputs_.makeImmutable(); bitField0_ |= 0x00000001; } else { ensureByteStreamOutputsIsMutable(); byteStreamOutputs_.addAll(other.byteStreamOutputs_); } onChanged(); } if (!other.getMimeType().isEmpty()) { mimeType_ = other.mimeType_; bitField0_ |= 0x00000002; onChanged(); } if (!other.getDetectedLanguageCode().isEmpty()) { detectedLanguageCode_ = other.detectedLanguageCode_; bitField0_ |= 0x00000004; onChanged(); } this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { com.google.protobuf.ByteString v = input.readBytes(); ensureByteStreamOutputsIsMutable(); byteStreamOutputs_.add(v); break; } // case 10 case 18: { mimeType_ = input.readStringRequireUtf8(); bitField0_ |= 0x00000002; break; } // case 18 case 26: { detectedLanguageCode_ = input.readStringRequireUtf8(); bitField0_ |= 0x00000004; break; } // case 26 default: { if (!super.parseUnknownField(input, extensionRegistry, tag)) { done = true; // was an endgroup tag } break; } // default: } // switch (tag) } // while (!done) } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw 
e.unwrapIOException(); } finally { onChanged(); } // finally return this; } private int bitField0_; private com.google.protobuf.Internal.ProtobufList<com.google.protobuf.ByteString> byteStreamOutputs_ = emptyList(com.google.protobuf.ByteString.class); private void ensureByteStreamOutputsIsMutable() { if (!byteStreamOutputs_.isModifiable()) { byteStreamOutputs_ = makeMutableCopy(byteStreamOutputs_); } bitField0_ |= 0x00000001; } /** * * * <pre> * The array of translated documents. It is expected to be size 1 for now. We * may produce multiple translated documents in the future for other type of * file formats. * </pre> * * <code>repeated bytes byte_stream_outputs = 1;</code> * * @return A list containing the byteStreamOutputs. */ public java.util.List<com.google.protobuf.ByteString> getByteStreamOutputsList() { byteStreamOutputs_.makeImmutable(); return byteStreamOutputs_; } /** * * * <pre> * The array of translated documents. It is expected to be size 1 for now. We * may produce multiple translated documents in the future for other type of * file formats. * </pre> * * <code>repeated bytes byte_stream_outputs = 1;</code> * * @return The count of byteStreamOutputs. */ public int getByteStreamOutputsCount() { return byteStreamOutputs_.size(); } /** * * * <pre> * The array of translated documents. It is expected to be size 1 for now. We * may produce multiple translated documents in the future for other type of * file formats. * </pre> * * <code>repeated bytes byte_stream_outputs = 1;</code> * * @param index The index of the element to return. * @return The byteStreamOutputs at the given index. */ public com.google.protobuf.ByteString getByteStreamOutputs(int index) { return byteStreamOutputs_.get(index); } /** * * * <pre> * The array of translated documents. It is expected to be size 1 for now. We * may produce multiple translated documents in the future for other type of * file formats. 
* </pre> * * <code>repeated bytes byte_stream_outputs = 1;</code> * * @param index The index to set the value at. * @param value The byteStreamOutputs to set. * @return This builder for chaining. */ public Builder setByteStreamOutputs(int index, com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } ensureByteStreamOutputsIsMutable(); byteStreamOutputs_.set(index, value); bitField0_ |= 0x00000001; onChanged(); return this; } /** * * * <pre> * The array of translated documents. It is expected to be size 1 for now. We * may produce multiple translated documents in the future for other type of * file formats. * </pre> * * <code>repeated bytes byte_stream_outputs = 1;</code> * * @param value The byteStreamOutputs to add. * @return This builder for chaining. */ public Builder addByteStreamOutputs(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } ensureByteStreamOutputsIsMutable(); byteStreamOutputs_.add(value); bitField0_ |= 0x00000001; onChanged(); return this; } /** * * * <pre> * The array of translated documents. It is expected to be size 1 for now. We * may produce multiple translated documents in the future for other type of * file formats. * </pre> * * <code>repeated bytes byte_stream_outputs = 1;</code> * * @param values The byteStreamOutputs to add. * @return This builder for chaining. */ public Builder addAllByteStreamOutputs( java.lang.Iterable<? extends com.google.protobuf.ByteString> values) { ensureByteStreamOutputsIsMutable(); com.google.protobuf.AbstractMessageLite.Builder.addAll(values, byteStreamOutputs_); bitField0_ |= 0x00000001; onChanged(); return this; } /** * * * <pre> * The array of translated documents. It is expected to be size 1 for now. We * may produce multiple translated documents in the future for other type of * file formats. * </pre> * * <code>repeated bytes byte_stream_outputs = 1;</code> * * @return This builder for chaining. 
*/ public Builder clearByteStreamOutputs() { byteStreamOutputs_ = emptyList(com.google.protobuf.ByteString.class); bitField0_ = (bitField0_ & ~0x00000001); onChanged(); return this; } private java.lang.Object mimeType_ = ""; /** * * * <pre> * The translated document's mime type. * </pre> * * <code>string mime_type = 2;</code> * * @return The mimeType. */ public java.lang.String getMimeType() { java.lang.Object ref = mimeType_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); mimeType_ = s; return s; } else { return (java.lang.String) ref; } } /** * * * <pre> * The translated document's mime type. * </pre> * * <code>string mime_type = 2;</code> * * @return The bytes for mimeType. */ public com.google.protobuf.ByteString getMimeTypeBytes() { java.lang.Object ref = mimeType_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); mimeType_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * * * <pre> * The translated document's mime type. * </pre> * * <code>string mime_type = 2;</code> * * @param value The mimeType to set. * @return This builder for chaining. */ public Builder setMimeType(java.lang.String value) { if (value == null) { throw new NullPointerException(); } mimeType_ = value; bitField0_ |= 0x00000002; onChanged(); return this; } /** * * * <pre> * The translated document's mime type. * </pre> * * <code>string mime_type = 2;</code> * * @return This builder for chaining. */ public Builder clearMimeType() { mimeType_ = getDefaultInstance().getMimeType(); bitField0_ = (bitField0_ & ~0x00000002); onChanged(); return this; } /** * * * <pre> * The translated document's mime type. * </pre> * * <code>string mime_type = 2;</code> * * @param value The bytes for mimeType to set. * @return This builder for chaining. 
*/ public Builder setMimeTypeBytes(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); mimeType_ = value; bitField0_ |= 0x00000002; onChanged(); return this; } private java.lang.Object detectedLanguageCode_ = ""; /** * * * <pre> * The detected language for the input document. * If the user did not provide the source language for the input document, * this field will have the language code automatically detected. If the * source language was passed, auto-detection of the language does not occur * and this field is empty. * </pre> * * <code>string detected_language_code = 3;</code> * * @return The detectedLanguageCode. */ public java.lang.String getDetectedLanguageCode() { java.lang.Object ref = detectedLanguageCode_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); detectedLanguageCode_ = s; return s; } else { return (java.lang.String) ref; } } /** * * * <pre> * The detected language for the input document. * If the user did not provide the source language for the input document, * this field will have the language code automatically detected. If the * source language was passed, auto-detection of the language does not occur * and this field is empty. * </pre> * * <code>string detected_language_code = 3;</code> * * @return The bytes for detectedLanguageCode. */ public com.google.protobuf.ByteString getDetectedLanguageCodeBytes() { java.lang.Object ref = detectedLanguageCode_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); detectedLanguageCode_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * * * <pre> * The detected language for the input document. 
* If the user did not provide the source language for the input document, * this field will have the language code automatically detected. If the * source language was passed, auto-detection of the language does not occur * and this field is empty. * </pre> * * <code>string detected_language_code = 3;</code> * * @param value The detectedLanguageCode to set. * @return This builder for chaining. */ public Builder setDetectedLanguageCode(java.lang.String value) { if (value == null) { throw new NullPointerException(); } detectedLanguageCode_ = value; bitField0_ |= 0x00000004; onChanged(); return this; } /** * * * <pre> * The detected language for the input document. * If the user did not provide the source language for the input document, * this field will have the language code automatically detected. If the * source language was passed, auto-detection of the language does not occur * and this field is empty. * </pre> * * <code>string detected_language_code = 3;</code> * * @return This builder for chaining. */ public Builder clearDetectedLanguageCode() { detectedLanguageCode_ = getDefaultInstance().getDetectedLanguageCode(); bitField0_ = (bitField0_ & ~0x00000004); onChanged(); return this; } /** * * * <pre> * The detected language for the input document. * If the user did not provide the source language for the input document, * this field will have the language code automatically detected. If the * source language was passed, auto-detection of the language does not occur * and this field is empty. * </pre> * * <code>string detected_language_code = 3;</code> * * @param value The bytes for detectedLanguageCode to set. * @return This builder for chaining. 
*/ public Builder setDetectedLanguageCodeBytes(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); detectedLanguageCode_ = value; bitField0_ |= 0x00000004; onChanged(); return this; } @java.lang.Override public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:google.cloud.translation.v3beta1.DocumentTranslation) } // @@protoc_insertion_point(class_scope:google.cloud.translation.v3beta1.DocumentTranslation) private static final com.google.cloud.translate.v3beta1.DocumentTranslation DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new com.google.cloud.translate.v3beta1.DocumentTranslation(); } public static com.google.cloud.translate.v3beta1.DocumentTranslation getDefaultInstance() { return DEFAULT_INSTANCE; } private static final com.google.protobuf.Parser<DocumentTranslation> PARSER = new com.google.protobuf.AbstractParser<DocumentTranslation>() { @java.lang.Override public DocumentTranslation parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { Builder builder = newBuilder(); try { builder.mergeFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(builder.buildPartial()); } catch (com.google.protobuf.UninitializedMessageException e) { throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException(e) .setUnfinishedMessage(builder.buildPartial()); } return builder.buildPartial(); } 
}; public static com.google.protobuf.Parser<DocumentTranslation> parser() { return PARSER; } @java.lang.Override public com.google.protobuf.Parser<DocumentTranslation> getParserForType() { return PARSER; } @java.lang.Override public com.google.cloud.translate.v3beta1.DocumentTranslation getDefaultInstanceForType() { return DEFAULT_INSTANCE; } }
openjdk/jdk8
36,410
jdk/src/share/classes/javax/management/openmbean/TabularDataSupport.java
/* * Copyright (c) 2000, 2011, Oracle and/or its affiliates. All rights reserved. * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER. * * This code is free software; you can redistribute it and/or modify it * under the terms of the GNU General Public License version 2 only, as * published by the Free Software Foundation. Oracle designates this * particular file as subject to the "Classpath" exception as provided * by Oracle in the LICENSE file that accompanied this code. * * This code is distributed in the hope that it will be useful, but WITHOUT * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License * version 2 for more details (a copy is included in the LICENSE file that * accompanied this code). * * You should have received a copy of the GNU General Public License version * 2 along with this work; if not, write to the Free Software Foundation, * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA. * * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA * or visit www.oracle.com if you need additional information or have any * questions. */ package javax.management.openmbean; // java import // import com.sun.jmx.mbeanserver.GetPropertyAction; import com.sun.jmx.mbeanserver.Util; import java.io.IOException; import java.io.ObjectInputStream; import java.io.Serializable; import java.security.AccessController; import java.util.ArrayList; import java.util.Arrays; import java.util.Collection; import java.util.Collections; import java.util.HashMap; import java.util.Iterator; import java.util.LinkedHashMap; import java.util.List; import java.util.Map; import java.util.Set; // jmx import // /** * The <tt>TabularDataSupport</tt> class is the <i>open data</i> class which implements the <tt>TabularData</tt> * and the <tt>Map</tt> interfaces, and which is internally based on a hash map data structure. 
* * @since 1.5 */ /* It would make much more sense to implement Map<List<?>,CompositeData> here, but unfortunately we cannot for compatibility reasons. If we did that, then we would have to define e.g. CompositeData remove(Object) instead of Object remove(Object). That would mean that if any existing code subclassed TabularDataSupport and overrode Object remove(Object), it would (a) no longer compile and (b) not actually override CompositeData remove(Object) in binaries compiled before the change. */ public class TabularDataSupport implements TabularData, Map<Object,Object>, Cloneable, Serializable { /* Serial version */ static final long serialVersionUID = 5720150593236309827L; /** * @serial This tabular data instance's contents: a {@link HashMap} */ // field cannot be final because of clone method private Map<Object,CompositeData> dataMap; /** * @serial This tabular data instance's tabular type */ private final TabularType tabularType; /** * The array of item names that define the index used for rows (convenience field) */ private transient String[] indexNamesArray; /* *** Constructors *** */ /** * Creates an empty <tt>TabularDataSupport</tt> instance whose open-type is <var>tabularType</var>, * and whose underlying <tt>HashMap</tt> has a default initial capacity (101) and default load factor (0.75). * <p> * This constructor simply calls <tt>this(tabularType, 101, 0.75f);</tt> * * @param tabularType the <i>tabular type</i> describing this <tt>TabularData</tt> instance; * cannot be null. * * @throws IllegalArgumentException if the tabular type is null. */ public TabularDataSupport(TabularType tabularType) { this(tabularType, 16, 0.75f); } /** * Creates an empty <tt>TabularDataSupport</tt> instance whose open-type is <var>tabularType</var>, * and whose underlying <tt>HashMap</tt> has the specified initial capacity and load factor. * * @param tabularType the <i>tabular type</i> describing this <tt>TabularData</tt> instance; * cannot be null. 
 *
 * @param initialCapacity the initial capacity of the HashMap.
 *
 * @param loadFactor the load factor of the HashMap
 *
 * @throws IllegalArgumentException if the initial capacity is less than zero,
 *                                  or the load factor is nonpositive,
 *                                  or the tabular type is null.
 */
public TabularDataSupport(TabularType tabularType, int initialCapacity, float loadFactor) {

    // Check tabularType is not null
    //
    if (tabularType == null) {
        throw new IllegalArgumentException("Argument tabularType cannot be null.");
    }

    // Initialize this.tabularType (and indexNamesArray for convenience)
    //
    this.tabularType = tabularType;
    List<String> tmpNames = tabularType.getIndexNames();
    this.indexNamesArray = tmpNames.toArray(new String[tmpNames.size()]);

    // Since LinkedHashMap was introduced in SE 1.4, it's conceivable even
    // if very unlikely that we might be the server of a 1.3 client.  In
    // that case you'll need to set this property.  See CR 6334663.
    String useHashMapProp = AccessController.doPrivileged(
            new GetPropertyAction("jmx.tabular.data.hash.map"));
    boolean useHashMap = "true".equalsIgnoreCase(useHashMapProp);

    // Construct the empty contents HashMap.
    // Default is LinkedHashMap, so iteration over rows follows insertion
    // order; setting the property above trades that for a plain HashMap.
    //
    this.dataMap = useHashMap ?
        new HashMap<Object,CompositeData>(initialCapacity, loadFactor) :
        new LinkedHashMap<Object, CompositeData>(initialCapacity, loadFactor);
}


/* *** TabularData specific information methods *** */


/**
 * Returns the <i>tabular type</i> describing this <tt>TabularData</tt> instance.
 */
public TabularType getTabularType() {

    return tabularType;
}

/**
 * Calculates the index that would be used in this <tt>TabularData</tt> instance to refer to the specified
 * composite data <var>value</var> parameter if it were added to this instance.
 * This method checks for the type validity of the specified <var>value</var>,
 * but does not check if the calculated index is already used to refer to a value in this <tt>TabularData</tt> instance.
* * @param value the composite data value whose index in this * <tt>TabularData</tt> instance is to be calculated; * must be of the same composite type as this instance's row type; * must not be null. * * @return the index that the specified <var>value</var> would have in this <tt>TabularData</tt> instance. * * @throws NullPointerException if <var>value</var> is <tt>null</tt>. * * @throws InvalidOpenTypeException if <var>value</var> does not conform to this <tt>TabularData</tt> instance's * row type definition. */ public Object[] calculateIndex(CompositeData value) { // Check value is valid // checkValueType(value); // Return its calculated index // return internalCalculateIndex(value).toArray(); } /* *** Content information query methods *** */ /** * Returns <tt>true</tt> if and only if this <tt>TabularData</tt> instance contains a <tt>CompositeData</tt> value * (ie a row) whose index is the specified <var>key</var>. If <var>key</var> cannot be cast to a one dimension array * of Object instances, this method simply returns <tt>false</tt>; otherwise it returns the the result of the call to * <tt>this.containsKey((Object[]) key)</tt>. * * @param key the index value whose presence in this <tt>TabularData</tt> instance is to be tested. * * @return <tt>true</tt> if this <tt>TabularData</tt> indexes a row value with the specified key. */ public boolean containsKey(Object key) { // if key is not an array of Object instances, return false // Object[] k; try { k = (Object[]) key; } catch (ClassCastException e) { return false; } return this.containsKey(k); } /** * Returns <tt>true</tt> if and only if this <tt>TabularData</tt> instance contains a <tt>CompositeData</tt> value * (ie a row) whose index is the specified <var>key</var>. If <var>key</var> is <tt>null</tt> or does not conform to * this <tt>TabularData</tt> instance's <tt>TabularType</tt> definition, this method simply returns <tt>false</tt>. 
* * @param key the index value whose presence in this <tt>TabularData</tt> instance is to be tested. * * @return <tt>true</tt> if this <tt>TabularData</tt> indexes a row value with the specified key. */ public boolean containsKey(Object[] key) { return ( key == null ? false : dataMap.containsKey(Arrays.asList(key))); } /** * Returns <tt>true</tt> if and only if this <tt>TabularData</tt> instance contains the specified * <tt>CompositeData</tt> value. If <var>value</var> is <tt>null</tt> or does not conform to * this <tt>TabularData</tt> instance's row type definition, this method simply returns <tt>false</tt>. * * @param value the row value whose presence in this <tt>TabularData</tt> instance is to be tested. * * @return <tt>true</tt> if this <tt>TabularData</tt> instance contains the specified row value. */ public boolean containsValue(CompositeData value) { return dataMap.containsValue(value); } /** * Returns <tt>true</tt> if and only if this <tt>TabularData</tt> instance contains the specified * value. * * @param value the row value whose presence in this <tt>TabularData</tt> instance is to be tested. * * @return <tt>true</tt> if this <tt>TabularData</tt> instance contains the specified row value. */ public boolean containsValue(Object value) { return dataMap.containsValue(value); } /** * This method simply calls <tt>get((Object[]) key)</tt>. * * @throws NullPointerException if the <var>key</var> is <tt>null</tt> * @throws ClassCastException if the <var>key</var> is not of the type <tt>Object[]</tt> * @throws InvalidKeyException if the <var>key</var> does not conform to this <tt>TabularData</tt> instance's * <tt>TabularType</tt> definition */ public Object get(Object key) { return get((Object[]) key); } /** * Returns the <tt>CompositeData</tt> value whose index is * <var>key</var>, or <tt>null</tt> if there is no value mapping * to <var>key</var>, in this <tt>TabularData</tt> instance. 
 *
 * @param key the index of the value to get in this
 *            <tt>TabularData</tt> instance;
 *            must be valid with this
 *            <tt>TabularData</tt> instance's row type definition;
 *            must not
 *            be null.
 *
 * @return the value corresponding to <var>key</var>.
 *
 * @throws NullPointerException if the <var>key</var> is <tt>null</tt>
 * @throws InvalidKeyException if the <var>key</var> does not conform to this <tt>TabularData</tt> instance's
 *                             <tt>TabularType</tt> type definition.
 */
public CompositeData get(Object[] key) {

    // Check key is not null and valid with tabularType
    // (throws NullPointerException, InvalidKeyException)
    //
    checkKeyType(key);

    // Return the mapping stored in the parent HashMap
    //
    return dataMap.get(Arrays.asList(key));
}




/* *** Content modification operations (one element at a time) *** */


/**
 * This method simply calls <tt>put((CompositeData) value)</tt> and
 * therefore ignores its <var>key</var> parameter which can be <tt>null</tt>.
 *
 * @param key an ignored parameter.
 * @param value the {@link CompositeData} to put.
 *
 * @return the value which is put
 *
 * @throws NullPointerException if the <var>value</var> is <tt>null</tt>
 * @throws ClassCastException if the <var>value</var> is not of
 *         the type <tt>CompositeData</tt>
 * @throws InvalidOpenTypeException if the <var>value</var> does
 *         not conform to this <tt>TabularData</tt> instance's
 *         <tt>TabularType</tt> definition
 * @throws KeyAlreadyExistsException if the key for the
 *         <var>value</var> parameter, calculated according to this
 *         <tt>TabularData</tt> instance's <tt>TabularType</tt> definition
 *         already maps to an existing value
 */
public Object put(Object key, Object value) {
    internalPut((CompositeData) value);
    // Returning the argument (rather than the previous mapping returned by
    // internalPut) is a known quirk kept for compatibility:
    return value; // should be return internalPut(...); (5090566)
}

public void put(CompositeData value) {
    internalPut(value);
}

// Shared insertion path for both put overloads: validates the row, derives
// its index from the tabular type, and stores the (index, row) mapping.
private CompositeData internalPut(CompositeData value) {
    // Check value is not null, value's type is the same as this instance's row type,
    // and calculate the value's index according to this instance's tabularType and
    // check it is not already used for a mapping in the parent HashMap
    //
    List<?> index = checkValueAndIndex(value);

    // store the (key, value) mapping in the dataMap HashMap
    //
    return dataMap.put(index, value);
}

/**
 * This method simply calls <tt>remove((Object[]) key)</tt>.
 *
 * @param key an <tt>Object[]</tt> representing the key to remove.
 *
 * @return previous value associated with specified key, or <tt>null</tt>
 *         if there was no mapping for key.
 *
 * @throws NullPointerException if the <var>key</var> is <tt>null</tt>
 * @throws ClassCastException if the <var>key</var> is not of the type <tt>Object[]</tt>
 * @throws InvalidKeyException if the <var>key</var> does not conform to this <tt>TabularData</tt> instance's
 *                             <tt>TabularType</tt> definition
 */
public Object remove(Object key) {

    return remove((Object[]) key);
}

/**
 * Removes the <tt>CompositeData</tt> value whose index is <var>key</var> from this <tt>TabularData</tt> instance,
 * and returns the removed value, or returns <tt>null</tt> if there is no value whose index is <var>key</var>.
 *
 * @param key the index of the value to get in this <tt>TabularData</tt> instance;
 *            must be valid with this <tt>TabularData</tt> instance's row type definition;
 *            must not be null.
 *
 * @return previous value associated with specified key, or <tt>null</tt>
 *         if there was no mapping for key.
 *
 * @throws NullPointerException if the <var>key</var> is <tt>null</tt>
 * @throws InvalidKeyException if the <var>key</var> does not conform to this <tt>TabularData</tt> instance's
 *                             <tt>TabularType</tt> definition
 */
public CompositeData remove(Object[] key) {

    // Check key is not null and valid with tabularType
    // (throws NullPointerException, InvalidKeyException)
    //
    checkKeyType(key);

    // Removes the (key, value) mapping in the parent HashMap
    //
    return dataMap.remove(Arrays.asList(key));
}



/* ***   Content modification bulk operations   *** */


/**
 * Add all the values contained in the specified map <var>t</var>
 * to this <tt>TabularData</tt> instance.  This method converts
 * the collection of values contained in this map into an array of
 * <tt>CompositeData</tt> values, if possible, and then call the
 * method <tt>putAll(CompositeData[])</tt>.  Note that the keys
 * used in the specified map <var>t</var> are ignored. This method
 * allows, for example to add the content of another
 * <tt>TabularData</tt> instance with the same row type (but
 * possibly different index names) into this instance.
 *
 * @param t the map whose values are to be added as new rows to
 *          this <tt>TabularData</tt> instance; if <var>t</var> is
 *          <tt>null</tt> or empty, this method returns without doing
 *          anything.
 *
 * @throws NullPointerException if a value in <var>t</var> is
 *         <tt>null</tt>.
 * @throws ClassCastException if a value in <var>t</var> is not an
 *         instance of <tt>CompositeData</tt>.
 * @throws InvalidOpenTypeException if a value in <var>t</var>
 *         does not conform to this <tt>TabularData</tt> instance's row
 *         type definition.
 * @throws KeyAlreadyExistsException if the index for a value in
 *         <var>t</var>, calculated according to this
 *         <tt>TabularData</tt> instance's <tt>TabularType</tt> definition
 *         already maps to an existing value in this instance, or two
 *         values in <var>t</var> have the same index.
 */
public void putAll(Map<?,?> t) {

    // if t is null or empty, just return
    //
    if ( (t == null) || (t.size() == 0) ) {
        return;
    }

    // Convert the values in t into an array of <tt>CompositeData</tt>
    //
    CompositeData[] values;
    try {
        values =
            t.values().toArray(new CompositeData[t.size()]);
    } catch (java.lang.ArrayStoreException e) {
        // toArray stores each element into the CompositeData[]; a
        // non-CompositeData value surfaces here and is reported per the
        // documented ClassCastException contract.
        throw new ClassCastException("Map argument t contains values which are not instances of <tt>CompositeData</tt>");
    }

    // Add the array of values
    //
    putAll(values);
}

/**
 * Add all the elements in <var>values</var> to this
 * <tt>TabularData</tt> instance.  If any  element in
 * <var>values</var> does not satisfy the constraints defined in
 * {@link #put(CompositeData) <tt>put</tt>}, or if any two
 * elements in <var>values</var> have the same index calculated
 * according to this <tt>TabularData</tt> instance's
 * <tt>TabularType</tt> definition, then an exception describing
 * the failure is thrown and no element of <var>values</var> is
 * added,  thus leaving this <tt>TabularData</tt> instance
 * unchanged.
 *
 * @param values the array of composite data values to be added as
 *               new rows to this <tt>TabularData</tt> instance; if
 *               <var>values</var> is <tt>null</tt> or empty, this method
 *               returns without doing anything.
 *
 * @throws NullPointerException if an element of <var>values</var>
 *         is <tt>null</tt>
 * @throws InvalidOpenTypeException if an element of
 *         <var>values</var> does not conform to this
 *         <tt>TabularData</tt> instance's row type definition (ie its
 *         <tt>TabularType</tt> definition)
 * @throws KeyAlreadyExistsException if the index for an element
 *         of <var>values</var>, calculated according to this
 *         <tt>TabularData</tt> instance's <tt>TabularType</tt> definition
 *         already maps to an existing value in this instance, or two
 *         elements of <var>values</var> have the same index
 */
public void putAll(CompositeData[] values) {

    // if values is null or empty, just return
    //
    if ( (values == null) || (values.length == 0) ) {
        return;
    }

    // create the list of indexes corresponding to each value
    List<List<?>> indexes =
        new ArrayList<List<?>>(values.length + 1);

    // Check all elements in values and build index list.
    // All validation happens BEFORE any insertion so that a failure leaves
    // this instance completely unchanged (all-or-nothing semantics).
    // NOTE(review): indexes.contains(index) makes this loop O(n^2) in
    // values.length; acceptable for typical small batches.
    //
    List<?> index;
    for (int i=0; i<values.length; i++) {
        // check value and calculate index
        index = checkValueAndIndex(values[i]);
        // check index is different of those previously calculated
        if (indexes.contains(index)) {
            throw new KeyAlreadyExistsException("Argument elements values["+ i +"] and values["+ indexes.indexOf(index) +
                                                "] have the same indexes, "+
                                                "calculated according to this TabularData instance's tabularType.");
        }
        // add to index list
        indexes.add(index);
    }

    // store all (index, value) mappings in the dataMap HashMap
    //
    for (int i=0; i<values.length; i++) {
        dataMap.put(indexes.get(i), values[i]);
    }
}

/**
 * Removes all rows from this <code>TabularDataSupport</code> instance.
 */
public void clear() {

    dataMap.clear();
}



/* ***  Informational methods from java.util.Map  *** */

/**
 * Returns the number of rows in this <code>TabularDataSupport</code> instance.
 *
 * @return the number of rows in this <code>TabularDataSupport</code> instance.
 */
public int size() {

    return dataMap.size();
}

/**
 * Returns <tt>true</tt> if this <code>TabularDataSupport</code> instance contains no rows.
 *
 * @return <tt>true</tt> if this <code>TabularDataSupport</code> instance contains no rows.
 */
public boolean isEmpty() {

    return (this.size() == 0);
}



/* ***  Collection views from java.util.Map  *** */

/**
 * Returns a set view of the keys contained in the underlying map of this
 * {@code TabularDataSupport} instance used to index the rows.
 * Each key contained in this {@code Set} is an unmodifiable {@code List<?>}
 * so the returned set view is a {@code Set<List<?>>} but is declared as a
 * {@code Set<Object>} for compatibility reasons.
 * The set is backed by the underlying map of this
 * {@code TabularDataSupport} instance, so changes to the
 * {@code TabularDataSupport} instance are reflected in the
 * set, and vice-versa.
 *
 * The set supports element removal, which removes the corresponding
 * row from this {@code TabularDataSupport} instance, via the
 * {@link Iterator#remove}, {@link Set#remove}, {@link Set#removeAll},
 * {@link Set#retainAll}, and {@link Set#clear} operations. It does
 *  not support the {@link Set#add} or {@link Set#addAll} operations.
 *
 * @return a set view ({@code Set<List<?>>}) of the keys used to index
 * the rows of this {@code TabularDataSupport} instance.
 */
public Set<Object> keySet() {

    return dataMap.keySet() ;
}

/**
 * Returns a collection view of the rows contained in this
 * {@code TabularDataSupport} instance. The returned {@code Collection}
 * is a {@code Collection<CompositeData>} but is declared as a
 * {@code Collection<Object>} for compatibility reasons.
 * The returned collection can be used to iterate over the values.
 * The collection is backed by the underlying map, so changes to the
 * {@code TabularDataSupport} instance are reflected in the collection,
 * and vice-versa.
 *
 * The collection supports element removal, which removes the corresponding
 * index to row mapping from this {@code TabularDataSupport} instance, via
 * the {@link Iterator#remove}, {@link Collection#remove},
 * {@link Collection#removeAll}, {@link Collection#retainAll},
 * and {@link Collection#clear} operations. It does not support
 * the {@link Collection#add} or {@link Collection#addAll} operations.
 *
 * @return a collection view ({@code Collection<CompositeData>}) of
 * the values contained in this {@code TabularDataSupport} instance.
 */
@SuppressWarnings("unchecked")  // historical confusion about the return type
public Collection<Object> values() {

    return Util.cast(dataMap.values());
}


/**
 * Returns a collection view of the index to row mappings
 * contained in this {@code TabularDataSupport} instance.
 * Each element in the returned collection is
 * a {@code Map.Entry<List<?>,CompositeData>} but
 * is declared as a {@code Map.Entry<Object,Object>}
 * for compatibility reasons. Each of the map entry
 * keys is an unmodifiable {@code List<?>}.
 * The collection is backed by the underlying map of this
 * {@code TabularDataSupport} instance, so changes to the
 * {@code TabularDataSupport} instance are reflected in
 * the collection, and vice-versa.
 * The collection supports element removal, which removes
 * the corresponding mapping from the map, via the
 * {@link Iterator#remove}, {@link Collection#remove},
 * {@link Collection#removeAll}, {@link Collection#retainAll},
 * and {@link Collection#clear} operations. It does not support
 * the {@link Collection#add} or {@link Collection#addAll}
 * operations.
 * <p>
 * <b>IMPORTANT NOTICE</b>: Do not use the {@code setValue} method of the
 * {@code Map.Entry} elements contained in the returned collection view.
 * Doing so would corrupt the index to row mappings contained in this
 * {@code TabularDataSupport} instance.
 *
 * @return a collection view ({@code Set<Map.Entry<List<?>,CompositeData>>})
 * of the mappings contained in this map.
 * @see java.util.Map.Entry
 */
@SuppressWarnings("unchecked")  // historical confusion about the return type
public Set<Map.Entry<Object,Object>> entrySet() {

    return Util.cast(dataMap.entrySet());
}


/* ***  Commodity methods from java.lang.Object  *** */


/**
 * Returns a clone of this <code>TabularDataSupport</code> instance:
 * the clone is obtained by calling <tt>super.clone()</tt>, and then cloning the underlying map.
 * Only a shallow clone of the underlying map is made, i.e. no cloning of the indexes and row values is made as they are immutable.
 */
/* We cannot use covariance here and return TabularDataSupport
   because this would fail with existing code that subclassed
   TabularDataSupport and overrode Object clone().  It would not
   override the new clone().  */
public Object clone() {
    try {
        TabularDataSupport c = (TabularDataSupport) super.clone();
        // NOTE(review): the copy is always a plain HashMap, even though the
        // constructor defaults to LinkedHashMap — so a clone does not
        // preserve the original's insertion-order iteration; confirm this
        // is acceptable before relying on row order in a clone.
        c.dataMap = new HashMap<Object,CompositeData>(c.dataMap);
        return c;
    } catch (CloneNotSupportedException e) {
        throw new InternalError(e.toString(), e);
    }
}


/**
 * Compares the specified <var>obj</var> parameter with this <code>TabularDataSupport</code> instance for equality.
 * <p>
 * Returns <tt>true</tt> if and only if all of the following statements are true:
 * <ul>
 * <li><var>obj</var> is non null,</li>
 * <li><var>obj</var> also implements the <code>TabularData</code> interface,</li>
 * <li>their tabular types are equal</li>
 * <li>their contents (ie all CompositeData values) are equal.</li>
 * </ul>
 * This ensures that this <tt>equals</tt> method works properly for <var>obj</var> parameters which are
 * different implementations of the <code>TabularData</code> interface.
 * <br>&nbsp;
 * @param  obj  the object to be compared for equality with this <code>TabularDataSupport</code> instance;
 *
 * @return  <code>true</code> if the specified object is equal to this <code>TabularDataSupport</code> instance.
*/ public boolean equals(Object obj) { // if obj is null, return false // if (obj == null) { return false; } // if obj is not a TabularData, return false // TabularData other; try { other = (TabularData) obj; } catch (ClassCastException e) { return false; } // Now, really test for equality between this TabularData implementation and the other: // // their tabularType should be equal if ( ! this.getTabularType().equals(other.getTabularType()) ) { return false; } // their contents should be equal: // . same size // . values in this instance are in the other (we know there are no duplicate elements possible) // (row values comparison is enough, because keys are calculated according to tabularType) if (this.size() != other.size()) { return false; } for (CompositeData value : dataMap.values()) { if ( ! other.containsValue(value) ) { return false; } } // All tests for equality were successfull // return true; } /** * Returns the hash code value for this <code>TabularDataSupport</code> instance. * <p> * The hash code of a <code>TabularDataSupport</code> instance is the sum of the hash codes * of all elements of information used in <code>equals</code> comparisons * (ie: its <i>tabular type</i> and its content, where the content is defined as all the CompositeData values). * <p> * This ensures that <code> t1.equals(t2) </code> implies that <code> t1.hashCode()==t2.hashCode() </code> * for any two <code>TabularDataSupport</code> instances <code>t1</code> and <code>t2</code>, * as required by the general contract of the method * {@link Object#hashCode() Object.hashCode()}. * <p> * However, note that another instance of a class implementing the <code>TabularData</code> interface * may be equal to this <code>TabularDataSupport</code> instance as defined by {@link #equals}, * but may have a different hash code if it is calculated differently. 
* * @return the hash code value for this <code>TabularDataSupport</code> instance */ public int hashCode() { int result = 0; result += this.tabularType.hashCode(); for (Object value : values()) result += value.hashCode(); return result; } /** * Returns a string representation of this <code>TabularDataSupport</code> instance. * <p> * The string representation consists of the name of this class (ie <code>javax.management.openmbean.TabularDataSupport</code>), * the string representation of the tabular type of this instance, and the string representation of the contents * (ie list the key=value mappings as returned by a call to * <tt>dataMap.</tt>{@link java.util.HashMap#toString() toString()}). * * @return a string representation of this <code>TabularDataSupport</code> instance */ public String toString() { return new StringBuilder() .append(this.getClass().getName()) .append("(tabularType=") .append(tabularType.toString()) .append(",contents=") .append(dataMap.toString()) .append(")") .toString(); } /* *** TabularDataSupport internal utility methods *** */ /** * Returns the index for value, assuming value is valid for this <tt>TabularData</tt> instance * (ie value is not null, and its composite type is equal to row type). * * The index is a List, and not an array, so that an index.equals(otherIndex) call will actually compare contents, * not just the objects references as is done for an array object. * * The returned List is unmodifiable so that once a row has been put into the dataMap, its index cannot be modified, * for example by a user that would attempt to modify an index contained in the Set returned by keySet(). */ private List<?> internalCalculateIndex(CompositeData value) { return Collections.unmodifiableList(Arrays.asList(value.getAll(this.indexNamesArray))); } /** * Checks if the specified key is valid for this <tt>TabularData</tt> instance. 
* * @throws NullPointerException * @throws InvalidOpenTypeException */ private void checkKeyType(Object[] key) { // Check key is neither null nor empty // if ( (key == null) || (key.length == 0) ) { throw new NullPointerException("Argument key cannot be null or empty."); } /* Now check key is valid with tabularType index and row type definitions: */ // key[] should have the size expected for an index // if (key.length != this.indexNamesArray.length) { throw new InvalidKeyException("Argument key's length="+ key.length + " is different from the number of item values, which is "+ indexNamesArray.length + ", specified for the indexing rows in this TabularData instance."); } // each element in key[] should be a value for its corresponding open type specified in rowType // OpenType<?> keyElementType; for (int i=0; i<key.length; i++) { keyElementType = tabularType.getRowType().getType(this.indexNamesArray[i]); if ( (key[i] != null) && (! keyElementType.isValue(key[i])) ) { throw new InvalidKeyException("Argument element key["+ i +"] is not a value for the open type expected for "+ "this element of the index, whose name is \""+ indexNamesArray[i] + "\" and whose open type is "+ keyElementType); } } } /** * Checks the specified value's type is valid for this <tt>TabularData</tt> instance * (ie value is not null, and its composite type is equal to row type). 
* * @throws NullPointerException * @throws InvalidOpenTypeException */ private void checkValueType(CompositeData value) { // Check value is not null // if (value == null) { throw new NullPointerException("Argument value cannot be null."); } // if value's type is not the same as this instance's row type, throw InvalidOpenTypeException // if (!tabularType.getRowType().isValue(value)) { throw new InvalidOpenTypeException("Argument value's composite type ["+ value.getCompositeType() + "] is not assignable to "+ "this TabularData instance's row type ["+ tabularType.getRowType() +"]."); } } /** * Checks if the specified value can be put (ie added) in this <tt>TabularData</tt> instance * (ie value is not null, its composite type is equal to row type, and its index is not already used), * and returns the index calculated for this value. * * The index is a List, and not an array, so that an index.equals(otherIndex) call will actually compare contents, * not just the objects references as is done for an array object. * * @throws NullPointerException * @throws InvalidOpenTypeException * @throws KeyAlreadyExistsException */ private List<?> checkValueAndIndex(CompositeData value) { // Check value is valid // checkValueType(value); // Calculate value's index according to this instance's tabularType // and check it is not already used for a mapping in the parent HashMap // List<?> index = internalCalculateIndex(value); if (dataMap.containsKey(index)) { throw new KeyAlreadyExistsException("Argument value's index, calculated according to this TabularData "+ "instance's tabularType, already refers to a value in this table."); } // The check is OK, so return the index // return index; } /** * Deserializes a {@link TabularDataSupport} from an {@link ObjectInputStream}. 
*/ private void readObject(ObjectInputStream in) throws IOException, ClassNotFoundException { in.defaultReadObject(); List<String> tmpNames = tabularType.getIndexNames(); indexNamesArray = tmpNames.toArray(new String[tmpNames.size()]); } }
apache/kyuubi
35,846
kyuubi-hive-jdbc/src/main/java/org/apache/kyuubi/jdbc/hive/adapter/SQLCallableStatement.java
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.kyuubi.jdbc.hive.adapter; import java.io.InputStream; import java.io.Reader; import java.math.BigDecimal; import java.net.URL; import java.sql.*; import java.util.Calendar; import java.util.Map; @SuppressWarnings("deprecation") public interface SQLCallableStatement extends CallableStatement { @Override default void registerOutParameter(int parameterIndex, int sqlType) throws SQLException { throw new SQLFeatureNotSupportedException("Method not supported"); } @Override default void registerOutParameter(int parameterIndex, int sqlType, int scale) throws SQLException { throw new SQLFeatureNotSupportedException("Method not supported"); } @Override default boolean wasNull() throws SQLException { throw new SQLFeatureNotSupportedException("Method not supported"); } @Override default String getString(int parameterIndex) throws SQLException { throw new SQLFeatureNotSupportedException("Method not supported"); } @Override default boolean getBoolean(int parameterIndex) throws SQLException { throw new SQLFeatureNotSupportedException("Method not supported"); } @Override default byte getByte(int parameterIndex) throws SQLException { throw new SQLFeatureNotSupportedException("Method not 
supported"); } @Override default short getShort(int parameterIndex) throws SQLException { throw new SQLFeatureNotSupportedException("Method not supported"); } @Override default int getInt(int parameterIndex) throws SQLException { throw new SQLFeatureNotSupportedException("Method not supported"); } @Override default long getLong(int parameterIndex) throws SQLException { throw new SQLFeatureNotSupportedException("Method not supported"); } @Override default float getFloat(int parameterIndex) throws SQLException { throw new SQLFeatureNotSupportedException("Method not supported"); } @Override default double getDouble(int parameterIndex) throws SQLException { throw new SQLFeatureNotSupportedException("Method not supported"); } @Override default BigDecimal getBigDecimal(int parameterIndex, int scale) throws SQLException { throw new SQLFeatureNotSupportedException("Method not supported"); } @Override default byte[] getBytes(int parameterIndex) throws SQLException { throw new SQLFeatureNotSupportedException("Method not supported"); } @Override default Date getDate(int parameterIndex) throws SQLException { throw new SQLFeatureNotSupportedException("Method not supported"); } @Override default Time getTime(int parameterIndex) throws SQLException { throw new SQLFeatureNotSupportedException("Method not supported"); } @Override default Timestamp getTimestamp(int parameterIndex) throws SQLException { throw new SQLFeatureNotSupportedException("Method not supported"); } @Override default Object getObject(int parameterIndex) throws SQLException { throw new SQLFeatureNotSupportedException("Method not supported"); } @Override default BigDecimal getBigDecimal(int parameterIndex) throws SQLException { throw new SQLFeatureNotSupportedException("Method not supported"); } @Override default Object getObject(int parameterIndex, Map<String, Class<?>> map) throws SQLException { throw new SQLFeatureNotSupportedException("Method not supported"); } @Override default Ref getRef(int parameterIndex) 
throws SQLException { throw new SQLFeatureNotSupportedException("Method not supported"); } @Override default Blob getBlob(int parameterIndex) throws SQLException { throw new SQLFeatureNotSupportedException("Method not supported"); } @Override default Clob getClob(int parameterIndex) throws SQLException { throw new SQLFeatureNotSupportedException("Method not supported"); } @Override default Array getArray(int parameterIndex) throws SQLException { throw new SQLFeatureNotSupportedException("Method not supported"); } @Override default Date getDate(int parameterIndex, Calendar cal) throws SQLException { throw new SQLFeatureNotSupportedException("Method not supported"); } @Override default Time getTime(int parameterIndex, Calendar cal) throws SQLException { throw new SQLFeatureNotSupportedException("Method not supported"); } @Override default Timestamp getTimestamp(int parameterIndex, Calendar cal) throws SQLException { throw new SQLFeatureNotSupportedException("Method not supported"); } @Override default void registerOutParameter(int parameterIndex, int sqlType, String typeName) throws SQLException { throw new SQLFeatureNotSupportedException("Method not supported"); } @Override default void registerOutParameter(String parameterName, int sqlType) throws SQLException { throw new SQLFeatureNotSupportedException("Method not supported"); } @Override default void registerOutParameter(String parameterName, int sqlType, int scale) throws SQLException { throw new SQLFeatureNotSupportedException("Method not supported"); } @Override default void registerOutParameter(String parameterName, int sqlType, String typeName) throws SQLException { throw new SQLFeatureNotSupportedException("Method not supported"); } @Override default URL getURL(int parameterIndex) throws SQLException { throw new SQLFeatureNotSupportedException("Method not supported"); } @Override default void setURL(String parameterName, URL val) throws SQLException { throw new SQLFeatureNotSupportedException("Method not 
supported"); } @Override default void setNull(String parameterName, int sqlType) throws SQLException { throw new SQLFeatureNotSupportedException("Method not supported"); } @Override default void setBoolean(String parameterName, boolean x) throws SQLException { throw new SQLFeatureNotSupportedException("Method not supported"); } @Override default void setByte(String parameterName, byte x) throws SQLException { throw new SQLFeatureNotSupportedException("Method not supported"); } @Override default void setShort(String parameterName, short x) throws SQLException { throw new SQLFeatureNotSupportedException("Method not supported"); } @Override default void setInt(String parameterName, int x) throws SQLException { throw new SQLFeatureNotSupportedException("Method not supported"); } @Override default void setLong(String parameterName, long x) throws SQLException { throw new SQLFeatureNotSupportedException("Method not supported"); } @Override default void setFloat(String parameterName, float x) throws SQLException { throw new SQLFeatureNotSupportedException("Method not supported"); } @Override default void setDouble(String parameterName, double x) throws SQLException { throw new SQLFeatureNotSupportedException("Method not supported"); } @Override default void setBigDecimal(String parameterName, BigDecimal x) throws SQLException { throw new SQLFeatureNotSupportedException("Method not supported"); } @Override default void setString(String parameterName, String x) throws SQLException { throw new SQLFeatureNotSupportedException("Method not supported"); } @Override default void setBytes(String parameterName, byte[] x) throws SQLException { throw new SQLFeatureNotSupportedException("Method not supported"); } @Override default void setDate(String parameterName, Date x) throws SQLException { throw new SQLFeatureNotSupportedException("Method not supported"); } @Override default void setTime(String parameterName, Time x) throws SQLException { throw new 
SQLFeatureNotSupportedException("Method not supported"); } @Override default void setTimestamp(String parameterName, Timestamp x) throws SQLException { throw new SQLFeatureNotSupportedException("Method not supported"); } @Override default void setAsciiStream(String parameterName, InputStream x, int length) throws SQLException { throw new SQLFeatureNotSupportedException("Method not supported"); } @Override default void setBinaryStream(String parameterName, InputStream x, int length) throws SQLException { throw new SQLFeatureNotSupportedException("Method not supported"); } @Override default void setObject(String parameterName, Object x, int targetSqlType, int scale) throws SQLException { throw new SQLFeatureNotSupportedException("Method not supported"); } @Override default void setObject(String parameterName, Object x, int targetSqlType) throws SQLException { throw new SQLFeatureNotSupportedException("Method not supported"); } @Override default void setObject(String parameterName, Object x) throws SQLException { throw new SQLFeatureNotSupportedException("Method not supported"); } @Override default void setCharacterStream(String parameterName, Reader reader, int length) throws SQLException { throw new SQLFeatureNotSupportedException("Method not supported"); } @Override default void setDate(String parameterName, Date x, Calendar cal) throws SQLException { throw new SQLFeatureNotSupportedException("Method not supported"); } @Override default void setTime(String parameterName, Time x, Calendar cal) throws SQLException { throw new SQLFeatureNotSupportedException("Method not supported"); } @Override default void setTimestamp(String parameterName, Timestamp x, Calendar cal) throws SQLException { throw new SQLFeatureNotSupportedException("Method not supported"); } @Override default void setNull(String parameterName, int sqlType, String typeName) throws SQLException { throw new SQLFeatureNotSupportedException("Method not supported"); } @Override default String 
getString(String parameterName) throws SQLException { throw new SQLFeatureNotSupportedException("Method not supported"); } @Override default boolean getBoolean(String parameterName) throws SQLException { throw new SQLFeatureNotSupportedException("Method not supported"); } @Override default byte getByte(String parameterName) throws SQLException { throw new SQLFeatureNotSupportedException("Method not supported"); } @Override default short getShort(String parameterName) throws SQLException { throw new SQLFeatureNotSupportedException("Method not supported"); } @Override default int getInt(String parameterName) throws SQLException { throw new SQLFeatureNotSupportedException("Method not supported"); } @Override default long getLong(String parameterName) throws SQLException { throw new SQLFeatureNotSupportedException("Method not supported"); } @Override default float getFloat(String parameterName) throws SQLException { throw new SQLFeatureNotSupportedException("Method not supported"); } @Override default double getDouble(String parameterName) throws SQLException { throw new SQLFeatureNotSupportedException("Method not supported"); } @Override default byte[] getBytes(String parameterName) throws SQLException { throw new SQLFeatureNotSupportedException("Method not supported"); } @Override default Date getDate(String parameterName) throws SQLException { throw new SQLFeatureNotSupportedException("Method not supported"); } @Override default Time getTime(String parameterName) throws SQLException { throw new SQLFeatureNotSupportedException("Method not supported"); } @Override default Timestamp getTimestamp(String parameterName) throws SQLException { throw new SQLFeatureNotSupportedException("Method not supported"); } @Override default Object getObject(String parameterName) throws SQLException { throw new SQLFeatureNotSupportedException("Method not supported"); } @Override default BigDecimal getBigDecimal(String parameterName) throws SQLException { throw new 
SQLFeatureNotSupportedException("Method not supported"); } @Override default Object getObject(String parameterName, Map<String, Class<?>> map) throws SQLException { throw new SQLFeatureNotSupportedException("Method not supported"); } @Override default Ref getRef(String parameterName) throws SQLException { throw new SQLFeatureNotSupportedException("Method not supported"); } @Override default Blob getBlob(String parameterName) throws SQLException { throw new SQLFeatureNotSupportedException("Method not supported"); } @Override default Clob getClob(String parameterName) throws SQLException { throw new SQLFeatureNotSupportedException("Method not supported"); } @Override default Array getArray(String parameterName) throws SQLException { throw new SQLFeatureNotSupportedException("Method not supported"); } @Override default Date getDate(String parameterName, Calendar cal) throws SQLException { throw new SQLFeatureNotSupportedException("Method not supported"); } @Override default Time getTime(String parameterName, Calendar cal) throws SQLException { throw new SQLFeatureNotSupportedException("Method not supported"); } @Override default Timestamp getTimestamp(String parameterName, Calendar cal) throws SQLException { throw new SQLFeatureNotSupportedException("Method not supported"); } @Override default URL getURL(String parameterName) throws SQLException { throw new SQLFeatureNotSupportedException("Method not supported"); } @Override default RowId getRowId(int parameterIndex) throws SQLException { throw new SQLFeatureNotSupportedException("Method not supported"); } @Override default RowId getRowId(String parameterName) throws SQLException { throw new SQLFeatureNotSupportedException("Method not supported"); } @Override default void setRowId(String parameterName, RowId x) throws SQLException { throw new SQLFeatureNotSupportedException("Method not supported"); } @Override default void setNString(String parameterName, String value) throws SQLException { throw new 
SQLFeatureNotSupportedException("Method not supported"); } @Override default void setNCharacterStream(String parameterName, Reader value, long length) throws SQLException { throw new SQLFeatureNotSupportedException("Method not supported"); } @Override default void setNClob(String parameterName, NClob value) throws SQLException { throw new SQLFeatureNotSupportedException("Method not supported"); } @Override default void setClob(String parameterName, Reader reader, long length) throws SQLException { throw new SQLFeatureNotSupportedException("Method not supported"); } @Override default void setBlob(String parameterName, InputStream inputStream, long length) throws SQLException { throw new SQLFeatureNotSupportedException("Method not supported"); } @Override default void setNClob(String parameterName, Reader reader, long length) throws SQLException { throw new SQLFeatureNotSupportedException("Method not supported"); } @Override default NClob getNClob(int parameterIndex) throws SQLException { throw new SQLFeatureNotSupportedException("Method not supported"); } @Override default NClob getNClob(String parameterName) throws SQLException { throw new SQLFeatureNotSupportedException("Method not supported"); } @Override default void setSQLXML(String parameterName, SQLXML xmlObject) throws SQLException { throw new SQLFeatureNotSupportedException("Method not supported"); } @Override default SQLXML getSQLXML(int parameterIndex) throws SQLException { throw new SQLFeatureNotSupportedException("Method not supported"); } @Override default SQLXML getSQLXML(String parameterName) throws SQLException { throw new SQLFeatureNotSupportedException("Method not supported"); } @Override default String getNString(int parameterIndex) throws SQLException { throw new SQLFeatureNotSupportedException("Method not supported"); } @Override default String getNString(String parameterName) throws SQLException { throw new SQLFeatureNotSupportedException("Method not supported"); } @Override default Reader 
getNCharacterStream(int parameterIndex) throws SQLException { throw new SQLFeatureNotSupportedException("Method not supported"); } @Override default Reader getNCharacterStream(String parameterName) throws SQLException { throw new SQLFeatureNotSupportedException("Method not supported"); } @Override default Reader getCharacterStream(int parameterIndex) throws SQLException { throw new SQLFeatureNotSupportedException("Method not supported"); } @Override default Reader getCharacterStream(String parameterName) throws SQLException { throw new SQLFeatureNotSupportedException("Method not supported"); } @Override default void setBlob(String parameterName, Blob x) throws SQLException { throw new SQLFeatureNotSupportedException("Method not supported"); } @Override default void setClob(String parameterName, Clob x) throws SQLException { throw new SQLFeatureNotSupportedException("Method not supported"); } @Override default void setAsciiStream(String parameterName, InputStream x, long length) throws SQLException { throw new SQLFeatureNotSupportedException("Method not supported"); } @Override default void setBinaryStream(String parameterName, InputStream x, long length) throws SQLException { throw new SQLFeatureNotSupportedException("Method not supported"); } @Override default void setCharacterStream(String parameterName, Reader reader, long length) throws SQLException { throw new SQLFeatureNotSupportedException("Method not supported"); } @Override default void setAsciiStream(String parameterName, InputStream x) throws SQLException { throw new SQLFeatureNotSupportedException("Method not supported"); } @Override default void setBinaryStream(String parameterName, InputStream x) throws SQLException { throw new SQLFeatureNotSupportedException("Method not supported"); } @Override default void setCharacterStream(String parameterName, Reader reader) throws SQLException { throw new SQLFeatureNotSupportedException("Method not supported"); } @Override default void setNCharacterStream(String 
parameterName, Reader value) throws SQLException { throw new SQLFeatureNotSupportedException("Method not supported"); } @Override default void setClob(String parameterName, Reader reader) throws SQLException { throw new SQLFeatureNotSupportedException("Method not supported"); } @Override default void setBlob(String parameterName, InputStream inputStream) throws SQLException { throw new SQLFeatureNotSupportedException("Method not supported"); } @Override default void setNClob(String parameterName, Reader reader) throws SQLException { throw new SQLFeatureNotSupportedException("Method not supported"); } @Override default <T> T getObject(int parameterIndex, Class<T> type) throws SQLException { throw new SQLFeatureNotSupportedException("Method not supported"); } @Override default <T> T getObject(String parameterName, Class<T> type) throws SQLException { throw new SQLFeatureNotSupportedException("Method not supported"); } @Override default int executeUpdate() throws SQLException { throw new SQLFeatureNotSupportedException("Method not supported"); } @Override default void setNull(int parameterIndex, int sqlType) throws SQLException { throw new SQLFeatureNotSupportedException("Method not supported"); } @Override default void setBoolean(int parameterIndex, boolean x) throws SQLException { throw new SQLFeatureNotSupportedException("Method not supported"); } @Override default void setByte(int parameterIndex, byte x) throws SQLException { throw new SQLFeatureNotSupportedException("Method not supported"); } @Override default void setShort(int parameterIndex, short x) throws SQLException { throw new SQLFeatureNotSupportedException("Method not supported"); } @Override default void setInt(int parameterIndex, int x) throws SQLException { throw new SQLFeatureNotSupportedException("Method not supported"); } @Override default void setLong(int parameterIndex, long x) throws SQLException { throw new SQLFeatureNotSupportedException("Method not supported"); } @Override default void 
setFloat(int parameterIndex, float x) throws SQLException { throw new SQLFeatureNotSupportedException("Method not supported"); }

    // ------------------------------------------------------------------
    // Unsupported-operation stubs.
    //
    // Every default method below deliberately fails with
    // SQLFeatureNotSupportedException("Method not supported").
    // NOTE(review): this interface appears intended as a baseline that
    // implementations extend, overriding only the operations they support
    // — confirm against the interface declaration above.
    // ------------------------------------------------------------------

    @Override default void setDouble(int parameterIndex, double x) throws SQLException { throw new SQLFeatureNotSupportedException("Method not supported"); }
    @Override default void setBigDecimal(int parameterIndex, BigDecimal x) throws SQLException { throw new SQLFeatureNotSupportedException("Method not supported"); }
    @Override default void setString(int parameterIndex, String x) throws SQLException { throw new SQLFeatureNotSupportedException("Method not supported"); }
    @Override default void setBytes(int parameterIndex, byte[] x) throws SQLException { throw new SQLFeatureNotSupportedException("Method not supported"); }
    @Override default void setDate(int parameterIndex, Date x) throws SQLException { throw new SQLFeatureNotSupportedException("Method not supported"); }
    @Override default void setTime(int parameterIndex, Time x) throws SQLException { throw new SQLFeatureNotSupportedException("Method not supported"); }
    @Override default void setTimestamp(int parameterIndex, Timestamp x) throws SQLException { throw new SQLFeatureNotSupportedException("Method not supported"); }
    @Override default void setAsciiStream(int parameterIndex, InputStream x, int length) throws SQLException { throw new SQLFeatureNotSupportedException("Method not supported"); }
    @Override default void setUnicodeStream(int parameterIndex, InputStream x, int length) throws SQLException { throw new SQLFeatureNotSupportedException("Method not supported"); }
    @Override default void setBinaryStream(int parameterIndex, InputStream x, int length) throws SQLException { throw new SQLFeatureNotSupportedException("Method not supported"); }
    @Override default void clearParameters() throws SQLException { throw new SQLFeatureNotSupportedException("Method not supported"); }
    @Override default void setObject(int parameterIndex, Object x, int targetSqlType) throws SQLException { throw new SQLFeatureNotSupportedException("Method not supported"); }
    @Override default void setObject(int parameterIndex, Object x) throws SQLException { throw new SQLFeatureNotSupportedException("Method not supported"); }
    @Override default boolean execute() throws SQLException { throw new SQLFeatureNotSupportedException("Method not supported"); }
    @Override default void addBatch() throws SQLException { throw new SQLFeatureNotSupportedException("Method not supported"); }
    @Override default void setCharacterStream(int parameterIndex, Reader reader, int length) throws SQLException { throw new SQLFeatureNotSupportedException("Method not supported"); }
    @Override default void setRef(int parameterIndex, Ref x) throws SQLException { throw new SQLFeatureNotSupportedException("Method not supported"); }
    @Override default void setBlob(int parameterIndex, Blob x) throws SQLException { throw new SQLFeatureNotSupportedException("Method not supported"); }
    @Override default void setClob(int parameterIndex, Clob x) throws SQLException { throw new SQLFeatureNotSupportedException("Method not supported"); }
    @Override default void setArray(int parameterIndex, Array x) throws SQLException { throw new SQLFeatureNotSupportedException("Method not supported"); }
    @Override default ResultSetMetaData getMetaData() throws SQLException { throw new SQLFeatureNotSupportedException("Method not supported"); }
    @Override default void setDate(int parameterIndex, Date x, Calendar cal) throws SQLException { throw new SQLFeatureNotSupportedException("Method not supported"); }
    @Override default void setTime(int parameterIndex, Time x, Calendar cal) throws SQLException { throw new SQLFeatureNotSupportedException("Method not supported"); }
    @Override default void setTimestamp(int parameterIndex, Timestamp x, Calendar cal) throws SQLException { throw new SQLFeatureNotSupportedException("Method not supported"); }
    @Override default void setNull(int parameterIndex, int sqlType, String typeName) throws SQLException { throw new SQLFeatureNotSupportedException("Method not supported"); }
    @Override default void setURL(int parameterIndex, URL x) throws SQLException { throw new SQLFeatureNotSupportedException("Method not supported"); }
    @Override default ParameterMetaData getParameterMetaData() throws SQLException { throw new SQLFeatureNotSupportedException("Method not supported"); }
    @Override default void setRowId(int parameterIndex, RowId x) throws SQLException { throw new SQLFeatureNotSupportedException("Method not supported"); }
    @Override default void setNString(int parameterIndex, String value) throws SQLException { throw new SQLFeatureNotSupportedException("Method not supported"); }
    @Override default void setNCharacterStream(int parameterIndex, Reader value, long length) throws SQLException { throw new SQLFeatureNotSupportedException("Method not supported"); }
    @Override default void setNClob(int parameterIndex, NClob value) throws SQLException { throw new SQLFeatureNotSupportedException("Method not supported"); }
    @Override default void setClob(int parameterIndex, Reader reader, long length) throws SQLException { throw new SQLFeatureNotSupportedException("Method not supported"); }
    @Override default void setBlob(int parameterIndex, InputStream inputStream, long length) throws SQLException { throw new SQLFeatureNotSupportedException("Method not supported"); }
    @Override default void setNClob(int parameterIndex, Reader reader, long length) throws SQLException { throw new SQLFeatureNotSupportedException("Method not supported"); }
    @Override default void setSQLXML(int parameterIndex, SQLXML xmlObject) throws SQLException { throw new SQLFeatureNotSupportedException("Method not supported"); }
    @Override default void setObject(int parameterIndex, Object x, int targetSqlType, int scaleOrLength) throws SQLException { throw new SQLFeatureNotSupportedException("Method not supported"); }
    @Override default void setAsciiStream(int parameterIndex, InputStream x, long length) throws SQLException { throw new SQLFeatureNotSupportedException("Method not supported"); }
    @Override default void setBinaryStream(int parameterIndex, InputStream x, long length) throws SQLException { throw new SQLFeatureNotSupportedException("Method not supported"); }
    @Override default void setCharacterStream(int parameterIndex, Reader reader, long length) throws SQLException { throw new SQLFeatureNotSupportedException("Method not supported"); }
    @Override default void setAsciiStream(int parameterIndex, InputStream x) throws SQLException { throw new SQLFeatureNotSupportedException("Method not supported"); }
    @Override default void setBinaryStream(int parameterIndex, InputStream x) throws SQLException { throw new SQLFeatureNotSupportedException("Method not supported"); }
    @Override default void setCharacterStream(int parameterIndex, Reader reader) throws SQLException { throw new SQLFeatureNotSupportedException("Method not supported"); }
    @Override default void setNCharacterStream(int parameterIndex, Reader value) throws SQLException { throw new SQLFeatureNotSupportedException("Method not supported"); }
    @Override default void setClob(int parameterIndex, Reader reader) throws SQLException { throw new SQLFeatureNotSupportedException("Method not supported"); }
    @Override default void setBlob(int parameterIndex, InputStream inputStream) throws SQLException { throw new SQLFeatureNotSupportedException("Method not supported"); }
    @Override default void setNClob(int parameterIndex, Reader reader) throws SQLException { throw new SQLFeatureNotSupportedException("Method not supported"); }
    @Override default ResultSet executeQuery(String sql) throws SQLException { throw new SQLFeatureNotSupportedException("Method not supported"); }
    @Override default int executeUpdate(String sql) throws SQLException { throw new SQLFeatureNotSupportedException("Method not supported"); }
    @Override default void close() throws SQLException { throw new SQLFeatureNotSupportedException("Method not supported"); }
    @Override default int getMaxFieldSize() throws SQLException { throw new SQLFeatureNotSupportedException("Method not supported"); }
    @Override default void setMaxFieldSize(int max) throws SQLException { throw new SQLFeatureNotSupportedException("Method not supported"); }
    @Override default int getMaxRows() throws SQLException { throw new SQLFeatureNotSupportedException("Method not supported"); }
    @Override default void setMaxRows(int max) throws SQLException { throw new SQLFeatureNotSupportedException("Method not supported"); }
    @Override default void setEscapeProcessing(boolean enable) throws SQLException { throw new SQLFeatureNotSupportedException("Method not supported"); }
    @Override default int getQueryTimeout() throws SQLException { throw new SQLFeatureNotSupportedException("Method not supported"); }
    @Override default void setQueryTimeout(int seconds) throws SQLException { throw new SQLFeatureNotSupportedException("Method not supported"); }
    @Override default void cancel() throws SQLException { throw new SQLFeatureNotSupportedException("Method not supported"); }
    @Override default SQLWarning getWarnings() throws SQLException { throw new SQLFeatureNotSupportedException("Method not supported"); }
    @Override default void clearWarnings() throws SQLException { throw new SQLFeatureNotSupportedException("Method not supported"); }
    @Override default void setCursorName(String name) throws SQLException { throw new SQLFeatureNotSupportedException("Method not supported"); }
    @Override default boolean execute(String sql) throws SQLException { throw new SQLFeatureNotSupportedException("Method not supported"); }
    @Override default ResultSet getResultSet() throws SQLException { throw new SQLFeatureNotSupportedException("Method not supported"); }
    @Override default int getUpdateCount() throws SQLException { throw new SQLFeatureNotSupportedException("Method not supported"); }
    @Override default boolean getMoreResults() throws SQLException { throw new SQLFeatureNotSupportedException("Method not supported"); }
    @Override default void setFetchDirection(int direction) throws SQLException { throw new SQLFeatureNotSupportedException("Method not supported"); }
    @Override default int getFetchDirection() throws SQLException { throw new SQLFeatureNotSupportedException("Method not supported"); }
    @Override default void setFetchSize(int rows) throws SQLException { throw new SQLFeatureNotSupportedException("Method not supported"); }
    @Override default int getFetchSize() throws SQLException { throw new SQLFeatureNotSupportedException("Method not supported"); }
    @Override default int getResultSetConcurrency() throws SQLException { throw new SQLFeatureNotSupportedException("Method not supported"); }
    @Override default int getResultSetType() throws SQLException { throw new SQLFeatureNotSupportedException("Method not supported"); }
    @Override default void addBatch(String sql) throws SQLException { throw new SQLFeatureNotSupportedException("Method not supported"); }
    @Override default void clearBatch() throws SQLException { throw new SQLFeatureNotSupportedException("Method not supported"); }
    @Override default int[] executeBatch() throws SQLException { throw new SQLFeatureNotSupportedException("Method not supported"); }
    @Override default boolean getMoreResults(int current) throws SQLException { throw new SQLFeatureNotSupportedException("Method not supported"); }
    @Override default ResultSet getGeneratedKeys() throws SQLException { throw new SQLFeatureNotSupportedException("Method not supported"); }
    @Override default int executeUpdate(String sql, int autoGeneratedKeys) throws SQLException { throw new SQLFeatureNotSupportedException("Method not supported"); }
    @Override default int executeUpdate(String sql, int[] columnIndexes) throws SQLException { throw new SQLFeatureNotSupportedException("Method not supported"); }
    @Override default int executeUpdate(String sql, String[] columnNames) throws SQLException { throw new SQLFeatureNotSupportedException("Method not supported"); }
    @Override default boolean execute(String sql, int autoGeneratedKeys) throws SQLException { throw new SQLFeatureNotSupportedException("Method not supported"); }
    @Override default boolean execute(String sql, int[] columnIndexes) throws SQLException { throw new SQLFeatureNotSupportedException("Method not supported"); }
    @Override default boolean execute(String sql, String[] columnNames) throws SQLException { throw new SQLFeatureNotSupportedException("Method not supported"); }
    @Override default int getResultSetHoldability() throws SQLException { throw new SQLFeatureNotSupportedException("Method not supported"); }
    @Override default boolean isClosed() throws SQLException { throw new SQLFeatureNotSupportedException("Method not supported"); }
    @Override default void setPoolable(boolean poolable) throws SQLException { throw new SQLFeatureNotSupportedException("Method not supported"); }
    @Override default boolean isPoolable() throws SQLException { throw new SQLFeatureNotSupportedException("Method not supported"); }
    @Override default void closeOnCompletion() throws SQLException { throw new SQLFeatureNotSupportedException("Method not supported"); }
    @Override default boolean isCloseOnCompletion() throws SQLException { throw new SQLFeatureNotSupportedException("Method not supported"); }
    @Override default <T> T unwrap(Class<T> iface) throws SQLException { throw new SQLFeatureNotSupportedException("Method not supported"); }
    @Override default boolean isWrapperFor(Class<?> iface) throws SQLException { throw new SQLFeatureNotSupportedException("Method not supported"); }
}
openjdk/jdk8
36,519
jdk/src/share/classes/javax/xml/crypto/dsig/XMLSignatureFactory.java
/* * Copyright (c) 2005, 2011, Oracle and/or its affiliates. All rights reserved. * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER. * * This code is free software; you can redistribute it and/or modify it * under the terms of the GNU General Public License version 2 only, as * published by the Free Software Foundation. Oracle designates this * particular file as subject to the "Classpath" exception as provided * by Oracle in the LICENSE file that accompanied this code. * * This code is distributed in the hope that it will be useful, but WITHOUT * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License * version 2 for more details (a copy is included in the LICENSE file that * accompanied this code). * * You should have received a copy of the GNU General Public License version * 2 along with this work; if not, write to the Free Software Foundation, * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA. * * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA * or visit www.oracle.com if you need additional information or have any * questions. 
*/ /* * $Id: XMLSignatureFactory.java,v 1.14 2005/09/15 14:29:01 mullan Exp $ */ package javax.xml.crypto.dsig; import javax.xml.crypto.Data; import javax.xml.crypto.MarshalException; import javax.xml.crypto.NoSuchMechanismException; import javax.xml.crypto.URIDereferencer; import javax.xml.crypto.XMLStructure; import javax.xml.crypto.dom.DOMStructure; import javax.xml.crypto.dsig.keyinfo.KeyInfo; import javax.xml.crypto.dsig.keyinfo.KeyInfoFactory; import javax.xml.crypto.dsig.spec.*; import javax.xml.crypto.dsig.dom.DOMValidateContext; import javax.xml.crypto.dsig.dom.DOMSignContext; import java.security.InvalidAlgorithmParameterException; import java.security.NoSuchAlgorithmException; import java.security.NoSuchProviderException; import java.security.Provider; import java.security.Security; import java.util.List; import sun.security.jca.*; import sun.security.jca.GetInstance.Instance; /** * A factory for creating {@link XMLSignature} objects from scratch or * for unmarshalling an <code>XMLSignature</code> object from a corresponding * XML representation. * * <h2>XMLSignatureFactory Type</h2> * * <p>Each instance of <code>XMLSignatureFactory</code> supports a specific * XML mechanism type. To create an <code>XMLSignatureFactory</code>, call one * of the static {@link #getInstance getInstance} methods, passing in the XML * mechanism type desired, for example: * * <blockquote><code> * XMLSignatureFactory factory = XMLSignatureFactory.getInstance("DOM"); * </code></blockquote> * * <p>The objects that this factory produces will be based * on DOM and abide by the DOM interoperability requirements as defined in the * <a href="../../../../../technotes/guides/security/xmldsig/overview.html#DOM Mechanism Requirements"> * DOM Mechanism Requirements</a> section of the API overview. See the * <a href="../../../../../technotes/guides/security/xmldsig/overview.html#Service Provider"> * Service Providers</a> section of the API overview for a list of standard * mechanism types. 
* * <p><code>XMLSignatureFactory</code> implementations are registered and loaded * using the {@link java.security.Provider} mechanism. * For example, a service provider that supports the * DOM mechanism would be specified in the <code>Provider</code> subclass as: * <pre> * put("XMLSignatureFactory.DOM", "org.example.DOMXMLSignatureFactory"); * </pre> * * <p>An implementation MUST minimally support the default mechanism type: DOM. * * <p>Note that a caller must use the same <code>XMLSignatureFactory</code> * instance to create the <code>XMLStructure</code>s of a particular * <code>XMLSignature</code> that is to be generated. The behavior is * undefined if <code>XMLStructure</code>s from different providers or * different mechanism types are used together. * * <p>Also, the <code>XMLStructure</code>s that are created by this factory * may contain state specific to the <code>XMLSignature</code> and are not * intended to be reusable. * * <h2>Creating XMLSignatures from scratch</h2> * * <p>Once the <code>XMLSignatureFactory</code> has been created, objects * can be instantiated by calling the appropriate method. For example, a * {@link Reference} instance may be created by invoking one of the * {@link #newReference newReference} methods. * * <h2>Unmarshalling XMLSignatures from XML</h2> * * <p>Alternatively, an <code>XMLSignature</code> may be created from an * existing XML representation by invoking the {@link #unmarshalXMLSignature * unmarshalXMLSignature} method and passing it a mechanism-specific * {@link XMLValidateContext} instance containing the XML content: * * <pre> * DOMValidateContext context = new DOMValidateContext(key, signatureElement); * XMLSignature signature = factory.unmarshalXMLSignature(context); * </pre> * * Each <code>XMLSignatureFactory</code> must support the required * <code>XMLValidateContext</code> types for that factory type, but may support * others. A DOM <code>XMLSignatureFactory</code> must support {@link * DOMValidateContext} objects. 
*
 * <h2>Signing and marshalling XMLSignatures to XML</h2>
 *
 * Each <code>XMLSignature</code> created by the factory can also be
 * marshalled to an XML representation and signed, by invoking the
 * {@link XMLSignature#sign sign} method of the
 * {@link XMLSignature} object and passing it a mechanism-specific
 * {@link XMLSignContext} object containing the signing key and
 * marshalling parameters (see {@link DOMSignContext}).
 * For example:
 *
 * <pre>
 *   DOMSignContext context = new DOMSignContext(privateKey, document);
 *   signature.sign(context);
 * </pre>
 *
 * <b>Concurrent Access</b>
 * <p>The static methods of this class are guaranteed to be thread-safe.
 * Multiple threads may concurrently invoke the static methods defined in this
 * class with no ill effects.
 *
 * <p>However, this is not true for the non-static methods defined by this
 * class. Unless otherwise documented by a specific provider, threads that
 * need to access a single <code>XMLSignatureFactory</code> instance
 * concurrently should synchronize amongst themselves and provide the
 * necessary locking. Multiple threads each manipulating a different
 * <code>XMLSignatureFactory</code> instance need not synchronize.
 *
 * @author Sean Mullan
 * @author JSR 105 Expert Group
 * @since 1.6
 */
public abstract class XMLSignatureFactory {

    // Mechanism type (e.g. "DOM") this factory supports; assigned by the
    // static getInstance methods immediately after instantiation.
    private String mechanismType;

    // JCA Provider that supplied this factory's implementation; assigned by
    // the static getInstance methods immediately after instantiation.
    private Provider provider;

    /**
     * Default constructor, for invocation by subclasses.
     */
    protected XMLSignatureFactory() {}

    /**
     * Returns an <code>XMLSignatureFactory</code> that supports the
     * specified XML processing mechanism and representation type (ex: "DOM").
     *
     * <p>This method uses the standard JCA provider lookup mechanism to
     * locate and instantiate an <code>XMLSignatureFactory</code>
     * implementation of the desired mechanism type. It traverses the list of
     * registered security <code>Provider</code>s, starting with the most
     * preferred <code>Provider</code>.
A new <code>XMLSignatureFactory</code> * object from the first <code>Provider</code> that supports the specified * mechanism is returned. * * <p>Note that the list of registered providers may be retrieved via * the {@link Security#getProviders() Security.getProviders()} method. * * @param mechanismType the type of the XML processing mechanism and * representation. See the <a * href="../../../../../technotes/guides/security/xmldsig/overview.html#Service Provider"> * Service Providers</a> section of the API overview for a list of * standard mechanism types. * @return a new <code>XMLSignatureFactory</code> * @throws NullPointerException if <code>mechanismType</code> is * <code>null</code> * @throws NoSuchMechanismException if no <code>Provider</code> supports an * <code>XMLSignatureFactory</code> implementation for the specified * mechanism * @see Provider */ public static XMLSignatureFactory getInstance(String mechanismType) { if (mechanismType == null) { throw new NullPointerException("mechanismType cannot be null"); } Instance instance; try { instance = GetInstance.getInstance ("XMLSignatureFactory", null, mechanismType); } catch (NoSuchAlgorithmException nsae) { throw new NoSuchMechanismException(nsae); } XMLSignatureFactory factory = (XMLSignatureFactory) instance.impl; factory.mechanismType = mechanismType; factory.provider = instance.provider; return factory; } /** * Returns an <code>XMLSignatureFactory</code> that supports the * requested XML processing mechanism and representation type (ex: "DOM"), * as supplied by the specified provider. Note that the specified * <code>Provider</code> object does not have to be registered in the * provider list. * * @param mechanismType the type of the XML processing mechanism and * representation. See the <a * href="../../../../../technotes/guides/security/xmldsig/overview.html#Service Provider"> * Service Providers</a> section of the API overview for a list of * standard mechanism types. 
* @param provider the <code>Provider</code> object * @return a new <code>XMLSignatureFactory</code> * @throws NullPointerException if <code>provider</code> or * <code>mechanismType</code> is <code>null</code> * @throws NoSuchMechanismException if an <code>XMLSignatureFactory</code> * implementation for the specified mechanism is not available * from the specified <code>Provider</code> object * @see Provider */ public static XMLSignatureFactory getInstance(String mechanismType, Provider provider) { if (mechanismType == null) { throw new NullPointerException("mechanismType cannot be null"); } else if (provider == null) { throw new NullPointerException("provider cannot be null"); } Instance instance; try { instance = GetInstance.getInstance ("XMLSignatureFactory", null, mechanismType, provider); } catch (NoSuchAlgorithmException nsae) { throw new NoSuchMechanismException(nsae); } XMLSignatureFactory factory = (XMLSignatureFactory) instance.impl; factory.mechanismType = mechanismType; factory.provider = instance.provider; return factory; } /** * Returns an <code>XMLSignatureFactory</code> that supports the * requested XML processing mechanism and representation type (ex: "DOM"), * as supplied by the specified provider. The specified provider must be * registered in the security provider list. * * <p>Note that the list of registered providers may be retrieved via * the {@link Security#getProviders() Security.getProviders()} method. * * @param mechanismType the type of the XML processing mechanism and * representation. See the <a * href="../../../../../technotes/guides/security/xmldsig/overview.html#Service Provider"> * Service Providers</a> section of the API overview for a list of * standard mechanism types. 
* @param provider the string name of the provider * @return a new <code>XMLSignatureFactory</code> * @throws NoSuchProviderException if the specified provider is not * registered in the security provider list * @throws NullPointerException if <code>provider</code> or * <code>mechanismType</code> is <code>null</code> * @throws NoSuchMechanismException if an <code>XMLSignatureFactory</code> * implementation for the specified mechanism is not * available from the specified provider * @see Provider */ public static XMLSignatureFactory getInstance(String mechanismType, String provider) throws NoSuchProviderException { if (mechanismType == null) { throw new NullPointerException("mechanismType cannot be null"); } else if (provider == null) { throw new NullPointerException("provider cannot be null"); } else if (provider.length() == 0) { throw new NoSuchProviderException(); } Instance instance; try { instance = GetInstance.getInstance ("XMLSignatureFactory", null, mechanismType, provider); } catch (NoSuchAlgorithmException nsae) { throw new NoSuchMechanismException(nsae); } XMLSignatureFactory factory = (XMLSignatureFactory) instance.impl; factory.mechanismType = mechanismType; factory.provider = instance.provider; return factory; } /** * Returns an <code>XMLSignatureFactory</code> that supports the * default XML processing mechanism and representation type ("DOM"). * * <p>This method uses the standard JCA provider lookup mechanism to * locate and instantiate an <code>XMLSignatureFactory</code> * implementation of the default mechanism type. It traverses the list of * registered security <code>Provider</code>s, starting with the most * preferred <code>Provider</code>. A new <code>XMLSignatureFactory</code> * object from the first <code>Provider</code> that supports the DOM * mechanism is returned. * * <p>Note that the list of registered providers may be retrieved via * the {@link Security#getProviders() Security.getProviders()} method. 
* * @return a new <code>XMLSignatureFactory</code> * @throws NoSuchMechanismException if no <code>Provider</code> supports an * <code>XMLSignatureFactory</code> implementation for the DOM * mechanism * @see Provider */ public static XMLSignatureFactory getInstance() { return getInstance("DOM"); } /** * Returns the type of the XML processing mechanism and representation * supported by this <code>XMLSignatureFactory</code> (ex: "DOM"). * * @return the XML processing mechanism type supported by this * <code>XMLSignatureFactory</code> */ public final String getMechanismType() { return mechanismType; } /** * Returns the provider of this <code>XMLSignatureFactory</code>. * * @return the provider of this <code>XMLSignatureFactory</code> */ public final Provider getProvider() { return provider; } /** * Creates an <code>XMLSignature</code> and initializes it with the contents * of the specified <code>SignedInfo</code> and <code>KeyInfo</code> * objects. * * @param si the signed info * @param ki the key info (may be <code>null</code>) * @return an <code>XMLSignature</code> * @throws NullPointerException if <code>si</code> is <code>null</code> */ public abstract XMLSignature newXMLSignature(SignedInfo si, KeyInfo ki); /** * Creates an <code>XMLSignature</code> and initializes it with the * specified parameters. 
*
     * @param si the signed info
     * @param ki the key info (may be <code>null</code>)
     * @param objects a list of {@link XMLObject}s (may be empty or
     *    <code>null</code>)
     * @param id the Id (may be <code>null</code>)
     * @param signatureValueId the SignatureValue Id (may be <code>null</code>)
     * @return an <code>XMLSignature</code>
     * @throws NullPointerException if <code>si</code> is <code>null</code>
     * @throws ClassCastException if any of the <code>objects</code> are not of
     *    type <code>XMLObject</code>
     */
    @SuppressWarnings("rawtypes")
    public abstract XMLSignature newXMLSignature(SignedInfo si, KeyInfo ki,
        List objects, String id, String signatureValueId);

    /**
     * Creates a <code>Reference</code> with the specified URI and digest
     * method.
     *
     * @param uri the reference URI (may be <code>null</code>)
     * @param dm the digest method
     * @return a <code>Reference</code>
     * @throws IllegalArgumentException if <code>uri</code> is not RFC 2396
     *    compliant
     * @throws NullPointerException if <code>dm</code> is <code>null</code>
     */
    public abstract Reference newReference(String uri, DigestMethod dm);

    /**
     * Creates a <code>Reference</code> with the specified parameters.
     *
     * @param uri the reference URI (may be <code>null</code>)
     * @param dm the digest method
     * @param transforms a list of {@link Transform}s. The list is defensively
     *    copied to protect against subsequent modification. May be
     *    <code>null</code> or empty.
     * @param type the reference type, as a URI (may be <code>null</code>)
     * @param id the reference ID (may be <code>null</code>)
     * @return a <code>Reference</code>
     * @throws ClassCastException if any of the <code>transforms</code> are
     *    not of type <code>Transform</code>
     * @throws IllegalArgumentException if <code>uri</code> is not RFC 2396
     *    compliant
     * @throws NullPointerException if <code>dm</code> is <code>null</code>
     */
    @SuppressWarnings("rawtypes")
    public abstract Reference newReference(String uri, DigestMethod dm,
        List transforms, String type, String id);

    /**
     * Creates a <code>Reference</code> with the specified parameters and
     * pre-calculated digest value.
     *
     * <p>This method is useful when the digest value of a
     * <code>Reference</code> has been previously computed. See for example,
     * the
     * <a href="http://www.oasis-open.org/committees/tc_home.php?wg_abbrev=dss">
     * OASIS-DSS (Digital Signature Services)</a> specification.
     *
     * @param uri the reference URI (may be <code>null</code>)
     * @param dm the digest method
     * @param transforms a list of {@link Transform}s. The list is defensively
     *    copied to protect against subsequent modification. May be
     *    <code>null</code> or empty.
     * @param type the reference type, as a URI (may be <code>null</code>)
     * @param id the reference ID (may be <code>null</code>)
     * @param digestValue the digest value. The array is cloned to protect
     *    against subsequent modification.
     * @return a <code>Reference</code>
     * @throws ClassCastException if any of the <code>transforms</code> are
     *    not of type <code>Transform</code>
     * @throws IllegalArgumentException if <code>uri</code> is not RFC 2396
     *    compliant
     * @throws NullPointerException if <code>dm</code> or
     *    <code>digestValue</code> is <code>null</code>
     */
    @SuppressWarnings("rawtypes")
    public abstract Reference newReference(String uri, DigestMethod dm,
        List transforms, String type, String id, byte[] digestValue);

    /**
     * Creates a <code>Reference</code> with the specified parameters.
     *
     * <p>This method is useful when a list of transforms have already been
     * applied to the <code>Reference</code>. See for example, the
     * <a href="http://www.oasis-open.org/committees/tc_home.php?wg_abbrev=dss">
     * OASIS-DSS (Digital Signature Services)</a> specification.
     *
     * <p>When an <code>XMLSignature</code> containing this reference is
     * generated, the specified <code>transforms</code> (if non-null) are
     * applied to the specified <code>result</code>. The
     * <code>Transforms</code> element of the resulting <code>Reference</code>
     * element is set to the concatenation of the
     * <code>appliedTransforms</code> and <code>transforms</code>.
     *
     * @param uri the reference URI (may be <code>null</code>)
     * @param dm the digest method
     * @param appliedTransforms a list of {@link Transform}s that have
     *    already been applied. The list is defensively
     *    copied to protect against subsequent modification. The list must
     *    contain at least one entry.
     * @param result the result of processing the sequence of
     *    <code>appliedTransforms</code>
     * @param transforms a list of {@link Transform}s that are to be applied
     *    when generating the signature. The list is defensively copied to
     *    protect against subsequent modification. May be <code>null</code>
     *    or empty.
     * @param type the reference type, as a URI (may be <code>null</code>)
     * @param id the reference ID (may be <code>null</code>)
     * @return a <code>Reference</code>
     * @throws ClassCastException if any of the transforms (in either list)
     *    are not of type <code>Transform</code>
     * @throws IllegalArgumentException if <code>uri</code> is not RFC 2396
     *    compliant or <code>appliedTransforms</code> is empty
     * @throws NullPointerException if <code>dm</code>,
     *    <code>appliedTransforms</code> or <code>result</code> is
     *    <code>null</code>
     */
    @SuppressWarnings("rawtypes")
    public abstract Reference newReference(String uri, DigestMethod dm,
        List appliedTransforms, Data result, List transforms, String type,
        String id);

    /**
     * Creates a <code>SignedInfo</code> with the specified canonicalization
     * and signature methods, and list of one or more references.
     *
     * @param cm the canonicalization method
     * @param sm the signature method
     * @param references a list of one or more {@link Reference}s. The list is
     *    defensively copied to protect against subsequent modification.
     * @return a <code>SignedInfo</code>
     * @throws ClassCastException if any of the references are not of
     *    type <code>Reference</code>
     * @throws IllegalArgumentException if <code>references</code> is empty
     * @throws NullPointerException if any of the parameters
     *    are <code>null</code>
     */
    @SuppressWarnings("rawtypes")
    public abstract SignedInfo newSignedInfo(CanonicalizationMethod cm,
        SignatureMethod sm, List references);

    /**
     * Creates a <code>SignedInfo</code> with the specified parameters.
     *
     * @param cm the canonicalization method
     * @param sm the signature method
     * @param references a list of one or more {@link Reference}s. The list is
     *    defensively copied to protect against subsequent modification.
     * @param id the id (may be <code>null</code>)
     * @return a <code>SignedInfo</code>
     * @throws ClassCastException if any of the references are not of
     *    type <code>Reference</code>
     * @throws IllegalArgumentException if <code>references</code> is empty
     * @throws NullPointerException if <code>cm</code>, <code>sm</code>, or
     *    <code>references</code> are <code>null</code>
     */
    @SuppressWarnings("rawtypes")
    public abstract SignedInfo newSignedInfo(CanonicalizationMethod cm,
        SignatureMethod sm, List references, String id);

    // Object factory methods

    /**
     * Creates an <code>XMLObject</code> from the specified parameters.
     *
     * @param content a list of {@link XMLStructure}s. The list
     *    is defensively copied to protect against subsequent modification.
     *    May be <code>null</code> or empty.
     * @param id the Id (may be <code>null</code>)
     * @param mimeType the mime type (may be <code>null</code>)
     * @param encoding the encoding (may be <code>null</code>)
     * @return an <code>XMLObject</code>
     * @throws ClassCastException if <code>content</code> contains any
     *    entries that are not of type {@link XMLStructure}
     */
    @SuppressWarnings("rawtypes")
    public abstract XMLObject newXMLObject(List content, String id,
        String mimeType, String encoding);

    /**
     * Creates a <code>Manifest</code> containing the specified
     * list of {@link Reference}s.
     *
     * @param references a list of one or more <code>Reference</code>s. The list
     *    is defensively copied to protect against subsequent modification.
     * @return a <code>Manifest</code>
     * @throws NullPointerException if <code>references</code> is
     *    <code>null</code>
     * @throws IllegalArgumentException if <code>references</code> is empty
     * @throws ClassCastException if <code>references</code> contains any
     *    entries that are not of type {@link Reference}
     */
    @SuppressWarnings("rawtypes")
    public abstract Manifest newManifest(List references);

    /**
     * Creates a <code>Manifest</code> containing the specified
     * list of {@link Reference}s and optional id.
     *
     * @param references a list of one or more <code>Reference</code>s. The list
     *    is defensively copied to protect against subsequent modification.
     * @param id the id (may be <code>null</code>)
     * @return a <code>Manifest</code>
     * @throws NullPointerException if <code>references</code> is
     *    <code>null</code>
     * @throws IllegalArgumentException if <code>references</code> is empty
     * @throws ClassCastException if <code>references</code> contains any
     *    entries that are not of type {@link Reference}
     */
    @SuppressWarnings("rawtypes")
    public abstract Manifest newManifest(List references, String id);

    /**
     * Creates a <code>SignatureProperty</code> containing the specified
     * list of {@link XMLStructure}s, target URI and optional id.
     *
     * @param content a list of one or more <code>XMLStructure</code>s. The list
     *    is defensively copied to protect against subsequent modification.
     * @param target the target URI of the Signature that this property applies
     *    to
     * @param id the id (may be <code>null</code>)
     * @return a <code>SignatureProperty</code>
     * @throws NullPointerException if <code>content</code> or
     *    <code>target</code> is <code>null</code>
     * @throws IllegalArgumentException if <code>content</code> is empty
     * @throws ClassCastException if <code>content</code> contains any
     *    entries that are not of type {@link XMLStructure}
     */
    @SuppressWarnings("rawtypes")
    public abstract SignatureProperty newSignatureProperty
        (List content, String target, String id);

    /**
     * Creates a <code>SignatureProperties</code> containing the specified
     * list of {@link SignatureProperty}s and optional id.
     *
     * @param properties a list of one or more <code>SignatureProperty</code>s.
     *    The list is defensively copied to protect against subsequent
     *    modification.
     * @param id the id (may be <code>null</code>)
     * @return a <code>SignatureProperties</code>
     * @throws NullPointerException if <code>properties</code>
     *    is <code>null</code>
     * @throws IllegalArgumentException if <code>properties</code> is empty
     * @throws ClassCastException if <code>properties</code> contains any
     *    entries that are not of type {@link SignatureProperty}
     */
    @SuppressWarnings("rawtypes")
    public abstract SignatureProperties newSignatureProperties
        (List properties, String id);

    // Algorithm factory methods

    /**
     * Creates a <code>DigestMethod</code> for the specified algorithm URI
     * and parameters.
     *
     * @param algorithm the URI identifying the digest algorithm
     * @param params algorithm-specific digest parameters (may be
     *    <code>null</code>)
     * @return the <code>DigestMethod</code>
     * @throws InvalidAlgorithmParameterException if the specified parameters
     *    are inappropriate for the requested algorithm
     * @throws NoSuchAlgorithmException if an implementation of the
     *    specified algorithm cannot be found
     * @throws NullPointerException if <code>algorithm</code> is
     *    <code>null</code>
     */
    public abstract DigestMethod newDigestMethod(String algorithm,
        DigestMethodParameterSpec params) throws NoSuchAlgorithmException,
        InvalidAlgorithmParameterException;

    /**
     * Creates a <code>SignatureMethod</code> for the specified algorithm URI
     * and parameters.
*
* @param algorithm the URI identifying the signature algorithm
* @param params algorithm-specific signature parameters (may be
*    <code>null</code>)
* @return the <code>SignatureMethod</code>
* @throws InvalidAlgorithmParameterException if the specified parameters
*    are inappropriate for the requested algorithm
* @throws NoSuchAlgorithmException if an implementation of the
*    specified algorithm cannot be found
* @throws NullPointerException if <code>algorithm</code> is
*    <code>null</code>
*/
public abstract SignatureMethod newSignatureMethod(String algorithm,
    SignatureMethodParameterSpec params) throws NoSuchAlgorithmException,
    InvalidAlgorithmParameterException;

/**
 * Creates a <code>Transform</code> for the specified algorithm URI
 * and parameters.
 *
 * @param algorithm the URI identifying the transform algorithm
 * @param params algorithm-specific transform parameters (may be
 *    <code>null</code>)
 * @return the <code>Transform</code>
 * @throws InvalidAlgorithmParameterException if the specified parameters
 *    are inappropriate for the requested algorithm
 * @throws NoSuchAlgorithmException if an implementation of the
 *    specified algorithm cannot be found
 * @throws NullPointerException if <code>algorithm</code> is
 *    <code>null</code>
 */
public abstract Transform newTransform(String algorithm,
    TransformParameterSpec params) throws NoSuchAlgorithmException,
    InvalidAlgorithmParameterException;

/**
 * Creates a <code>Transform</code> for the specified algorithm URI
 * and parameters. The parameters are specified as a mechanism-specific
 * <code>XMLStructure</code> (ex: {@link DOMStructure}). This method is
 * useful when the parameters are in XML form or there is no standard
 * class for specifying the parameters.
 *
 * @param algorithm the URI identifying the transform algorithm
 * @param params a mechanism-specific XML structure from which to
 *    unmarshal the parameters from (may be <code>null</code> if
 *    not required or optional)
 * @return the <code>Transform</code>
 * @throws ClassCastException if the type of <code>params</code> is
 *    inappropriate for this <code>XMLSignatureFactory</code>
 * @throws InvalidAlgorithmParameterException if the specified parameters
 *    are inappropriate for the requested algorithm
 * @throws NoSuchAlgorithmException if an implementation of the
 *    specified algorithm cannot be found
 * @throws NullPointerException if <code>algorithm</code> is
 *    <code>null</code>
 */
public abstract Transform newTransform(String algorithm,
    XMLStructure params) throws NoSuchAlgorithmException,
    InvalidAlgorithmParameterException;

/**
 * Creates a <code>CanonicalizationMethod</code> for the specified
 * algorithm URI and parameters.
 *
 * @param algorithm the URI identifying the canonicalization algorithm
 * @param params algorithm-specific canonicalization parameters (may be
 *    <code>null</code>)
 * @return the <code>CanonicalizationMethod</code>
 * @throws InvalidAlgorithmParameterException if the specified parameters
 *    are inappropriate for the requested algorithm
 * @throws NoSuchAlgorithmException if an implementation of the
 *    specified algorithm cannot be found
 * @throws NullPointerException if <code>algorithm</code> is
 *    <code>null</code>
 */
public abstract CanonicalizationMethod newCanonicalizationMethod(
    String algorithm, C14NMethodParameterSpec params)
    throws NoSuchAlgorithmException, InvalidAlgorithmParameterException;

/**
 * Creates a <code>CanonicalizationMethod</code> for the specified
 * algorithm URI and parameters. The parameters are specified as a
 * mechanism-specific <code>XMLStructure</code> (ex: {@link DOMStructure}).
 * This method is useful when the parameters are in XML form or there is
 * no standard class for specifying the parameters.
 *
 * @param algorithm the URI identifying the canonicalization algorithm
 * @param params a mechanism-specific XML structure from which to
 *    unmarshal the parameters from (may be <code>null</code> if
 *    not required or optional)
 * @return the <code>CanonicalizationMethod</code>
 * @throws ClassCastException if the type of <code>params</code> is
 *    inappropriate for this <code>XMLSignatureFactory</code>
 * @throws InvalidAlgorithmParameterException if the specified parameters
 *    are inappropriate for the requested algorithm
 * @throws NoSuchAlgorithmException if an implementation of the
 *    specified algorithm cannot be found
 * @throws NullPointerException if <code>algorithm</code> is
 *    <code>null</code>
 */
public abstract CanonicalizationMethod newCanonicalizationMethod(
    String algorithm, XMLStructure params)
    throws NoSuchAlgorithmException, InvalidAlgorithmParameterException;

/**
 * Returns a <code>KeyInfoFactory</code> that creates <code>KeyInfo</code>
 * objects. The returned <code>KeyInfoFactory</code> has the same
 * mechanism type and provider as this <code>XMLSignatureFactory</code>.
 *
 * @return a <code>KeyInfoFactory</code>
 * @throws NoSuchMechanismException if a <code>KeyFactory</code>
 *    implementation with the same mechanism type and provider
 *    is not available
 */
public final KeyInfoFactory getKeyInfoFactory() {
    // Propagates this factory's own mechanism type and provider, so the
    // returned KeyInfoFactory is the matching one (per the contract above).
    return KeyInfoFactory.getInstance(getMechanismType(), getProvider());
}

/**
 * Unmarshals a new <code>XMLSignature</code> instance from a
 * mechanism-specific <code>XMLValidateContext</code> instance.
 *
 * @param context a mechanism-specific context from which to unmarshal the
 *    signature from
 * @return the <code>XMLSignature</code>
 * @throws NullPointerException if <code>context</code> is
 *    <code>null</code>
 * @throws ClassCastException if the type of <code>context</code> is
 *    inappropriate for this factory
 * @throws MarshalException if an unrecoverable exception occurs
 *    during unmarshalling
 */
public abstract XMLSignature unmarshalXMLSignature
    (XMLValidateContext context) throws MarshalException;

/**
 * Unmarshals a new <code>XMLSignature</code> instance from a
 * mechanism-specific <code>XMLStructure</code> instance.
 * This method is useful if you only want to unmarshal (and not
 * validate) an <code>XMLSignature</code>.
 *
 * @param xmlStructure a mechanism-specific XML structure from which to
 *    unmarshal the signature from
 * @return the <code>XMLSignature</code>
 * @throws NullPointerException if <code>xmlStructure</code> is
 *    <code>null</code>
 * @throws ClassCastException if the type of <code>xmlStructure</code> is
 *    inappropriate for this factory
 * @throws MarshalException if an unrecoverable exception occurs
 *    during unmarshalling
 */
public abstract XMLSignature unmarshalXMLSignature
    (XMLStructure xmlStructure) throws MarshalException;

/**
 * Indicates whether a specified feature is supported.
 *
 * @param feature the feature name (as an absolute URI)
 * @return <code>true</code> if the specified feature is supported,
 *    <code>false</code> otherwise
 * @throws NullPointerException if <code>feature</code> is <code>null</code>
 */
public abstract boolean isFeatureSupported(String feature);

/**
 * Returns a reference to the <code>URIDereferencer</code> that is used by
 * default to dereference URIs in {@link Reference} objects.
 *
 * @return a reference to the default <code>URIDereferencer</code> (never
 *    <code>null</code>)
 */
public abstract URIDereferencer getURIDereferencer();
}
apache/sentry
36,155
sentry-service/sentry-service-api/src/gen/thrift/gen-javabean/org/apache/sentry/api/generic/thrift/TSentryPrivilege.java
/** * Autogenerated by Thrift Compiler (0.9.3) * * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING * @generated */ package org.apache.sentry.api.generic.thrift; import org.apache.thrift.scheme.IScheme; import org.apache.thrift.scheme.SchemeFactory; import org.apache.thrift.scheme.StandardScheme; import org.apache.thrift.scheme.TupleScheme; import org.apache.thrift.protocol.TTupleProtocol; import org.apache.thrift.protocol.TProtocolException; import org.apache.thrift.EncodingUtils; import org.apache.thrift.TException; import org.apache.thrift.async.AsyncMethodCallback; import org.apache.thrift.server.AbstractNonblockingServer.*; import java.util.List; import java.util.ArrayList; import java.util.Map; import java.util.HashMap; import java.util.EnumMap; import java.util.Set; import java.util.HashSet; import java.util.EnumSet; import java.util.Collections; import java.util.BitSet; import java.nio.ByteBuffer; import java.util.Arrays; import javax.annotation.Generated; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @SuppressWarnings({"cast", "rawtypes", "serial", "unchecked"}) @Generated(value = "Autogenerated by Thrift Compiler (0.9.3)") public class TSentryPrivilege implements org.apache.thrift.TBase<TSentryPrivilege, TSentryPrivilege._Fields>, java.io.Serializable, Cloneable, Comparable<TSentryPrivilege> { private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("TSentryPrivilege"); private static final org.apache.thrift.protocol.TField COMPONENT_FIELD_DESC = new org.apache.thrift.protocol.TField("component", org.apache.thrift.protocol.TType.STRING, (short)1); private static final org.apache.thrift.protocol.TField SERVICE_NAME_FIELD_DESC = new org.apache.thrift.protocol.TField("serviceName", org.apache.thrift.protocol.TType.STRING, (short)2); private static final org.apache.thrift.protocol.TField AUTHORIZABLES_FIELD_DESC = new org.apache.thrift.protocol.TField("authorizables", 
org.apache.thrift.protocol.TType.LIST, (short)3); private static final org.apache.thrift.protocol.TField ACTION_FIELD_DESC = new org.apache.thrift.protocol.TField("action", org.apache.thrift.protocol.TType.STRING, (short)4); private static final org.apache.thrift.protocol.TField CREATE_TIME_FIELD_DESC = new org.apache.thrift.protocol.TField("createTime", org.apache.thrift.protocol.TType.I64, (short)5); private static final org.apache.thrift.protocol.TField GRANTOR_PRINCIPAL_FIELD_DESC = new org.apache.thrift.protocol.TField("grantorPrincipal", org.apache.thrift.protocol.TType.STRING, (short)6); private static final org.apache.thrift.protocol.TField GRANT_OPTION_FIELD_DESC = new org.apache.thrift.protocol.TField("grantOption", org.apache.thrift.protocol.TType.I32, (short)7); private static final Map<Class<? extends IScheme>, SchemeFactory> schemes = new HashMap<Class<? extends IScheme>, SchemeFactory>(); static { schemes.put(StandardScheme.class, new TSentryPrivilegeStandardSchemeFactory()); schemes.put(TupleScheme.class, new TSentryPrivilegeTupleSchemeFactory()); } private String component; // required private String serviceName; // required private List<TAuthorizable> authorizables; // required private String action; // required private long createTime; // optional private String grantorPrincipal; // optional private TSentryGrantOption grantOption; // optional /** The set of fields this struct contains, along with convenience methods for finding and manipulating them. 
*/ public enum _Fields implements org.apache.thrift.TFieldIdEnum { COMPONENT((short)1, "component"), SERVICE_NAME((short)2, "serviceName"), AUTHORIZABLES((short)3, "authorizables"), ACTION((short)4, "action"), CREATE_TIME((short)5, "createTime"), GRANTOR_PRINCIPAL((short)6, "grantorPrincipal"), /** * * @see TSentryGrantOption */ GRANT_OPTION((short)7, "grantOption"); private static final Map<String, _Fields> byName = new HashMap<String, _Fields>(); static { for (_Fields field : EnumSet.allOf(_Fields.class)) { byName.put(field.getFieldName(), field); } } /** * Find the _Fields constant that matches fieldId, or null if its not found. */ public static _Fields findByThriftId(int fieldId) { switch(fieldId) { case 1: // COMPONENT return COMPONENT; case 2: // SERVICE_NAME return SERVICE_NAME; case 3: // AUTHORIZABLES return AUTHORIZABLES; case 4: // ACTION return ACTION; case 5: // CREATE_TIME return CREATE_TIME; case 6: // GRANTOR_PRINCIPAL return GRANTOR_PRINCIPAL; case 7: // GRANT_OPTION return GRANT_OPTION; default: return null; } } /** * Find the _Fields constant that matches fieldId, throwing an exception * if it is not found. */ public static _Fields findByThriftIdOrThrow(int fieldId) { _Fields fields = findByThriftId(fieldId); if (fields == null) throw new IllegalArgumentException("Field " + fieldId + " doesn't exist!"); return fields; } /** * Find the _Fields constant that matches name, or null if its not found. 
*/ public static _Fields findByName(String name) { return byName.get(name); } private final short _thriftId; private final String _fieldName; _Fields(short thriftId, String fieldName) { _thriftId = thriftId; _fieldName = fieldName; } public short getThriftFieldId() { return _thriftId; } public String getFieldName() { return _fieldName; } } // isset id assignments private static final int __CREATETIME_ISSET_ID = 0; private byte __isset_bitfield = 0; private static final _Fields optionals[] = {_Fields.CREATE_TIME,_Fields.GRANTOR_PRINCIPAL,_Fields.GRANT_OPTION}; public static final Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> metaDataMap; static { Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> tmpMap = new EnumMap<_Fields, org.apache.thrift.meta_data.FieldMetaData>(_Fields.class); tmpMap.put(_Fields.COMPONENT, new org.apache.thrift.meta_data.FieldMetaData("component", org.apache.thrift.TFieldRequirementType.REQUIRED, new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.STRING))); tmpMap.put(_Fields.SERVICE_NAME, new org.apache.thrift.meta_data.FieldMetaData("serviceName", org.apache.thrift.TFieldRequirementType.REQUIRED, new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.STRING))); tmpMap.put(_Fields.AUTHORIZABLES, new org.apache.thrift.meta_data.FieldMetaData("authorizables", org.apache.thrift.TFieldRequirementType.REQUIRED, new org.apache.thrift.meta_data.ListMetaData(org.apache.thrift.protocol.TType.LIST, new org.apache.thrift.meta_data.StructMetaData(org.apache.thrift.protocol.TType.STRUCT, TAuthorizable.class)))); tmpMap.put(_Fields.ACTION, new org.apache.thrift.meta_data.FieldMetaData("action", org.apache.thrift.TFieldRequirementType.REQUIRED, new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.STRING))); tmpMap.put(_Fields.CREATE_TIME, new org.apache.thrift.meta_data.FieldMetaData("createTime", org.apache.thrift.TFieldRequirementType.OPTIONAL, new 
org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.I64))); tmpMap.put(_Fields.GRANTOR_PRINCIPAL, new org.apache.thrift.meta_data.FieldMetaData("grantorPrincipal", org.apache.thrift.TFieldRequirementType.OPTIONAL, new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.STRING))); tmpMap.put(_Fields.GRANT_OPTION, new org.apache.thrift.meta_data.FieldMetaData("grantOption", org.apache.thrift.TFieldRequirementType.OPTIONAL, new org.apache.thrift.meta_data.EnumMetaData(org.apache.thrift.protocol.TType.ENUM, TSentryGrantOption.class))); metaDataMap = Collections.unmodifiableMap(tmpMap); org.apache.thrift.meta_data.FieldMetaData.addStructMetaDataMap(TSentryPrivilege.class, metaDataMap); } public TSentryPrivilege() { this.grantOption = org.apache.sentry.api.generic.thrift.TSentryGrantOption.FALSE; } public TSentryPrivilege( String component, String serviceName, List<TAuthorizable> authorizables, String action) { this(); this.component = component; this.serviceName = serviceName; this.authorizables = authorizables; this.action = action; } /** * Performs a deep copy on <i>other</i>. 
*/ public TSentryPrivilege(TSentryPrivilege other) { __isset_bitfield = other.__isset_bitfield; if (other.isSetComponent()) { this.component = other.component; } if (other.isSetServiceName()) { this.serviceName = other.serviceName; } if (other.isSetAuthorizables()) { List<TAuthorizable> __this__authorizables = new ArrayList<TAuthorizable>(other.authorizables.size()); for (TAuthorizable other_element : other.authorizables) { __this__authorizables.add(new TAuthorizable(other_element)); } this.authorizables = __this__authorizables; } if (other.isSetAction()) { this.action = other.action; } this.createTime = other.createTime; if (other.isSetGrantorPrincipal()) { this.grantorPrincipal = other.grantorPrincipal; } if (other.isSetGrantOption()) { this.grantOption = other.grantOption; } } public TSentryPrivilege deepCopy() { return new TSentryPrivilege(this); } @Override public void clear() { this.component = null; this.serviceName = null; this.authorizables = null; this.action = null; setCreateTimeIsSet(false); this.createTime = 0; this.grantorPrincipal = null; this.grantOption = org.apache.sentry.api.generic.thrift.TSentryGrantOption.FALSE; } public String getComponent() { return this.component; } public void setComponent(String component) { this.component = component; } public void unsetComponent() { this.component = null; } /** Returns true if field component is set (has been assigned a value) and false otherwise */ public boolean isSetComponent() { return this.component != null; } public void setComponentIsSet(boolean value) { if (!value) { this.component = null; } } public String getServiceName() { return this.serviceName; } public void setServiceName(String serviceName) { this.serviceName = serviceName; } public void unsetServiceName() { this.serviceName = null; } /** Returns true if field serviceName is set (has been assigned a value) and false otherwise */ public boolean isSetServiceName() { return this.serviceName != null; } public void setServiceNameIsSet(boolean 
value) { if (!value) { this.serviceName = null; } } public int getAuthorizablesSize() { return (this.authorizables == null) ? 0 : this.authorizables.size(); } public java.util.Iterator<TAuthorizable> getAuthorizablesIterator() { return (this.authorizables == null) ? null : this.authorizables.iterator(); } public void addToAuthorizables(TAuthorizable elem) { if (this.authorizables == null) { this.authorizables = new ArrayList<TAuthorizable>(); } this.authorizables.add(elem); } public List<TAuthorizable> getAuthorizables() { return this.authorizables; } public void setAuthorizables(List<TAuthorizable> authorizables) { this.authorizables = authorizables; } public void unsetAuthorizables() { this.authorizables = null; } /** Returns true if field authorizables is set (has been assigned a value) and false otherwise */ public boolean isSetAuthorizables() { return this.authorizables != null; } public void setAuthorizablesIsSet(boolean value) { if (!value) { this.authorizables = null; } } public String getAction() { return this.action; } public void setAction(String action) { this.action = action; } public void unsetAction() { this.action = null; } /** Returns true if field action is set (has been assigned a value) and false otherwise */ public boolean isSetAction() { return this.action != null; } public void setActionIsSet(boolean value) { if (!value) { this.action = null; } } public long getCreateTime() { return this.createTime; } public void setCreateTime(long createTime) { this.createTime = createTime; setCreateTimeIsSet(true); } public void unsetCreateTime() { __isset_bitfield = EncodingUtils.clearBit(__isset_bitfield, __CREATETIME_ISSET_ID); } /** Returns true if field createTime is set (has been assigned a value) and false otherwise */ public boolean isSetCreateTime() { return EncodingUtils.testBit(__isset_bitfield, __CREATETIME_ISSET_ID); } public void setCreateTimeIsSet(boolean value) { __isset_bitfield = EncodingUtils.setBit(__isset_bitfield, 
__CREATETIME_ISSET_ID, value); } public String getGrantorPrincipal() { return this.grantorPrincipal; } public void setGrantorPrincipal(String grantorPrincipal) { this.grantorPrincipal = grantorPrincipal; } public void unsetGrantorPrincipal() { this.grantorPrincipal = null; } /** Returns true if field grantorPrincipal is set (has been assigned a value) and false otherwise */ public boolean isSetGrantorPrincipal() { return this.grantorPrincipal != null; } public void setGrantorPrincipalIsSet(boolean value) { if (!value) { this.grantorPrincipal = null; } } /** * * @see TSentryGrantOption */ public TSentryGrantOption getGrantOption() { return this.grantOption; } /** * * @see TSentryGrantOption */ public void setGrantOption(TSentryGrantOption grantOption) { this.grantOption = grantOption; } public void unsetGrantOption() { this.grantOption = null; } /** Returns true if field grantOption is set (has been assigned a value) and false otherwise */ public boolean isSetGrantOption() { return this.grantOption != null; } public void setGrantOptionIsSet(boolean value) { if (!value) { this.grantOption = null; } } public void setFieldValue(_Fields field, Object value) { switch (field) { case COMPONENT: if (value == null) { unsetComponent(); } else { setComponent((String)value); } break; case SERVICE_NAME: if (value == null) { unsetServiceName(); } else { setServiceName((String)value); } break; case AUTHORIZABLES: if (value == null) { unsetAuthorizables(); } else { setAuthorizables((List<TAuthorizable>)value); } break; case ACTION: if (value == null) { unsetAction(); } else { setAction((String)value); } break; case CREATE_TIME: if (value == null) { unsetCreateTime(); } else { setCreateTime((Long)value); } break; case GRANTOR_PRINCIPAL: if (value == null) { unsetGrantorPrincipal(); } else { setGrantorPrincipal((String)value); } break; case GRANT_OPTION: if (value == null) { unsetGrantOption(); } else { setGrantOption((TSentryGrantOption)value); } break; } } public Object 
getFieldValue(_Fields field) { switch (field) { case COMPONENT: return getComponent(); case SERVICE_NAME: return getServiceName(); case AUTHORIZABLES: return getAuthorizables(); case ACTION: return getAction(); case CREATE_TIME: return getCreateTime(); case GRANTOR_PRINCIPAL: return getGrantorPrincipal(); case GRANT_OPTION: return getGrantOption(); } throw new IllegalStateException(); } /** Returns true if field corresponding to fieldID is set (has been assigned a value) and false otherwise */ public boolean isSet(_Fields field) { if (field == null) { throw new IllegalArgumentException(); } switch (field) { case COMPONENT: return isSetComponent(); case SERVICE_NAME: return isSetServiceName(); case AUTHORIZABLES: return isSetAuthorizables(); case ACTION: return isSetAction(); case CREATE_TIME: return isSetCreateTime(); case GRANTOR_PRINCIPAL: return isSetGrantorPrincipal(); case GRANT_OPTION: return isSetGrantOption(); } throw new IllegalStateException(); } @Override public boolean equals(Object that) { if (that == null) return false; if (that instanceof TSentryPrivilege) return this.equals((TSentryPrivilege)that); return false; } public boolean equals(TSentryPrivilege that) { if (that == null) return false; boolean this_present_component = true && this.isSetComponent(); boolean that_present_component = true && that.isSetComponent(); if (this_present_component || that_present_component) { if (!(this_present_component && that_present_component)) return false; if (!this.component.equals(that.component)) return false; } boolean this_present_serviceName = true && this.isSetServiceName(); boolean that_present_serviceName = true && that.isSetServiceName(); if (this_present_serviceName || that_present_serviceName) { if (!(this_present_serviceName && that_present_serviceName)) return false; if (!this.serviceName.equals(that.serviceName)) return false; } boolean this_present_authorizables = true && this.isSetAuthorizables(); boolean that_present_authorizables = true && 
that.isSetAuthorizables(); if (this_present_authorizables || that_present_authorizables) { if (!(this_present_authorizables && that_present_authorizables)) return false; if (!this.authorizables.equals(that.authorizables)) return false; } boolean this_present_action = true && this.isSetAction(); boolean that_present_action = true && that.isSetAction(); if (this_present_action || that_present_action) { if (!(this_present_action && that_present_action)) return false; if (!this.action.equals(that.action)) return false; } boolean this_present_createTime = true && this.isSetCreateTime(); boolean that_present_createTime = true && that.isSetCreateTime(); if (this_present_createTime || that_present_createTime) { if (!(this_present_createTime && that_present_createTime)) return false; if (this.createTime != that.createTime) return false; } boolean this_present_grantorPrincipal = true && this.isSetGrantorPrincipal(); boolean that_present_grantorPrincipal = true && that.isSetGrantorPrincipal(); if (this_present_grantorPrincipal || that_present_grantorPrincipal) { if (!(this_present_grantorPrincipal && that_present_grantorPrincipal)) return false; if (!this.grantorPrincipal.equals(that.grantorPrincipal)) return false; } boolean this_present_grantOption = true && this.isSetGrantOption(); boolean that_present_grantOption = true && that.isSetGrantOption(); if (this_present_grantOption || that_present_grantOption) { if (!(this_present_grantOption && that_present_grantOption)) return false; if (!this.grantOption.equals(that.grantOption)) return false; } return true; } @Override public int hashCode() { List<Object> list = new ArrayList<Object>(); boolean present_component = true && (isSetComponent()); list.add(present_component); if (present_component) list.add(component); boolean present_serviceName = true && (isSetServiceName()); list.add(present_serviceName); if (present_serviceName) list.add(serviceName); boolean present_authorizables = true && (isSetAuthorizables()); 
list.add(present_authorizables); if (present_authorizables) list.add(authorizables); boolean present_action = true && (isSetAction()); list.add(present_action); if (present_action) list.add(action); boolean present_createTime = true && (isSetCreateTime()); list.add(present_createTime); if (present_createTime) list.add(createTime); boolean present_grantorPrincipal = true && (isSetGrantorPrincipal()); list.add(present_grantorPrincipal); if (present_grantorPrincipal) list.add(grantorPrincipal); boolean present_grantOption = true && (isSetGrantOption()); list.add(present_grantOption); if (present_grantOption) list.add(grantOption.getValue()); return list.hashCode(); } @Override public int compareTo(TSentryPrivilege other) { if (!getClass().equals(other.getClass())) { return getClass().getName().compareTo(other.getClass().getName()); } int lastComparison = 0; lastComparison = Boolean.valueOf(isSetComponent()).compareTo(other.isSetComponent()); if (lastComparison != 0) { return lastComparison; } if (isSetComponent()) { lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.component, other.component); if (lastComparison != 0) { return lastComparison; } } lastComparison = Boolean.valueOf(isSetServiceName()).compareTo(other.isSetServiceName()); if (lastComparison != 0) { return lastComparison; } if (isSetServiceName()) { lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.serviceName, other.serviceName); if (lastComparison != 0) { return lastComparison; } } lastComparison = Boolean.valueOf(isSetAuthorizables()).compareTo(other.isSetAuthorizables()); if (lastComparison != 0) { return lastComparison; } if (isSetAuthorizables()) { lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.authorizables, other.authorizables); if (lastComparison != 0) { return lastComparison; } } lastComparison = Boolean.valueOf(isSetAction()).compareTo(other.isSetAction()); if (lastComparison != 0) { return lastComparison; } if (isSetAction()) { lastComparison = 
org.apache.thrift.TBaseHelper.compareTo(this.action, other.action); if (lastComparison != 0) { return lastComparison; } } lastComparison = Boolean.valueOf(isSetCreateTime()).compareTo(other.isSetCreateTime()); if (lastComparison != 0) { return lastComparison; } if (isSetCreateTime()) { lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.createTime, other.createTime); if (lastComparison != 0) { return lastComparison; } } lastComparison = Boolean.valueOf(isSetGrantorPrincipal()).compareTo(other.isSetGrantorPrincipal()); if (lastComparison != 0) { return lastComparison; } if (isSetGrantorPrincipal()) { lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.grantorPrincipal, other.grantorPrincipal); if (lastComparison != 0) { return lastComparison; } } lastComparison = Boolean.valueOf(isSetGrantOption()).compareTo(other.isSetGrantOption()); if (lastComparison != 0) { return lastComparison; } if (isSetGrantOption()) { lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.grantOption, other.grantOption); if (lastComparison != 0) { return lastComparison; } } return 0; } public _Fields fieldForId(int fieldId) { return _Fields.findByThriftId(fieldId); } public void read(org.apache.thrift.protocol.TProtocol iprot) throws org.apache.thrift.TException { schemes.get(iprot.getScheme()).getScheme().read(iprot, this); } public void write(org.apache.thrift.protocol.TProtocol oprot) throws org.apache.thrift.TException { schemes.get(oprot.getScheme()).getScheme().write(oprot, this); } @Override public String toString() { StringBuilder sb = new StringBuilder("TSentryPrivilege("); boolean first = true; sb.append("component:"); if (this.component == null) { sb.append("null"); } else { sb.append(this.component); } first = false; if (!first) sb.append(", "); sb.append("serviceName:"); if (this.serviceName == null) { sb.append("null"); } else { sb.append(this.serviceName); } first = false; if (!first) sb.append(", "); sb.append("authorizables:"); if 
(this.authorizables == null) { sb.append("null"); } else { sb.append(this.authorizables); } first = false; if (!first) sb.append(", "); sb.append("action:"); if (this.action == null) { sb.append("null"); } else { sb.append(this.action); } first = false; if (isSetCreateTime()) { if (!first) sb.append(", "); sb.append("createTime:"); sb.append(this.createTime); first = false; } if (isSetGrantorPrincipal()) { if (!first) sb.append(", "); sb.append("grantorPrincipal:"); if (this.grantorPrincipal == null) { sb.append("null"); } else { sb.append(this.grantorPrincipal); } first = false; } if (isSetGrantOption()) { if (!first) sb.append(", "); sb.append("grantOption:"); if (this.grantOption == null) { sb.append("null"); } else { sb.append(this.grantOption); } first = false; } sb.append(")"); return sb.toString(); } public void validate() throws org.apache.thrift.TException { // check for required fields if (!isSetComponent()) { throw new org.apache.thrift.protocol.TProtocolException("Required field 'component' is unset! Struct:" + toString()); } if (!isSetServiceName()) { throw new org.apache.thrift.protocol.TProtocolException("Required field 'serviceName' is unset! Struct:" + toString()); } if (!isSetAuthorizables()) { throw new org.apache.thrift.protocol.TProtocolException("Required field 'authorizables' is unset! Struct:" + toString()); } if (!isSetAction()) { throw new org.apache.thrift.protocol.TProtocolException("Required field 'action' is unset! 
Struct:" + toString()); } // check for sub-struct validity } private void writeObject(java.io.ObjectOutputStream out) throws java.io.IOException { try { write(new org.apache.thrift.protocol.TCompactProtocol(new org.apache.thrift.transport.TIOStreamTransport(out))); } catch (org.apache.thrift.TException te) { throw new java.io.IOException(te); } } private void readObject(java.io.ObjectInputStream in) throws java.io.IOException, ClassNotFoundException { try { // it doesn't seem like you should have to do this, but java serialization is wacky, and doesn't call the default constructor. __isset_bitfield = 0; read(new org.apache.thrift.protocol.TCompactProtocol(new org.apache.thrift.transport.TIOStreamTransport(in))); } catch (org.apache.thrift.TException te) { throw new java.io.IOException(te); } } private static class TSentryPrivilegeStandardSchemeFactory implements SchemeFactory { public TSentryPrivilegeStandardScheme getScheme() { return new TSentryPrivilegeStandardScheme(); } } private static class TSentryPrivilegeStandardScheme extends StandardScheme<TSentryPrivilege> { public void read(org.apache.thrift.protocol.TProtocol iprot, TSentryPrivilege struct) throws org.apache.thrift.TException { org.apache.thrift.protocol.TField schemeField; iprot.readStructBegin(); while (true) { schemeField = iprot.readFieldBegin(); if (schemeField.type == org.apache.thrift.protocol.TType.STOP) { break; } switch (schemeField.id) { case 1: // COMPONENT if (schemeField.type == org.apache.thrift.protocol.TType.STRING) { struct.component = iprot.readString(); struct.setComponentIsSet(true); } else { org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type); } break; case 2: // SERVICE_NAME if (schemeField.type == org.apache.thrift.protocol.TType.STRING) { struct.serviceName = iprot.readString(); struct.setServiceNameIsSet(true); } else { org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type); } break; case 3: // AUTHORIZABLES if (schemeField.type == 
org.apache.thrift.protocol.TType.LIST) { { org.apache.thrift.protocol.TList _list0 = iprot.readListBegin(); struct.authorizables = new ArrayList<TAuthorizable>(_list0.size); TAuthorizable _elem1; for (int _i2 = 0; _i2 < _list0.size; ++_i2) { _elem1 = new TAuthorizable(); _elem1.read(iprot); struct.authorizables.add(_elem1); } iprot.readListEnd(); } struct.setAuthorizablesIsSet(true); } else { org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type); } break; case 4: // ACTION if (schemeField.type == org.apache.thrift.protocol.TType.STRING) { struct.action = iprot.readString(); struct.setActionIsSet(true); } else { org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type); } break; case 5: // CREATE_TIME if (schemeField.type == org.apache.thrift.protocol.TType.I64) { struct.createTime = iprot.readI64(); struct.setCreateTimeIsSet(true); } else { org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type); } break; case 6: // GRANTOR_PRINCIPAL if (schemeField.type == org.apache.thrift.protocol.TType.STRING) { struct.grantorPrincipal = iprot.readString(); struct.setGrantorPrincipalIsSet(true); } else { org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type); } break; case 7: // GRANT_OPTION if (schemeField.type == org.apache.thrift.protocol.TType.I32) { struct.grantOption = org.apache.sentry.api.generic.thrift.TSentryGrantOption.findByValue(iprot.readI32()); struct.setGrantOptionIsSet(true); } else { org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type); } break; default: org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type); } iprot.readFieldEnd(); } iprot.readStructEnd(); struct.validate(); } public void write(org.apache.thrift.protocol.TProtocol oprot, TSentryPrivilege struct) throws org.apache.thrift.TException { struct.validate(); oprot.writeStructBegin(STRUCT_DESC); if (struct.component != null) { oprot.writeFieldBegin(COMPONENT_FIELD_DESC); oprot.writeString(struct.component); 
oprot.writeFieldEnd(); } if (struct.serviceName != null) { oprot.writeFieldBegin(SERVICE_NAME_FIELD_DESC); oprot.writeString(struct.serviceName); oprot.writeFieldEnd(); } if (struct.authorizables != null) { oprot.writeFieldBegin(AUTHORIZABLES_FIELD_DESC); { oprot.writeListBegin(new org.apache.thrift.protocol.TList(org.apache.thrift.protocol.TType.STRUCT, struct.authorizables.size())); for (TAuthorizable _iter3 : struct.authorizables) { _iter3.write(oprot); } oprot.writeListEnd(); } oprot.writeFieldEnd(); } if (struct.action != null) { oprot.writeFieldBegin(ACTION_FIELD_DESC); oprot.writeString(struct.action); oprot.writeFieldEnd(); } if (struct.isSetCreateTime()) { oprot.writeFieldBegin(CREATE_TIME_FIELD_DESC); oprot.writeI64(struct.createTime); oprot.writeFieldEnd(); } if (struct.grantorPrincipal != null) { if (struct.isSetGrantorPrincipal()) { oprot.writeFieldBegin(GRANTOR_PRINCIPAL_FIELD_DESC); oprot.writeString(struct.grantorPrincipal); oprot.writeFieldEnd(); } } if (struct.grantOption != null) { if (struct.isSetGrantOption()) { oprot.writeFieldBegin(GRANT_OPTION_FIELD_DESC); oprot.writeI32(struct.grantOption.getValue()); oprot.writeFieldEnd(); } } oprot.writeFieldStop(); oprot.writeStructEnd(); } } private static class TSentryPrivilegeTupleSchemeFactory implements SchemeFactory { public TSentryPrivilegeTupleScheme getScheme() { return new TSentryPrivilegeTupleScheme(); } } private static class TSentryPrivilegeTupleScheme extends TupleScheme<TSentryPrivilege> { @Override public void write(org.apache.thrift.protocol.TProtocol prot, TSentryPrivilege struct) throws org.apache.thrift.TException { TTupleProtocol oprot = (TTupleProtocol) prot; oprot.writeString(struct.component); oprot.writeString(struct.serviceName); { oprot.writeI32(struct.authorizables.size()); for (TAuthorizable _iter4 : struct.authorizables) { _iter4.write(oprot); } } oprot.writeString(struct.action); BitSet optionals = new BitSet(); if (struct.isSetCreateTime()) { optionals.set(0); } if 
(struct.isSetGrantorPrincipal()) { optionals.set(1); } if (struct.isSetGrantOption()) { optionals.set(2); } oprot.writeBitSet(optionals, 3); if (struct.isSetCreateTime()) { oprot.writeI64(struct.createTime); } if (struct.isSetGrantorPrincipal()) { oprot.writeString(struct.grantorPrincipal); } if (struct.isSetGrantOption()) { oprot.writeI32(struct.grantOption.getValue()); } } @Override public void read(org.apache.thrift.protocol.TProtocol prot, TSentryPrivilege struct) throws org.apache.thrift.TException { TTupleProtocol iprot = (TTupleProtocol) prot; struct.component = iprot.readString(); struct.setComponentIsSet(true); struct.serviceName = iprot.readString(); struct.setServiceNameIsSet(true); { org.apache.thrift.protocol.TList _list5 = new org.apache.thrift.protocol.TList(org.apache.thrift.protocol.TType.STRUCT, iprot.readI32()); struct.authorizables = new ArrayList<TAuthorizable>(_list5.size); TAuthorizable _elem6; for (int _i7 = 0; _i7 < _list5.size; ++_i7) { _elem6 = new TAuthorizable(); _elem6.read(iprot); struct.authorizables.add(_elem6); } } struct.setAuthorizablesIsSet(true); struct.action = iprot.readString(); struct.setActionIsSet(true); BitSet incoming = iprot.readBitSet(3); if (incoming.get(0)) { struct.createTime = iprot.readI64(); struct.setCreateTimeIsSet(true); } if (incoming.get(1)) { struct.grantorPrincipal = iprot.readString(); struct.setGrantorPrincipalIsSet(true); } if (incoming.get(2)) { struct.grantOption = org.apache.sentry.api.generic.thrift.TSentryGrantOption.findByValue(iprot.readI32()); struct.setGrantOptionIsSet(true); } } } }
googleapis/google-cloud-java
36,121
java-containeranalysis/proto-google-cloud-containeranalysis-v1beta1/src/main/java/io/grafeas/v1beta1/provenance/Artifact.java
/* * Copyright 2025 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ // Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/devtools/containeranalysis/v1beta1/provenance/provenance.proto // Protobuf Java Version: 3.25.8 package io.grafeas.v1beta1.provenance; /** * * * <pre> * Artifact describes a build product. * </pre> * * Protobuf type {@code grafeas.v1beta1.provenance.Artifact} */ public final class Artifact extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:grafeas.v1beta1.provenance.Artifact) ArtifactOrBuilder { private static final long serialVersionUID = 0L; // Use Artifact.newBuilder() to construct. 
private Artifact(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private Artifact() { checksum_ = ""; id_ = ""; names_ = com.google.protobuf.LazyStringArrayList.emptyList(); } @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance(UnusedPrivateParameter unused) { return new Artifact(); } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return io.grafeas.v1beta1.provenance.Provenance .internal_static_grafeas_v1beta1_provenance_Artifact_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return io.grafeas.v1beta1.provenance.Provenance .internal_static_grafeas_v1beta1_provenance_Artifact_fieldAccessorTable .ensureFieldAccessorsInitialized( io.grafeas.v1beta1.provenance.Artifact.class, io.grafeas.v1beta1.provenance.Artifact.Builder.class); } public static final int CHECKSUM_FIELD_NUMBER = 1; @SuppressWarnings("serial") private volatile java.lang.Object checksum_ = ""; /** * * * <pre> * Hash or checksum value of a binary, or Docker Registry 2.0 digest of a * container. * </pre> * * <code>string checksum = 1;</code> * * @return The checksum. */ @java.lang.Override public java.lang.String getChecksum() { java.lang.Object ref = checksum_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); checksum_ = s; return s; } } /** * * * <pre> * Hash or checksum value of a binary, or Docker Registry 2.0 digest of a * container. * </pre> * * <code>string checksum = 1;</code> * * @return The bytes for checksum. 
*/ @java.lang.Override public com.google.protobuf.ByteString getChecksumBytes() { java.lang.Object ref = checksum_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); checksum_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } public static final int ID_FIELD_NUMBER = 2; @SuppressWarnings("serial") private volatile java.lang.Object id_ = ""; /** * * * <pre> * Artifact ID, if any; for container images, this will be a URL by digest * like `gcr.io/projectID/imagename&#64;sha256:123456`. * </pre> * * <code>string id = 2;</code> * * @return The id. */ @java.lang.Override public java.lang.String getId() { java.lang.Object ref = id_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); id_ = s; return s; } } /** * * * <pre> * Artifact ID, if any; for container images, this will be a URL by digest * like `gcr.io/projectID/imagename&#64;sha256:123456`. * </pre> * * <code>string id = 2;</code> * * @return The bytes for id. */ @java.lang.Override public com.google.protobuf.ByteString getIdBytes() { java.lang.Object ref = id_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); id_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } public static final int NAMES_FIELD_NUMBER = 3; @SuppressWarnings("serial") private com.google.protobuf.LazyStringArrayList names_ = com.google.protobuf.LazyStringArrayList.emptyList(); /** * * * <pre> * Related artifact names. This may be the path to a binary or jar file, or in * the case of a container build, the name used to push the container image to * Google Container Registry, as presented to `docker push`. 
Note that a * single Artifact ID can have multiple names, for example if two tags are * applied to one image. * </pre> * * <code>repeated string names = 3;</code> * * @return A list containing the names. */ public com.google.protobuf.ProtocolStringList getNamesList() { return names_; } /** * * * <pre> * Related artifact names. This may be the path to a binary or jar file, or in * the case of a container build, the name used to push the container image to * Google Container Registry, as presented to `docker push`. Note that a * single Artifact ID can have multiple names, for example if two tags are * applied to one image. * </pre> * * <code>repeated string names = 3;</code> * * @return The count of names. */ public int getNamesCount() { return names_.size(); } /** * * * <pre> * Related artifact names. This may be the path to a binary or jar file, or in * the case of a container build, the name used to push the container image to * Google Container Registry, as presented to `docker push`. Note that a * single Artifact ID can have multiple names, for example if two tags are * applied to one image. * </pre> * * <code>repeated string names = 3;</code> * * @param index The index of the element to return. * @return The names at the given index. */ public java.lang.String getNames(int index) { return names_.get(index); } /** * * * <pre> * Related artifact names. This may be the path to a binary or jar file, or in * the case of a container build, the name used to push the container image to * Google Container Registry, as presented to `docker push`. Note that a * single Artifact ID can have multiple names, for example if two tags are * applied to one image. * </pre> * * <code>repeated string names = 3;</code> * * @param index The index of the value to return. * @return The bytes of the names at the given index. 
*/ public com.google.protobuf.ByteString getNamesBytes(int index) { return names_.getByteString(index); } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(checksum_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 1, checksum_); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(id_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 2, id_); } for (int i = 0; i < names_.size(); i++) { com.google.protobuf.GeneratedMessageV3.writeString(output, 3, names_.getRaw(i)); } getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(checksum_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, checksum_); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(id_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, id_); } { int dataSize = 0; for (int i = 0; i < names_.size(); i++) { dataSize += computeStringSizeNoTag(names_.getRaw(i)); } size += dataSize; size += 1 * getNamesList().size(); } size += getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof io.grafeas.v1beta1.provenance.Artifact)) { return super.equals(obj); } io.grafeas.v1beta1.provenance.Artifact other = (io.grafeas.v1beta1.provenance.Artifact) obj; if (!getChecksum().equals(other.getChecksum())) return false; if (!getId().equals(other.getId())) return 
false; if (!getNamesList().equals(other.getNamesList())) return false; if (!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); hash = (37 * hash) + CHECKSUM_FIELD_NUMBER; hash = (53 * hash) + getChecksum().hashCode(); hash = (37 * hash) + ID_FIELD_NUMBER; hash = (53 * hash) + getId().hashCode(); if (getNamesCount() > 0) { hash = (37 * hash) + NAMES_FIELD_NUMBER; hash = (53 * hash) + getNamesList().hashCode(); } hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static io.grafeas.v1beta1.provenance.Artifact parseFrom(java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static io.grafeas.v1beta1.provenance.Artifact parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static io.grafeas.v1beta1.provenance.Artifact parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static io.grafeas.v1beta1.provenance.Artifact parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static io.grafeas.v1beta1.provenance.Artifact parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static io.grafeas.v1beta1.provenance.Artifact parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return 
PARSER.parseFrom(data, extensionRegistry); } public static io.grafeas.v1beta1.provenance.Artifact parseFrom(java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static io.grafeas.v1beta1.provenance.Artifact parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } public static io.grafeas.v1beta1.provenance.Artifact parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); } public static io.grafeas.v1beta1.provenance.Artifact parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException( PARSER, input, extensionRegistry); } public static io.grafeas.v1beta1.provenance.Artifact parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static io.grafeas.v1beta1.provenance.Artifact parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder(io.grafeas.v1beta1.provenance.Artifact prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? 
new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * * * <pre> * Artifact describes a build product. * </pre> * * Protobuf type {@code grafeas.v1beta1.provenance.Artifact} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:grafeas.v1beta1.provenance.Artifact) io.grafeas.v1beta1.provenance.ArtifactOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return io.grafeas.v1beta1.provenance.Provenance .internal_static_grafeas_v1beta1_provenance_Artifact_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return io.grafeas.v1beta1.provenance.Provenance .internal_static_grafeas_v1beta1_provenance_Artifact_fieldAccessorTable .ensureFieldAccessorsInitialized( io.grafeas.v1beta1.provenance.Artifact.class, io.grafeas.v1beta1.provenance.Artifact.Builder.class); } // Construct using io.grafeas.v1beta1.provenance.Artifact.newBuilder() private Builder() {} private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); } @java.lang.Override public Builder clear() { super.clear(); bitField0_ = 0; checksum_ = ""; id_ = ""; names_ = com.google.protobuf.LazyStringArrayList.emptyList(); return this; } @java.lang.Override public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return io.grafeas.v1beta1.provenance.Provenance .internal_static_grafeas_v1beta1_provenance_Artifact_descriptor; } @java.lang.Override public io.grafeas.v1beta1.provenance.Artifact getDefaultInstanceForType() { return io.grafeas.v1beta1.provenance.Artifact.getDefaultInstance(); } @java.lang.Override public io.grafeas.v1beta1.provenance.Artifact build() { 
io.grafeas.v1beta1.provenance.Artifact result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public io.grafeas.v1beta1.provenance.Artifact buildPartial() { io.grafeas.v1beta1.provenance.Artifact result = new io.grafeas.v1beta1.provenance.Artifact(this); if (bitField0_ != 0) { buildPartial0(result); } onBuilt(); return result; } private void buildPartial0(io.grafeas.v1beta1.provenance.Artifact result) { int from_bitField0_ = bitField0_; if (((from_bitField0_ & 0x00000001) != 0)) { result.checksum_ = checksum_; } if (((from_bitField0_ & 0x00000002) != 0)) { result.id_ = id_; } if (((from_bitField0_ & 0x00000004) != 0)) { names_.makeImmutable(); result.names_ = names_; } } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof io.grafeas.v1beta1.provenance.Artifact) { return mergeFrom((io.grafeas.v1beta1.provenance.Artifact) other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(io.grafeas.v1beta1.provenance.Artifact other) { if (other 
== io.grafeas.v1beta1.provenance.Artifact.getDefaultInstance()) return this; if (!other.getChecksum().isEmpty()) { checksum_ = other.checksum_; bitField0_ |= 0x00000001; onChanged(); } if (!other.getId().isEmpty()) { id_ = other.id_; bitField0_ |= 0x00000002; onChanged(); } if (!other.names_.isEmpty()) { if (names_.isEmpty()) { names_ = other.names_; bitField0_ |= 0x00000004; } else { ensureNamesIsMutable(); names_.addAll(other.names_); } onChanged(); } this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { checksum_ = input.readStringRequireUtf8(); bitField0_ |= 0x00000001; break; } // case 10 case 18: { id_ = input.readStringRequireUtf8(); bitField0_ |= 0x00000002; break; } // case 18 case 26: { java.lang.String s = input.readStringRequireUtf8(); ensureNamesIsMutable(); names_.add(s); break; } // case 26 default: { if (!super.parseUnknownField(input, extensionRegistry, tag)) { done = true; // was an endgroup tag } break; } // default: } // switch (tag) } // while (!done) } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.unwrapIOException(); } finally { onChanged(); } // finally return this; } private int bitField0_; private java.lang.Object checksum_ = ""; /** * * * <pre> * Hash or checksum value of a binary, or Docker Registry 2.0 digest of a * container. * </pre> * * <code>string checksum = 1;</code> * * @return The checksum. 
*/ public java.lang.String getChecksum() { java.lang.Object ref = checksum_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); checksum_ = s; return s; } else { return (java.lang.String) ref; } } /** * * * <pre> * Hash or checksum value of a binary, or Docker Registry 2.0 digest of a * container. * </pre> * * <code>string checksum = 1;</code> * * @return The bytes for checksum. */ public com.google.protobuf.ByteString getChecksumBytes() { java.lang.Object ref = checksum_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); checksum_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * * * <pre> * Hash or checksum value of a binary, or Docker Registry 2.0 digest of a * container. * </pre> * * <code>string checksum = 1;</code> * * @param value The checksum to set. * @return This builder for chaining. */ public Builder setChecksum(java.lang.String value) { if (value == null) { throw new NullPointerException(); } checksum_ = value; bitField0_ |= 0x00000001; onChanged(); return this; } /** * * * <pre> * Hash or checksum value of a binary, or Docker Registry 2.0 digest of a * container. * </pre> * * <code>string checksum = 1;</code> * * @return This builder for chaining. */ public Builder clearChecksum() { checksum_ = getDefaultInstance().getChecksum(); bitField0_ = (bitField0_ & ~0x00000001); onChanged(); return this; } /** * * * <pre> * Hash or checksum value of a binary, or Docker Registry 2.0 digest of a * container. * </pre> * * <code>string checksum = 1;</code> * * @param value The bytes for checksum to set. * @return This builder for chaining. 
*/ public Builder setChecksumBytes(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); checksum_ = value; bitField0_ |= 0x00000001; onChanged(); return this; } private java.lang.Object id_ = ""; /** * * * <pre> * Artifact ID, if any; for container images, this will be a URL by digest * like `gcr.io/projectID/imagename&#64;sha256:123456`. * </pre> * * <code>string id = 2;</code> * * @return The id. */ public java.lang.String getId() { java.lang.Object ref = id_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); id_ = s; return s; } else { return (java.lang.String) ref; } } /** * * * <pre> * Artifact ID, if any; for container images, this will be a URL by digest * like `gcr.io/projectID/imagename&#64;sha256:123456`. * </pre> * * <code>string id = 2;</code> * * @return The bytes for id. */ public com.google.protobuf.ByteString getIdBytes() { java.lang.Object ref = id_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); id_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * * * <pre> * Artifact ID, if any; for container images, this will be a URL by digest * like `gcr.io/projectID/imagename&#64;sha256:123456`. * </pre> * * <code>string id = 2;</code> * * @param value The id to set. * @return This builder for chaining. */ public Builder setId(java.lang.String value) { if (value == null) { throw new NullPointerException(); } id_ = value; bitField0_ |= 0x00000002; onChanged(); return this; } /** * * * <pre> * Artifact ID, if any; for container images, this will be a URL by digest * like `gcr.io/projectID/imagename&#64;sha256:123456`. * </pre> * * <code>string id = 2;</code> * * @return This builder for chaining. 
*/ public Builder clearId() { id_ = getDefaultInstance().getId(); bitField0_ = (bitField0_ & ~0x00000002); onChanged(); return this; } /** * * * <pre> * Artifact ID, if any; for container images, this will be a URL by digest * like `gcr.io/projectID/imagename&#64;sha256:123456`. * </pre> * * <code>string id = 2;</code> * * @param value The bytes for id to set. * @return This builder for chaining. */ public Builder setIdBytes(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); id_ = value; bitField0_ |= 0x00000002; onChanged(); return this; } private com.google.protobuf.LazyStringArrayList names_ = com.google.protobuf.LazyStringArrayList.emptyList(); private void ensureNamesIsMutable() { if (!names_.isModifiable()) { names_ = new com.google.protobuf.LazyStringArrayList(names_); } bitField0_ |= 0x00000004; } /** * * * <pre> * Related artifact names. This may be the path to a binary or jar file, or in * the case of a container build, the name used to push the container image to * Google Container Registry, as presented to `docker push`. Note that a * single Artifact ID can have multiple names, for example if two tags are * applied to one image. * </pre> * * <code>repeated string names = 3;</code> * * @return A list containing the names. */ public com.google.protobuf.ProtocolStringList getNamesList() { names_.makeImmutable(); return names_; } /** * * * <pre> * Related artifact names. This may be the path to a binary or jar file, or in * the case of a container build, the name used to push the container image to * Google Container Registry, as presented to `docker push`. Note that a * single Artifact ID can have multiple names, for example if two tags are * applied to one image. * </pre> * * <code>repeated string names = 3;</code> * * @return The count of names. */ public int getNamesCount() { return names_.size(); } /** * * * <pre> * Related artifact names. 
This may be the path to a binary or jar file, or in * the case of a container build, the name used to push the container image to * Google Container Registry, as presented to `docker push`. Note that a * single Artifact ID can have multiple names, for example if two tags are * applied to one image. * </pre> * * <code>repeated string names = 3;</code> * * @param index The index of the element to return. * @return The names at the given index. */ public java.lang.String getNames(int index) { return names_.get(index); } /** * * * <pre> * Related artifact names. This may be the path to a binary or jar file, or in * the case of a container build, the name used to push the container image to * Google Container Registry, as presented to `docker push`. Note that a * single Artifact ID can have multiple names, for example if two tags are * applied to one image. * </pre> * * <code>repeated string names = 3;</code> * * @param index The index of the value to return. * @return The bytes of the names at the given index. */ public com.google.protobuf.ByteString getNamesBytes(int index) { return names_.getByteString(index); } /** * * * <pre> * Related artifact names. This may be the path to a binary or jar file, or in * the case of a container build, the name used to push the container image to * Google Container Registry, as presented to `docker push`. Note that a * single Artifact ID can have multiple names, for example if two tags are * applied to one image. * </pre> * * <code>repeated string names = 3;</code> * * @param index The index to set the value at. * @param value The names to set. * @return This builder for chaining. */ public Builder setNames(int index, java.lang.String value) { if (value == null) { throw new NullPointerException(); } ensureNamesIsMutable(); names_.set(index, value); bitField0_ |= 0x00000004; onChanged(); return this; } /** * * * <pre> * Related artifact names. 
This may be the path to a binary or jar file, or in * the case of a container build, the name used to push the container image to * Google Container Registry, as presented to `docker push`. Note that a * single Artifact ID can have multiple names, for example if two tags are * applied to one image. * </pre> * * <code>repeated string names = 3;</code> * * @param value The names to add. * @return This builder for chaining. */ public Builder addNames(java.lang.String value) { if (value == null) { throw new NullPointerException(); } ensureNamesIsMutable(); names_.add(value); bitField0_ |= 0x00000004; onChanged(); return this; } /** * * * <pre> * Related artifact names. This may be the path to a binary or jar file, or in * the case of a container build, the name used to push the container image to * Google Container Registry, as presented to `docker push`. Note that a * single Artifact ID can have multiple names, for example if two tags are * applied to one image. * </pre> * * <code>repeated string names = 3;</code> * * @param values The names to add. * @return This builder for chaining. */ public Builder addAllNames(java.lang.Iterable<java.lang.String> values) { ensureNamesIsMutable(); com.google.protobuf.AbstractMessageLite.Builder.addAll(values, names_); bitField0_ |= 0x00000004; onChanged(); return this; } /** * * * <pre> * Related artifact names. This may be the path to a binary or jar file, or in * the case of a container build, the name used to push the container image to * Google Container Registry, as presented to `docker push`. Note that a * single Artifact ID can have multiple names, for example if two tags are * applied to one image. * </pre> * * <code>repeated string names = 3;</code> * * @return This builder for chaining. */ public Builder clearNames() { names_ = com.google.protobuf.LazyStringArrayList.emptyList(); bitField0_ = (bitField0_ & ~0x00000004); ; onChanged(); return this; } /** * * * <pre> * Related artifact names. 
This may be the path to a binary or jar file, or in * the case of a container build, the name used to push the container image to * Google Container Registry, as presented to `docker push`. Note that a * single Artifact ID can have multiple names, for example if two tags are * applied to one image. * </pre> * * <code>repeated string names = 3;</code> * * @param value The bytes of the names to add. * @return This builder for chaining. */ public Builder addNamesBytes(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); ensureNamesIsMutable(); names_.add(value); bitField0_ |= 0x00000004; onChanged(); return this; } @java.lang.Override public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:grafeas.v1beta1.provenance.Artifact) } // @@protoc_insertion_point(class_scope:grafeas.v1beta1.provenance.Artifact) private static final io.grafeas.v1beta1.provenance.Artifact DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new io.grafeas.v1beta1.provenance.Artifact(); } public static io.grafeas.v1beta1.provenance.Artifact getDefaultInstance() { return DEFAULT_INSTANCE; } private static final com.google.protobuf.Parser<Artifact> PARSER = new com.google.protobuf.AbstractParser<Artifact>() { @java.lang.Override public Artifact parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { Builder builder = newBuilder(); try { builder.mergeFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(builder.buildPartial()); } catch 
(com.google.protobuf.UninitializedMessageException e) { throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException(e) .setUnfinishedMessage(builder.buildPartial()); } return builder.buildPartial(); } }; public static com.google.protobuf.Parser<Artifact> parser() { return PARSER; } @java.lang.Override public com.google.protobuf.Parser<Artifact> getParserForType() { return PARSER; } @java.lang.Override public io.grafeas.v1beta1.provenance.Artifact getDefaultInstanceForType() { return DEFAULT_INSTANCE; } }
googleapis/google-cloud-java
36,141
java-aiplatform/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/ListTrialsResponse.java
/* * Copyright 2025 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ // Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/cloud/aiplatform/v1beta1/vizier_service.proto // Protobuf Java Version: 3.25.8 package com.google.cloud.aiplatform.v1beta1; /** * * * <pre> * Response message for * [VizierService.ListTrials][google.cloud.aiplatform.v1beta1.VizierService.ListTrials]. * </pre> * * Protobuf type {@code google.cloud.aiplatform.v1beta1.ListTrialsResponse} */ public final class ListTrialsResponse extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.cloud.aiplatform.v1beta1.ListTrialsResponse) ListTrialsResponseOrBuilder { private static final long serialVersionUID = 0L; // Use ListTrialsResponse.newBuilder() to construct. 
private ListTrialsResponse(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private ListTrialsResponse() { trials_ = java.util.Collections.emptyList(); nextPageToken_ = ""; } @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance(UnusedPrivateParameter unused) { return new ListTrialsResponse(); } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.aiplatform.v1beta1.VizierServiceProto .internal_static_google_cloud_aiplatform_v1beta1_ListTrialsResponse_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.aiplatform.v1beta1.VizierServiceProto .internal_static_google_cloud_aiplatform_v1beta1_ListTrialsResponse_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.aiplatform.v1beta1.ListTrialsResponse.class, com.google.cloud.aiplatform.v1beta1.ListTrialsResponse.Builder.class); } public static final int TRIALS_FIELD_NUMBER = 1; @SuppressWarnings("serial") private java.util.List<com.google.cloud.aiplatform.v1beta1.Trial> trials_; /** * * * <pre> * The Trials associated with the Study. * </pre> * * <code>repeated .google.cloud.aiplatform.v1beta1.Trial trials = 1;</code> */ @java.lang.Override public java.util.List<com.google.cloud.aiplatform.v1beta1.Trial> getTrialsList() { return trials_; } /** * * * <pre> * The Trials associated with the Study. * </pre> * * <code>repeated .google.cloud.aiplatform.v1beta1.Trial trials = 1;</code> */ @java.lang.Override public java.util.List<? extends com.google.cloud.aiplatform.v1beta1.TrialOrBuilder> getTrialsOrBuilderList() { return trials_; } /** * * * <pre> * The Trials associated with the Study. 
* </pre> * * <code>repeated .google.cloud.aiplatform.v1beta1.Trial trials = 1;</code> */ @java.lang.Override public int getTrialsCount() { return trials_.size(); } /** * * * <pre> * The Trials associated with the Study. * </pre> * * <code>repeated .google.cloud.aiplatform.v1beta1.Trial trials = 1;</code> */ @java.lang.Override public com.google.cloud.aiplatform.v1beta1.Trial getTrials(int index) { return trials_.get(index); } /** * * * <pre> * The Trials associated with the Study. * </pre> * * <code>repeated .google.cloud.aiplatform.v1beta1.Trial trials = 1;</code> */ @java.lang.Override public com.google.cloud.aiplatform.v1beta1.TrialOrBuilder getTrialsOrBuilder(int index) { return trials_.get(index); } public static final int NEXT_PAGE_TOKEN_FIELD_NUMBER = 2; @SuppressWarnings("serial") private volatile java.lang.Object nextPageToken_ = ""; /** * * * <pre> * Pass this token as the `page_token` field of the request for a * subsequent call. * If this field is omitted, there are no subsequent pages. * </pre> * * <code>string next_page_token = 2;</code> * * @return The nextPageToken. */ @java.lang.Override public java.lang.String getNextPageToken() { java.lang.Object ref = nextPageToken_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); nextPageToken_ = s; return s; } } /** * * * <pre> * Pass this token as the `page_token` field of the request for a * subsequent call. * If this field is omitted, there are no subsequent pages. * </pre> * * <code>string next_page_token = 2;</code> * * @return The bytes for nextPageToken. 
*/ @java.lang.Override public com.google.protobuf.ByteString getNextPageTokenBytes() { java.lang.Object ref = nextPageToken_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); nextPageToken_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { for (int i = 0; i < trials_.size(); i++) { output.writeMessage(1, trials_.get(i)); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 2, nextPageToken_); } getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; for (int i = 0; i < trials_.size(); i++) { size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, trials_.get(i)); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, nextPageToken_); } size += getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof com.google.cloud.aiplatform.v1beta1.ListTrialsResponse)) { return super.equals(obj); } com.google.cloud.aiplatform.v1beta1.ListTrialsResponse other = (com.google.cloud.aiplatform.v1beta1.ListTrialsResponse) obj; if (!getTrialsList().equals(other.getTrialsList())) return false; if (!getNextPageToken().equals(other.getNextPageToken())) return false; if 
(!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); if (getTrialsCount() > 0) { hash = (37 * hash) + TRIALS_FIELD_NUMBER; hash = (53 * hash) + getTrialsList().hashCode(); } hash = (37 * hash) + NEXT_PAGE_TOKEN_FIELD_NUMBER; hash = (53 * hash) + getNextPageToken().hashCode(); hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static com.google.cloud.aiplatform.v1beta1.ListTrialsResponse parseFrom( java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.aiplatform.v1beta1.ListTrialsResponse parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.aiplatform.v1beta1.ListTrialsResponse parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.aiplatform.v1beta1.ListTrialsResponse parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.aiplatform.v1beta1.ListTrialsResponse parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.aiplatform.v1beta1.ListTrialsResponse parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public 
static com.google.cloud.aiplatform.v1beta1.ListTrialsResponse parseFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.aiplatform.v1beta1.ListTrialsResponse parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.aiplatform.v1beta1.ListTrialsResponse parseDelimitedFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); } public static com.google.cloud.aiplatform.v1beta1.ListTrialsResponse parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.aiplatform.v1beta1.ListTrialsResponse parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.aiplatform.v1beta1.ListTrialsResponse parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder( com.google.cloud.aiplatform.v1beta1.ListTrialsResponse prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { 
return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * * * <pre> * Response message for * [VizierService.ListTrials][google.cloud.aiplatform.v1beta1.VizierService.ListTrials]. * </pre> * * Protobuf type {@code google.cloud.aiplatform.v1beta1.ListTrialsResponse} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:google.cloud.aiplatform.v1beta1.ListTrialsResponse) com.google.cloud.aiplatform.v1beta1.ListTrialsResponseOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.aiplatform.v1beta1.VizierServiceProto .internal_static_google_cloud_aiplatform_v1beta1_ListTrialsResponse_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.aiplatform.v1beta1.VizierServiceProto .internal_static_google_cloud_aiplatform_v1beta1_ListTrialsResponse_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.aiplatform.v1beta1.ListTrialsResponse.class, com.google.cloud.aiplatform.v1beta1.ListTrialsResponse.Builder.class); } // Construct using com.google.cloud.aiplatform.v1beta1.ListTrialsResponse.newBuilder() private Builder() {} private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); } @java.lang.Override public Builder clear() { super.clear(); bitField0_ = 0; if (trialsBuilder_ == null) { trials_ = java.util.Collections.emptyList(); } else { trials_ = null; trialsBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000001); nextPageToken_ = ""; return this; } @java.lang.Override public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return 
com.google.cloud.aiplatform.v1beta1.VizierServiceProto .internal_static_google_cloud_aiplatform_v1beta1_ListTrialsResponse_descriptor; } @java.lang.Override public com.google.cloud.aiplatform.v1beta1.ListTrialsResponse getDefaultInstanceForType() { return com.google.cloud.aiplatform.v1beta1.ListTrialsResponse.getDefaultInstance(); } @java.lang.Override public com.google.cloud.aiplatform.v1beta1.ListTrialsResponse build() { com.google.cloud.aiplatform.v1beta1.ListTrialsResponse result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public com.google.cloud.aiplatform.v1beta1.ListTrialsResponse buildPartial() { com.google.cloud.aiplatform.v1beta1.ListTrialsResponse result = new com.google.cloud.aiplatform.v1beta1.ListTrialsResponse(this); buildPartialRepeatedFields(result); if (bitField0_ != 0) { buildPartial0(result); } onBuilt(); return result; } private void buildPartialRepeatedFields( com.google.cloud.aiplatform.v1beta1.ListTrialsResponse result) { if (trialsBuilder_ == null) { if (((bitField0_ & 0x00000001) != 0)) { trials_ = java.util.Collections.unmodifiableList(trials_); bitField0_ = (bitField0_ & ~0x00000001); } result.trials_ = trials_; } else { result.trials_ = trialsBuilder_.build(); } } private void buildPartial0(com.google.cloud.aiplatform.v1beta1.ListTrialsResponse result) { int from_bitField0_ = bitField0_; if (((from_bitField0_ & 0x00000002) != 0)) { result.nextPageToken_ = nextPageToken_; } } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) { return 
super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof com.google.cloud.aiplatform.v1beta1.ListTrialsResponse) { return mergeFrom((com.google.cloud.aiplatform.v1beta1.ListTrialsResponse) other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(com.google.cloud.aiplatform.v1beta1.ListTrialsResponse other) { if (other == com.google.cloud.aiplatform.v1beta1.ListTrialsResponse.getDefaultInstance()) return this; if (trialsBuilder_ == null) { if (!other.trials_.isEmpty()) { if (trials_.isEmpty()) { trials_ = other.trials_; bitField0_ = (bitField0_ & ~0x00000001); } else { ensureTrialsIsMutable(); trials_.addAll(other.trials_); } onChanged(); } } else { if (!other.trials_.isEmpty()) { if (trialsBuilder_.isEmpty()) { trialsBuilder_.dispose(); trialsBuilder_ = null; trials_ = other.trials_; bitField0_ = (bitField0_ & ~0x00000001); trialsBuilder_ = com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ? 
getTrialsFieldBuilder() : null; } else { trialsBuilder_.addAllMessages(other.trials_); } } } if (!other.getNextPageToken().isEmpty()) { nextPageToken_ = other.nextPageToken_; bitField0_ |= 0x00000002; onChanged(); } this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { com.google.cloud.aiplatform.v1beta1.Trial m = input.readMessage( com.google.cloud.aiplatform.v1beta1.Trial.parser(), extensionRegistry); if (trialsBuilder_ == null) { ensureTrialsIsMutable(); trials_.add(m); } else { trialsBuilder_.addMessage(m); } break; } // case 10 case 18: { nextPageToken_ = input.readStringRequireUtf8(); bitField0_ |= 0x00000002; break; } // case 18 default: { if (!super.parseUnknownField(input, extensionRegistry, tag)) { done = true; // was an endgroup tag } break; } // default: } // switch (tag) } // while (!done) } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.unwrapIOException(); } finally { onChanged(); } // finally return this; } private int bitField0_; private java.util.List<com.google.cloud.aiplatform.v1beta1.Trial> trials_ = java.util.Collections.emptyList(); private void ensureTrialsIsMutable() { if (!((bitField0_ & 0x00000001) != 0)) { trials_ = new java.util.ArrayList<com.google.cloud.aiplatform.v1beta1.Trial>(trials_); bitField0_ |= 0x00000001; } } private com.google.protobuf.RepeatedFieldBuilderV3< com.google.cloud.aiplatform.v1beta1.Trial, com.google.cloud.aiplatform.v1beta1.Trial.Builder, com.google.cloud.aiplatform.v1beta1.TrialOrBuilder> trialsBuilder_; /** * * * 
<pre> * The Trials associated with the Study. * </pre> * * <code>repeated .google.cloud.aiplatform.v1beta1.Trial trials = 1;</code> */ public java.util.List<com.google.cloud.aiplatform.v1beta1.Trial> getTrialsList() { if (trialsBuilder_ == null) { return java.util.Collections.unmodifiableList(trials_); } else { return trialsBuilder_.getMessageList(); } } /** * * * <pre> * The Trials associated with the Study. * </pre> * * <code>repeated .google.cloud.aiplatform.v1beta1.Trial trials = 1;</code> */ public int getTrialsCount() { if (trialsBuilder_ == null) { return trials_.size(); } else { return trialsBuilder_.getCount(); } } /** * * * <pre> * The Trials associated with the Study. * </pre> * * <code>repeated .google.cloud.aiplatform.v1beta1.Trial trials = 1;</code> */ public com.google.cloud.aiplatform.v1beta1.Trial getTrials(int index) { if (trialsBuilder_ == null) { return trials_.get(index); } else { return trialsBuilder_.getMessage(index); } } /** * * * <pre> * The Trials associated with the Study. * </pre> * * <code>repeated .google.cloud.aiplatform.v1beta1.Trial trials = 1;</code> */ public Builder setTrials(int index, com.google.cloud.aiplatform.v1beta1.Trial value) { if (trialsBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureTrialsIsMutable(); trials_.set(index, value); onChanged(); } else { trialsBuilder_.setMessage(index, value); } return this; } /** * * * <pre> * The Trials associated with the Study. * </pre> * * <code>repeated .google.cloud.aiplatform.v1beta1.Trial trials = 1;</code> */ public Builder setTrials( int index, com.google.cloud.aiplatform.v1beta1.Trial.Builder builderForValue) { if (trialsBuilder_ == null) { ensureTrialsIsMutable(); trials_.set(index, builderForValue.build()); onChanged(); } else { trialsBuilder_.setMessage(index, builderForValue.build()); } return this; } /** * * * <pre> * The Trials associated with the Study. 
* </pre> * * <code>repeated .google.cloud.aiplatform.v1beta1.Trial trials = 1;</code> */ public Builder addTrials(com.google.cloud.aiplatform.v1beta1.Trial value) { if (trialsBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureTrialsIsMutable(); trials_.add(value); onChanged(); } else { trialsBuilder_.addMessage(value); } return this; } /** * * * <pre> * The Trials associated with the Study. * </pre> * * <code>repeated .google.cloud.aiplatform.v1beta1.Trial trials = 1;</code> */ public Builder addTrials(int index, com.google.cloud.aiplatform.v1beta1.Trial value) { if (trialsBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureTrialsIsMutable(); trials_.add(index, value); onChanged(); } else { trialsBuilder_.addMessage(index, value); } return this; } /** * * * <pre> * The Trials associated with the Study. * </pre> * * <code>repeated .google.cloud.aiplatform.v1beta1.Trial trials = 1;</code> */ public Builder addTrials(com.google.cloud.aiplatform.v1beta1.Trial.Builder builderForValue) { if (trialsBuilder_ == null) { ensureTrialsIsMutable(); trials_.add(builderForValue.build()); onChanged(); } else { trialsBuilder_.addMessage(builderForValue.build()); } return this; } /** * * * <pre> * The Trials associated with the Study. * </pre> * * <code>repeated .google.cloud.aiplatform.v1beta1.Trial trials = 1;</code> */ public Builder addTrials( int index, com.google.cloud.aiplatform.v1beta1.Trial.Builder builderForValue) { if (trialsBuilder_ == null) { ensureTrialsIsMutable(); trials_.add(index, builderForValue.build()); onChanged(); } else { trialsBuilder_.addMessage(index, builderForValue.build()); } return this; } /** * * * <pre> * The Trials associated with the Study. * </pre> * * <code>repeated .google.cloud.aiplatform.v1beta1.Trial trials = 1;</code> */ public Builder addAllTrials( java.lang.Iterable<? 
extends com.google.cloud.aiplatform.v1beta1.Trial> values) { if (trialsBuilder_ == null) { ensureTrialsIsMutable(); com.google.protobuf.AbstractMessageLite.Builder.addAll(values, trials_); onChanged(); } else { trialsBuilder_.addAllMessages(values); } return this; } /** * * * <pre> * The Trials associated with the Study. * </pre> * * <code>repeated .google.cloud.aiplatform.v1beta1.Trial trials = 1;</code> */ public Builder clearTrials() { if (trialsBuilder_ == null) { trials_ = java.util.Collections.emptyList(); bitField0_ = (bitField0_ & ~0x00000001); onChanged(); } else { trialsBuilder_.clear(); } return this; } /** * * * <pre> * The Trials associated with the Study. * </pre> * * <code>repeated .google.cloud.aiplatform.v1beta1.Trial trials = 1;</code> */ public Builder removeTrials(int index) { if (trialsBuilder_ == null) { ensureTrialsIsMutable(); trials_.remove(index); onChanged(); } else { trialsBuilder_.remove(index); } return this; } /** * * * <pre> * The Trials associated with the Study. * </pre> * * <code>repeated .google.cloud.aiplatform.v1beta1.Trial trials = 1;</code> */ public com.google.cloud.aiplatform.v1beta1.Trial.Builder getTrialsBuilder(int index) { return getTrialsFieldBuilder().getBuilder(index); } /** * * * <pre> * The Trials associated with the Study. * </pre> * * <code>repeated .google.cloud.aiplatform.v1beta1.Trial trials = 1;</code> */ public com.google.cloud.aiplatform.v1beta1.TrialOrBuilder getTrialsOrBuilder(int index) { if (trialsBuilder_ == null) { return trials_.get(index); } else { return trialsBuilder_.getMessageOrBuilder(index); } } /** * * * <pre> * The Trials associated with the Study. * </pre> * * <code>repeated .google.cloud.aiplatform.v1beta1.Trial trials = 1;</code> */ public java.util.List<? 
extends com.google.cloud.aiplatform.v1beta1.TrialOrBuilder> getTrialsOrBuilderList() { if (trialsBuilder_ != null) { return trialsBuilder_.getMessageOrBuilderList(); } else { return java.util.Collections.unmodifiableList(trials_); } } /** * * * <pre> * The Trials associated with the Study. * </pre> * * <code>repeated .google.cloud.aiplatform.v1beta1.Trial trials = 1;</code> */ public com.google.cloud.aiplatform.v1beta1.Trial.Builder addTrialsBuilder() { return getTrialsFieldBuilder() .addBuilder(com.google.cloud.aiplatform.v1beta1.Trial.getDefaultInstance()); } /** * * * <pre> * The Trials associated with the Study. * </pre> * * <code>repeated .google.cloud.aiplatform.v1beta1.Trial trials = 1;</code> */ public com.google.cloud.aiplatform.v1beta1.Trial.Builder addTrialsBuilder(int index) { return getTrialsFieldBuilder() .addBuilder(index, com.google.cloud.aiplatform.v1beta1.Trial.getDefaultInstance()); } /** * * * <pre> * The Trials associated with the Study. * </pre> * * <code>repeated .google.cloud.aiplatform.v1beta1.Trial trials = 1;</code> */ public java.util.List<com.google.cloud.aiplatform.v1beta1.Trial.Builder> getTrialsBuilderList() { return getTrialsFieldBuilder().getBuilderList(); } private com.google.protobuf.RepeatedFieldBuilderV3< com.google.cloud.aiplatform.v1beta1.Trial, com.google.cloud.aiplatform.v1beta1.Trial.Builder, com.google.cloud.aiplatform.v1beta1.TrialOrBuilder> getTrialsFieldBuilder() { if (trialsBuilder_ == null) { trialsBuilder_ = new com.google.protobuf.RepeatedFieldBuilderV3< com.google.cloud.aiplatform.v1beta1.Trial, com.google.cloud.aiplatform.v1beta1.Trial.Builder, com.google.cloud.aiplatform.v1beta1.TrialOrBuilder>( trials_, ((bitField0_ & 0x00000001) != 0), getParentForChildren(), isClean()); trials_ = null; } return trialsBuilder_; } private java.lang.Object nextPageToken_ = ""; /** * * * <pre> * Pass this token as the `page_token` field of the request for a * subsequent call. 
* If this field is omitted, there are no subsequent pages. * </pre> * * <code>string next_page_token = 2;</code> * * @return The nextPageToken. */ public java.lang.String getNextPageToken() { java.lang.Object ref = nextPageToken_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); nextPageToken_ = s; return s; } else { return (java.lang.String) ref; } } /** * * * <pre> * Pass this token as the `page_token` field of the request for a * subsequent call. * If this field is omitted, there are no subsequent pages. * </pre> * * <code>string next_page_token = 2;</code> * * @return The bytes for nextPageToken. */ public com.google.protobuf.ByteString getNextPageTokenBytes() { java.lang.Object ref = nextPageToken_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); nextPageToken_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * * * <pre> * Pass this token as the `page_token` field of the request for a * subsequent call. * If this field is omitted, there are no subsequent pages. * </pre> * * <code>string next_page_token = 2;</code> * * @param value The nextPageToken to set. * @return This builder for chaining. */ public Builder setNextPageToken(java.lang.String value) { if (value == null) { throw new NullPointerException(); } nextPageToken_ = value; bitField0_ |= 0x00000002; onChanged(); return this; } /** * * * <pre> * Pass this token as the `page_token` field of the request for a * subsequent call. * If this field is omitted, there are no subsequent pages. * </pre> * * <code>string next_page_token = 2;</code> * * @return This builder for chaining. 
*/ public Builder clearNextPageToken() { nextPageToken_ = getDefaultInstance().getNextPageToken(); bitField0_ = (bitField0_ & ~0x00000002); onChanged(); return this; } /** * * * <pre> * Pass this token as the `page_token` field of the request for a * subsequent call. * If this field is omitted, there are no subsequent pages. * </pre> * * <code>string next_page_token = 2;</code> * * @param value The bytes for nextPageToken to set. * @return This builder for chaining. */ public Builder setNextPageTokenBytes(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); nextPageToken_ = value; bitField0_ |= 0x00000002; onChanged(); return this; } @java.lang.Override public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:google.cloud.aiplatform.v1beta1.ListTrialsResponse) } // @@protoc_insertion_point(class_scope:google.cloud.aiplatform.v1beta1.ListTrialsResponse) private static final com.google.cloud.aiplatform.v1beta1.ListTrialsResponse DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new com.google.cloud.aiplatform.v1beta1.ListTrialsResponse(); } public static com.google.cloud.aiplatform.v1beta1.ListTrialsResponse getDefaultInstance() { return DEFAULT_INSTANCE; } private static final com.google.protobuf.Parser<ListTrialsResponse> PARSER = new com.google.protobuf.AbstractParser<ListTrialsResponse>() { @java.lang.Override public ListTrialsResponse parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { Builder builder = newBuilder(); try { builder.mergeFrom(input, extensionRegistry); } 
catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(builder.buildPartial()); } catch (com.google.protobuf.UninitializedMessageException e) { throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException(e) .setUnfinishedMessage(builder.buildPartial()); } return builder.buildPartial(); } }; public static com.google.protobuf.Parser<ListTrialsResponse> parser() { return PARSER; } @java.lang.Override public com.google.protobuf.Parser<ListTrialsResponse> getParserForType() { return PARSER; } @java.lang.Override public com.google.cloud.aiplatform.v1beta1.ListTrialsResponse getDefaultInstanceForType() { return DEFAULT_INSTANCE; } }
hibernate/hibernate-orm
34,079
hibernate-core/src/main/java/org/hibernate/boot/internal/MetadataBuilderImpl.java
/* * SPDX-License-Identifier: Apache-2.0 * Copyright Red Hat Inc. and Hibernate Authors */ package org.hibernate.boot.internal; import java.util.ArrayList; import java.util.List; import java.util.Locale; import org.hibernate.AnnotationException; import org.hibernate.HibernateException; import org.hibernate.cfg.CacheSettings; import org.hibernate.cfg.JpaComplianceSettings; import org.hibernate.cfg.ManagedBeanSettings; import org.hibernate.cfg.SchemaToolingSettings; import org.hibernate.context.spi.MultiTenancy; import org.hibernate.type.TimeZoneStorageStrategy; import org.hibernate.annotations.CacheConcurrencyStrategy; import org.hibernate.annotations.TimeZoneStorageType; import org.hibernate.boot.CacheRegionDefinition; import org.hibernate.boot.MetadataBuilder; import org.hibernate.boot.MetadataSources; import org.hibernate.boot.archive.scan.spi.ScanEnvironment; import org.hibernate.boot.archive.scan.spi.ScanOptions; import org.hibernate.boot.archive.scan.spi.Scanner; import org.hibernate.boot.archive.spi.ArchiveDescriptorFactory; import org.hibernate.boot.cfgxml.spi.CfgXmlAccessService; import org.hibernate.boot.cfgxml.spi.LoadedConfig; import org.hibernate.boot.cfgxml.spi.MappingReference; import org.hibernate.boot.jaxb.Origin; import org.hibernate.boot.jaxb.hbm.spi.JaxbHbmHibernateMapping; import org.hibernate.boot.jaxb.hbm.transform.HbmXmlTransformer; import org.hibernate.boot.jaxb.hbm.transform.UnsupportedFeatureHandling; import org.hibernate.boot.jaxb.mapping.spi.JaxbEntityMappingsImpl; import org.hibernate.boot.jaxb.spi.Binding; import org.hibernate.boot.model.FunctionContributions; import org.hibernate.boot.model.FunctionContributor; import org.hibernate.boot.model.TypeContributions; import org.hibernate.boot.model.TypeContributor; import org.hibernate.boot.model.convert.internal.ConverterDescriptors; import org.hibernate.boot.model.convert.spi.ConverterDescriptor; import org.hibernate.boot.model.naming.ImplicitNamingStrategy; import 
org.hibernate.boot.model.naming.ImplicitNamingStrategyJpaCompliantImpl; import org.hibernate.boot.model.naming.PhysicalNamingStrategy; import org.hibernate.boot.model.naming.PhysicalNamingStrategyStandardImpl; import org.hibernate.boot.model.process.spi.MetadataBuildingProcess; import org.hibernate.boot.model.relational.AuxiliaryDatabaseObject; import org.hibernate.boot.model.relational.ColumnOrderingStrategy; import org.hibernate.boot.model.relational.ColumnOrderingStrategyStandard; import org.hibernate.boot.models.xml.spi.PersistenceUnitMetadata; import org.hibernate.boot.registry.BootstrapServiceRegistry; import org.hibernate.boot.registry.StandardServiceRegistry; import org.hibernate.boot.registry.StandardServiceRegistryBuilder; import org.hibernate.boot.registry.classloading.spi.ClassLoaderService; import org.hibernate.boot.registry.selector.spi.StrategySelector; import org.hibernate.boot.spi.BasicTypeRegistration; import org.hibernate.boot.spi.BootstrapContext; import org.hibernate.boot.spi.JpaOrmXmlPersistenceUnitDefaultAware; import org.hibernate.boot.spi.MappingDefaults; import org.hibernate.boot.spi.MetadataBuilderImplementor; import org.hibernate.boot.spi.MetadataBuilderInitializer; import org.hibernate.boot.spi.MetadataBuildingOptions; import org.hibernate.boot.spi.MetadataImplementor; import org.hibernate.boot.spi.MetadataSourcesContributor; import org.hibernate.cache.spi.RegionFactory; import org.hibernate.cache.spi.access.AccessType; import org.hibernate.cfg.MappingSettings; import org.hibernate.dialect.Dialect; import org.hibernate.dialect.TimeZoneSupport; import org.hibernate.engine.config.spi.ConfigurationService; import org.hibernate.engine.jdbc.spi.JdbcServices; import org.hibernate.internal.log.DeprecationLogger; import org.hibernate.metamodel.CollectionClassification; import org.hibernate.query.sqm.function.SqmFunctionDescriptor; import org.hibernate.query.sqm.function.SqmFunctionRegistry; import org.hibernate.service.ServiceRegistry; import 
org.hibernate.service.spi.ServiceException; import org.hibernate.type.BasicType; import org.hibernate.type.SqlTypes; import org.hibernate.type.WrapperArrayHandling; import org.hibernate.type.spi.TypeConfiguration; import org.hibernate.usertype.CompositeUserType; import org.hibernate.usertype.UserType; import jakarta.persistence.AttributeConverter; import jakarta.persistence.ConstraintMode; import jakarta.persistence.SharedCacheMode; import static org.hibernate.engine.config.spi.StandardConverters.BOOLEAN; import static org.hibernate.engine.config.spi.StandardConverters.STRING; import static org.hibernate.boot.BootLogging.BOOT_LOGGER; import static org.hibernate.internal.util.NullnessHelper.coalesceSuppliedValues; import static org.hibernate.internal.util.StringHelper.nullIfEmpty; import static org.hibernate.internal.util.collections.CollectionHelper.isNotEmpty; /** * @author Steve Ebersole */ public class MetadataBuilderImpl implements MetadataBuilderImplementor, TypeContributions { private final MetadataSources sources; private final BootstrapContextImpl bootstrapContext; private final MetadataBuildingOptionsImpl options; public MetadataBuilderImpl(MetadataSources sources) { this( sources, getStandardServiceRegistry( sources.getServiceRegistry() ) ); } public static StandardServiceRegistry getStandardServiceRegistry(ServiceRegistry serviceRegistry) { if ( serviceRegistry == null ) { throw new HibernateException( "ServiceRegistry passed to MetadataBuilder cannot be null" ); } else if ( serviceRegistry instanceof StandardServiceRegistry standardServiceRegistry ) { return standardServiceRegistry; } else if ( serviceRegistry instanceof BootstrapServiceRegistry bootstrapServiceRegistry ) { BOOT_LOGGER.badServiceRegistry(); return new StandardServiceRegistryBuilder( bootstrapServiceRegistry ).build(); } else { throw new HibernateException( String.format( "Unexpected type of ServiceRegistry [%s] encountered in attempt to build MetadataBuilder", 
serviceRegistry.getClass().getName() ) ); } } public MetadataBuilderImpl(MetadataSources sources, StandardServiceRegistry serviceRegistry) { this.sources = sources; this.options = new MetadataBuildingOptionsImpl( serviceRegistry ); this.bootstrapContext = new BootstrapContextImpl( serviceRegistry, options ); //this is needed only for implementing deprecated method options.setBootstrapContext( bootstrapContext ); for ( MetadataSourcesContributor contributor : sources.getServiceRegistry() .requireService( ClassLoaderService.class ) .loadJavaServices( MetadataSourcesContributor.class ) ) { contributor.contribute( sources ); } // todo : not so sure this is needed anymore. // these should be set during the StandardServiceRegistryBuilder.configure call applyCfgXmlValues( serviceRegistry.requireService( CfgXmlAccessService.class ) ); for ( MetadataBuilderInitializer contributor : serviceRegistry.requireService( ClassLoaderService.class ) .loadJavaServices( MetadataBuilderInitializer.class ) ) { contributor.contribute( this, serviceRegistry ); } } private void applyCfgXmlValues(CfgXmlAccessService service) { final LoadedConfig aggregatedConfig = service.getAggregatedConfig(); if ( aggregatedConfig != null ) { for ( CacheRegionDefinition cacheRegionDefinition : aggregatedConfig.getCacheRegionDefinitions() ) { applyCacheRegionDefinition( cacheRegionDefinition ); } } } @Override public MetadataBuilder applyImplicitSchemaName(String implicitSchemaName) { options.mappingDefaults.implicitSchemaName = implicitSchemaName; return this; } @Override public MetadataBuilder applyImplicitCatalogName(String implicitCatalogName) { options.mappingDefaults.implicitCatalogName = implicitCatalogName; return this; } @Override public MetadataBuilder applyImplicitNamingStrategy(ImplicitNamingStrategy namingStrategy) { options.implicitNamingStrategy = namingStrategy; return this; } @Override public MetadataBuilder applyPhysicalNamingStrategy(PhysicalNamingStrategy namingStrategy) { 
options.physicalNamingStrategy = namingStrategy; return this; } @Override public MetadataBuilder applyColumnOrderingStrategy(ColumnOrderingStrategy columnOrderingStrategy) { options.columnOrderingStrategy = columnOrderingStrategy; return this; } @Override public MetadataBuilder applySharedCacheMode(SharedCacheMode sharedCacheMode) { options.sharedCacheMode = sharedCacheMode; return this; } @Override public MetadataBuilder applyAccessType(AccessType implicitCacheAccessType) { options.mappingDefaults.implicitCacheAccessType = implicitCacheAccessType; return this; } @Override public MetadataBuilder applyIndexView(Object jandexView) { return this; } @Override public MetadataBuilder applyScanOptions(ScanOptions scanOptions) { bootstrapContext.injectScanOptions( scanOptions ); return this; } @Override public MetadataBuilder applyScanEnvironment(ScanEnvironment scanEnvironment) { bootstrapContext.injectScanEnvironment( scanEnvironment ); return this; } @Override public MetadataBuilder applyScanner(Scanner scanner) { bootstrapContext.injectScanner( scanner ); return this; } @Override public MetadataBuilder applyArchiveDescriptorFactory(ArchiveDescriptorFactory factory) { bootstrapContext.injectArchiveDescriptorFactory( factory ); return this; } @Override public MetadataBuilder applyImplicitListSemantics(CollectionClassification classification) { if ( classification != null ) { options.mappingDefaults.implicitListClassification = classification; } return this; } @Override public MetadataBuilder enableExplicitDiscriminatorsForJoinedSubclassSupport(boolean supported) { options.explicitDiscriminatorsForJoinedInheritanceSupported = supported; return this; } @Override public MetadataBuilder enableImplicitDiscriminatorsForJoinedSubclassSupport(boolean supported) { options.implicitDiscriminatorsForJoinedInheritanceSupported = supported; return this; } @Override public MetadataBuilder enableImplicitForcingOfDiscriminatorsInSelect(boolean supported) { 
options.implicitlyForceDiscriminatorInSelect = supported;
		return this;
	}

	@Override
	public MetadataBuilder enableGlobalNationalizedCharacterDataSupport(boolean enabled) {
		options.useNationalizedCharacterData = enabled;
		return this;
	}

	@Override
	public MetadataBuilder applyBasicType(BasicType<?> type) {
		options.basicTypeRegistrations.add( new BasicTypeRegistration( type ) );
		return this;
	}

	@Override
	public MetadataBuilder applyBasicType(BasicType<?> type, String... keys) {
		options.basicTypeRegistrations.add( new BasicTypeRegistration( type, keys ) );
		return this;
	}

	@Override
	public MetadataBuilder applyBasicType(UserType<?> type, String... keys) {
		options.basicTypeRegistrations.add( new BasicTypeRegistration( type, keys, getTypeConfiguration() ) );
		return this;
	}

	@Override
	public MetadataBuilder applyTypes(TypeContributor typeContributor) {
		typeContributor.contribute( this, options.serviceRegistry );
		return this;
	}

	@Override
	@Deprecated
	public void contributeType(BasicType<?> type) {
		options.basicTypeRegistrations.add( new BasicTypeRegistration( type ) );
	}

	@Override
	@Deprecated
	public void contributeType(BasicType<?> type, String... keys) {
		options.basicTypeRegistrations.add( new BasicTypeRegistration( type, keys ) );
	}

	@Override
	@Deprecated
	public void contributeType(UserType<?> type, String[] keys) {
		options.basicTypeRegistrations.add( new BasicTypeRegistration( type, keys, getTypeConfiguration() ) );
	}

	@Override
	public void contributeType(CompositeUserType<?> type) {
		options.compositeUserTypes.add( type );
	}

	@Override
	public TypeConfiguration getTypeConfiguration() {
		return bootstrapContext.getTypeConfiguration();
	}

	@Override
	public void contributeAttributeConverter(Class<?
extends AttributeConverter<?,?>> converterClass) {
		bootstrapContext.addAttributeConverterDescriptor(
				ConverterDescriptors.of( converterClass, bootstrapContext.getClassmateContext() ) );
	}

	@Override
	public MetadataBuilder applyCacheRegionDefinition(CacheRegionDefinition cacheRegionDefinition) {
		bootstrapContext.addCacheRegionDefinition( cacheRegionDefinition );
		return this;
	}

	@Override
	public MetadataBuilder applyTempClassLoader(ClassLoader tempClassLoader) {
		bootstrapContext.injectJpaTempClassLoader( tempClassLoader );
		return this;
	}

	public MetadataBuilder noConstraintByDefault() {
		options.noConstraintByDefault = true;
		return this;
	}

	@Override
	public MetadataBuilder applyFunctions(FunctionContributor functionContributor) {
		// expose the bootstrap context's registries through an adapter scoped to this call
		functionContributor.contributeFunctions( new FunctionContributions() {
			@Override
			public SqmFunctionRegistry getFunctionRegistry() {
				return bootstrapContext.getFunctionRegistry();
			}

			@Override
			public TypeConfiguration getTypeConfiguration() {
				return bootstrapContext.getTypeConfiguration();
			}

			@Override
			public ServiceRegistry getServiceRegistry() {
				return bootstrapContext.getServiceRegistry();
			}
		} );
		return this;
	}

	@Override
	public MetadataBuilder applySqlFunction(String functionName, SqmFunctionDescriptor function) {
		bootstrapContext.addSqlFunction( functionName, function );
		return this;
	}

	@Override
	public MetadataBuilder applyAuxiliaryDatabaseObject(AuxiliaryDatabaseObject auxiliaryDatabaseObject) {
		bootstrapContext.addAuxiliaryDatabaseObject( auxiliaryDatabaseObject );
		return this;
	}

	@Override
	public MetadataBuilder applyAttributeConverter(ConverterDescriptor<?,?> descriptor) {
		bootstrapContext.addAttributeConverterDescriptor( descriptor );
		return this;
	}

	@Override
	public <O,R> MetadataBuilder applyAttributeConverter(Class<?
extends AttributeConverter<O,R>> attributeConverterClass) { bootstrapContext.addAttributeConverterDescriptor( ConverterDescriptors.of( attributeConverterClass, bootstrapContext.getClassmateContext() ) ); return this; } @Override public <O,R> MetadataBuilder applyAttributeConverter(Class<? extends AttributeConverter<O,R>> attributeConverterClass, boolean autoApply) { bootstrapContext.addAttributeConverterDescriptor( ConverterDescriptors.of( attributeConverterClass, autoApply, false, bootstrapContext.getClassmateContext() ) ); return this; } @Override public <O,R> MetadataBuilder applyAttributeConverter(AttributeConverter<O,R> attributeConverter) { bootstrapContext.addAttributeConverterDescriptor( ConverterDescriptors.of( attributeConverter, bootstrapContext.getClassmateContext() ) ); return this; } @Override public MetadataBuilder applyAttributeConverter(AttributeConverter<?,?> attributeConverter, boolean autoApply) { bootstrapContext.addAttributeConverterDescriptor( ConverterDescriptors.of( attributeConverter, autoApply, bootstrapContext.getClassmateContext() ) ); return this; } @Override public MetadataImplementor build() { final CfgXmlAccessService cfgXmlAccessService = options.serviceRegistry.requireService( CfgXmlAccessService.class ); if ( cfgXmlAccessService.getAggregatedConfig() != null ) { if ( cfgXmlAccessService.getAggregatedConfig().getMappingReferences() != null ) { for ( MappingReference mappingReference : cfgXmlAccessService.getAggregatedConfig().getMappingReferences() ) { mappingReference.apply( sources ); } } } final MetadataImplementor bootModel = MetadataBuildingProcess.build( sources, bootstrapContext, options ); if ( isNotEmpty( sources.getHbmXmlBindings() ) ) { final ConfigurationService configurationService = bootstrapContext.getConfigurationService(); final boolean transformHbm = configurationService != null && configurationService.getSetting( MappingSettings.TRANSFORM_HBM_XML, BOOLEAN,false ); if ( !transformHbm ) { for ( 
Binding<JaxbHbmHibernateMapping> hbmXmlBinding : sources.getHbmXmlBindings() ) { final Origin origin = hbmXmlBinding.getOrigin(); DeprecationLogger.DEPRECATION_LOGGER.logDeprecatedHbmXmlProcessing( origin.getType(), origin.getName() ); } } else { final List<Binding<JaxbEntityMappingsImpl>> transformed = HbmXmlTransformer.transform( sources.getHbmXmlBindings(), bootModel, UnsupportedFeatureHandling.fromSetting( configurationService.getSettings().get( MappingSettings.TRANSFORM_HBM_XML_FEATURE_HANDLING ), UnsupportedFeatureHandling.ERROR ) ); final MetadataSources newSources = new MetadataSources( bootstrapContext.getServiceRegistry() ); if ( sources.getAnnotatedClasses() != null ) { sources.getAnnotatedClasses().forEach( newSources::addAnnotatedClass ); } if ( sources.getAnnotatedClassNames() != null ) { sources.getAnnotatedClassNames().forEach( newSources::addAnnotatedClassName ); } if ( sources.getAnnotatedPackages() != null ) { sources.getAnnotatedPackages().forEach( newSources::addPackage ); } if ( sources.getExtraQueryImports() != null ) { sources.getExtraQueryImports().forEach( newSources::addQueryImport ); } for ( Binding<JaxbEntityMappingsImpl> mappingXmlBinding : transformed ) { newSources.addMappingXmlBinding( mappingXmlBinding ); } return (MetadataImplementor) newSources.buildMetadata(); } } return bootModel; } @Override public BootstrapContext getBootstrapContext() { return bootstrapContext; } @Override public MetadataBuildingOptions getMetadataBuildingOptions() { return options; } public static class MappingDefaultsImpl implements MappingDefaults { private String implicitSchemaName; private String implicitCatalogName; private boolean implicitlyQuoteIdentifiers; private AccessType implicitCacheAccessType; private CollectionClassification implicitListClassification; public MappingDefaultsImpl(StandardServiceRegistry serviceRegistry) { final ConfigurationService configService = serviceRegistry.requireService( ConfigurationService.class ); // 
AvailableSettings.DEFAULT_SCHEMA and AvailableSettings.DEFAULT_CATALOG // are taken into account later, at runtime, when rendering table/sequence names. // These fields are exclusively about mapping defaults, // overridden in XML mappings or through setters in MetadataBuilder. implicitSchemaName = null; implicitCatalogName = null; implicitlyQuoteIdentifiers = configService.getSetting( MappingSettings.GLOBALLY_QUOTED_IDENTIFIERS, BOOLEAN, false ); implicitCacheAccessType = configService.getSetting( CacheSettings.DEFAULT_CACHE_CONCURRENCY_STRATEGY, value -> AccessType.fromExternalName( value.toString() ) ); implicitListClassification = configService.getSetting( MappingSettings.DEFAULT_LIST_SEMANTICS, value -> { final CollectionClassification classification = CollectionClassification.interpretSetting( value ); if ( classification != CollectionClassification.LIST && classification != CollectionClassification.BAG ) { throw new AnnotationException( String.format( Locale.ROOT, "'%s' should specify either '%s' or '%s' (was '%s')", MappingSettings.DEFAULT_LIST_SEMANTICS, java.util.List.class.getName(), java.util.Collection.class.getName(), classification.name() ) ); } return classification; }, CollectionClassification.BAG ); } @Override public String getImplicitSchemaName() { return implicitSchemaName; } @Override public String getImplicitCatalogName() { return implicitCatalogName; } @Override public boolean shouldImplicitlyQuoteIdentifiers() { return implicitlyQuoteIdentifiers; } @Override public String getImplicitIdColumnName() { return DEFAULT_IDENTIFIER_COLUMN_NAME; } @Override public String getImplicitTenantIdColumnName() { return DEFAULT_TENANT_IDENTIFIER_COLUMN_NAME; } @Override public String getImplicitDiscriminatorColumnName() { return DEFAULT_DISCRIMINATOR_COLUMN_NAME; } @Override public String getImplicitPackageName() { return null; } @Override public boolean isAutoImportEnabled() { return true; } @Override public String getImplicitCascadeStyleName() { return 
DEFAULT_CASCADE_NAME; } @Override public String getImplicitPropertyAccessorName() { return DEFAULT_PROPERTY_ACCESS_NAME; } @Override public boolean areEntitiesImplicitlyLazy() { // for now, just hard-code return false; } @Override public boolean areCollectionsImplicitlyLazy() { // for now, just hard-code return true; } @Override public AccessType getImplicitCacheAccessType() { return implicitCacheAccessType; } @Override public CollectionClassification getImplicitListClassification() { return implicitListClassification; } } public static class MetadataBuildingOptionsImpl implements MetadataBuildingOptions, JpaOrmXmlPersistenceUnitDefaultAware { private final StandardServiceRegistry serviceRegistry; private final MappingDefaultsImpl mappingDefaults; private final TimeZoneStorageType defaultTimezoneStorage; private final WrapperArrayHandling wrapperArrayHandling; // todo (6.0) : remove bootstrapContext property along with the deprecated methods private BootstrapContext bootstrapContext; private final ArrayList<BasicTypeRegistration> basicTypeRegistrations = new ArrayList<>(); private final ArrayList<CompositeUserType<?>> compositeUserTypes = new ArrayList<>(); private ImplicitNamingStrategy implicitNamingStrategy; private PhysicalNamingStrategy physicalNamingStrategy; private ColumnOrderingStrategy columnOrderingStrategy; private SharedCacheMode sharedCacheMode; private final AccessType defaultCacheAccessType; private final boolean multiTenancyEnabled; private boolean explicitDiscriminatorsForJoinedInheritanceSupported; private boolean implicitDiscriminatorsForJoinedInheritanceSupported; private boolean implicitlyForceDiscriminatorInSelect; private boolean useNationalizedCharacterData; private boolean noConstraintByDefault; private final String schemaCharset; private final boolean xmlMappingEnabled; private final boolean allowExtensionsInCdi; private final boolean xmlFormatMapperLegacyFormat; public MetadataBuildingOptionsImpl(StandardServiceRegistry serviceRegistry) 
{ this.serviceRegistry = serviceRegistry; final StrategySelector strategySelector = serviceRegistry.requireService( StrategySelector.class ); final ConfigurationService configService = serviceRegistry.requireService( ConfigurationService.class ); mappingDefaults = new MappingDefaultsImpl( serviceRegistry ); defaultTimezoneStorage = resolveTimeZoneStorageStrategy( configService ); wrapperArrayHandling = resolveWrapperArrayHandling( configService ); multiTenancyEnabled = MultiTenancy.isMultiTenancyEnabled( serviceRegistry ); xmlMappingEnabled = configService.getSetting( MappingSettings.XML_MAPPING_ENABLED, BOOLEAN, true ); xmlFormatMapperLegacyFormat = configService.getSetting( MappingSettings.XML_FORMAT_MAPPER_LEGACY_FORMAT, BOOLEAN, false ); implicitDiscriminatorsForJoinedInheritanceSupported = configService.getSetting( MappingSettings.IMPLICIT_DISCRIMINATOR_COLUMNS_FOR_JOINED_SUBCLASS, BOOLEAN, false ); explicitDiscriminatorsForJoinedInheritanceSupported = !configService.getSetting( MappingSettings.IGNORE_EXPLICIT_DISCRIMINATOR_COLUMNS_FOR_JOINED_SUBCLASS, BOOLEAN, false ); implicitlyForceDiscriminatorInSelect = configService.getSetting( MappingSettings.FORCE_DISCRIMINATOR_IN_SELECTS_BY_DEFAULT, BOOLEAN, false ); sharedCacheMode = configService.getSetting( CacheSettings.JAKARTA_SHARED_CACHE_MODE, value -> value instanceof SharedCacheMode cacheMode ? cacheMode : SharedCacheMode.valueOf( value.toString() ), configService.getSetting( CacheSettings.JPA_SHARED_CACHE_MODE, value -> { if ( value == null ) { return null; } DeprecationLogger.DEPRECATION_LOGGER.deprecatedSetting( CacheSettings.JPA_SHARED_CACHE_MODE, CacheSettings.JAKARTA_SHARED_CACHE_MODE ); return value instanceof SharedCacheMode cacheMode ? 
cacheMode : SharedCacheMode.valueOf( value.toString() ); }, SharedCacheMode.UNSPECIFIED ) ); final RegionFactory regionFactory = serviceRegistry.getService( RegionFactory.class ); defaultCacheAccessType = configService.getSetting( CacheSettings.DEFAULT_CACHE_CONCURRENCY_STRATEGY, value -> { if ( value == null ) { return null; } else if ( value instanceof CacheConcurrencyStrategy cacheConcurrencyStrategy ) { return cacheConcurrencyStrategy.toAccessType(); } else if ( value instanceof AccessType accessType ) { return accessType; } else { return AccessType.fromExternalName( value.toString() ); } }, // by default, see if the defined RegionFactory (if one) defines a default regionFactory == null ? null : regionFactory.getDefaultAccessType() ); final String defaultConstraintMode = configService.getSetting( SchemaToolingSettings.HBM2DDL_DEFAULT_CONSTRAINT_MODE, STRING, null ); noConstraintByDefault = ConstraintMode.NO_CONSTRAINT.name() .equalsIgnoreCase( defaultConstraintMode ); implicitNamingStrategy = strategySelector.<ImplicitNamingStrategy>resolveDefaultableStrategy( ImplicitNamingStrategy.class, configService.getSettings().get( MappingSettings.IMPLICIT_NAMING_STRATEGY ), () -> strategySelector.resolveDefaultableStrategy( ImplicitNamingStrategy.class, "default", ImplicitNamingStrategyJpaCompliantImpl.INSTANCE ) ); physicalNamingStrategy = strategySelector.resolveDefaultableStrategy( PhysicalNamingStrategy.class, configService.getSettings().get( MappingSettings.PHYSICAL_NAMING_STRATEGY ), PhysicalNamingStrategyStandardImpl.INSTANCE ); columnOrderingStrategy = strategySelector.<ColumnOrderingStrategy>resolveDefaultableStrategy( ColumnOrderingStrategy.class, configService.getSettings().get( MappingSettings.COLUMN_ORDERING_STRATEGY ), () -> strategySelector.resolveDefaultableStrategy( ColumnOrderingStrategy.class, "default", ColumnOrderingStrategyStandard.INSTANCE ) ); useNationalizedCharacterData = configService.getSetting( 
MappingSettings.USE_NATIONALIZED_CHARACTER_DATA, BOOLEAN, false ); schemaCharset = configService.getSetting( SchemaToolingSettings.HBM2DDL_CHARSET_NAME, STRING, null ); allowExtensionsInCdi = configService.getSetting( ManagedBeanSettings.ALLOW_EXTENSIONS_IN_CDI, BOOLEAN, false ); } @Override public StandardServiceRegistry getServiceRegistry() { return serviceRegistry; } @Override public MappingDefaults getMappingDefaults() { return mappingDefaults; } @Override public TimeZoneStorageStrategy getDefaultTimeZoneStorage() { return toTimeZoneStorageStrategy( getTimeZoneSupport() ); } private Dialect getDialect() { return serviceRegistry.requireService( JdbcServices.class ).getDialect(); } @Override public TimeZoneSupport getTimeZoneSupport() { try { return getDialect().getTimeZoneSupport(); } catch ( ServiceException se ) { return TimeZoneSupport.NONE; } } private TimeZoneStorageStrategy toTimeZoneStorageStrategy(TimeZoneSupport timeZoneSupport) { return switch (defaultTimezoneStorage) { case NATIVE -> { if ( timeZoneSupport != TimeZoneSupport.NATIVE ) { throw new HibernateException( "The configured time zone storage type NATIVE is not supported with the configured dialect" ); } yield TimeZoneStorageStrategy.NATIVE; } case COLUMN -> TimeZoneStorageStrategy.COLUMN; case NORMALIZE -> TimeZoneStorageStrategy.NORMALIZE; case NORMALIZE_UTC -> TimeZoneStorageStrategy.NORMALIZE_UTC; case AUTO -> switch (timeZoneSupport) { // if the db has native support for timezones, we use that, not a column case NATIVE -> TimeZoneStorageStrategy.NATIVE; // otherwise we use a separate column case NORMALIZE, NONE -> TimeZoneStorageStrategy.COLUMN; }; case DEFAULT -> switch (timeZoneSupport) { // if the db has native support for timezones, we use that, and don't normalize case NATIVE -> TimeZoneStorageStrategy.NATIVE; // otherwise we normalize things to UTC case NORMALIZE, NONE -> TimeZoneStorageStrategy.NORMALIZE_UTC; }; }; } @Override public WrapperArrayHandling getWrapperArrayHandling() { 
return wrapperArrayHandling == WrapperArrayHandling.PICK ? pickWrapperArrayHandling( getDialect() ) : wrapperArrayHandling; } @Override public List<BasicTypeRegistration> getBasicTypeRegistrations() { return basicTypeRegistrations; } @Override public List<CompositeUserType<?>> getCompositeUserTypes() { return compositeUserTypes; } @Override public TypeConfiguration getTypeConfiguration() { return bootstrapContext.getTypeConfiguration(); } @Override public ImplicitNamingStrategy getImplicitNamingStrategy() { return implicitNamingStrategy; } @Override public PhysicalNamingStrategy getPhysicalNamingStrategy() { return physicalNamingStrategy; } @Override public ColumnOrderingStrategy getColumnOrderingStrategy() { return columnOrderingStrategy; } @Override public SharedCacheMode getSharedCacheMode() { return sharedCacheMode; } @Override public AccessType getImplicitCacheAccessType() { return defaultCacheAccessType; } @Override public boolean isMultiTenancyEnabled() { return multiTenancyEnabled; } @Override public boolean ignoreExplicitDiscriminatorsForJoinedInheritance() { return !explicitDiscriminatorsForJoinedInheritanceSupported; } @Override public boolean createImplicitDiscriminatorsForJoinedInheritance() { return implicitDiscriminatorsForJoinedInheritanceSupported; } @Override public boolean shouldImplicitlyForceDiscriminatorInSelect() { return implicitlyForceDiscriminatorInSelect; } @Override public boolean useNationalizedCharacterData() { return useNationalizedCharacterData; } @Override public boolean isNoConstraintByDefault() { return noConstraintByDefault; } @Override public String getSchemaCharset() { return schemaCharset; } @Override public boolean isXmlMappingEnabled() { return xmlMappingEnabled; } @Override public boolean isAllowExtensionsInCdi() { return allowExtensionsInCdi; } @Override public boolean isXmlFormatMapperLegacyFormatEnabled() { return xmlFormatMapperLegacyFormat; } /** * Yuck. 
This is needed because JPA lets users define "global building options" * in {@code orm.xml} mappings. Forget that there are generally multiple * {@code orm.xml} mappings if using XML approach... Ugh */ public void apply(JpaOrmXmlPersistenceUnitDefaults jpaOrmXmlPersistenceUnitDefaults) { if ( !mappingDefaults.shouldImplicitlyQuoteIdentifiers() ) { mappingDefaults.implicitlyQuoteIdentifiers = jpaOrmXmlPersistenceUnitDefaults.shouldImplicitlyQuoteIdentifiers(); } if ( mappingDefaults.getImplicitCatalogName() == null ) { mappingDefaults.implicitCatalogName = nullIfEmpty( jpaOrmXmlPersistenceUnitDefaults.getDefaultCatalogName() ); } if ( mappingDefaults.getImplicitSchemaName() == null ) { mappingDefaults.implicitSchemaName = nullIfEmpty( jpaOrmXmlPersistenceUnitDefaults.getDefaultSchemaName() ); } } @Override public void apply(PersistenceUnitMetadata persistenceUnitMetadata) { if ( !mappingDefaults.implicitlyQuoteIdentifiers ) { mappingDefaults.implicitlyQuoteIdentifiers = persistenceUnitMetadata.useQuotedIdentifiers(); } if ( mappingDefaults.getImplicitCatalogName() == null ) { mappingDefaults.implicitCatalogName = nullIfEmpty( persistenceUnitMetadata.getDefaultCatalog() ); } if ( mappingDefaults.getImplicitSchemaName() == null ) { mappingDefaults.implicitSchemaName = nullIfEmpty( persistenceUnitMetadata.getDefaultSchema() ); } } public void setBootstrapContext(BootstrapContext bootstrapContext) { this.bootstrapContext = bootstrapContext; } } private static TimeZoneStorageType resolveTimeZoneStorageStrategy( ConfigurationService configService) { return configService.getSetting( MappingSettings.TIMEZONE_DEFAULT_STORAGE, value -> TimeZoneStorageType.valueOf( value.toString() ), TimeZoneStorageType.DEFAULT ); } private static WrapperArrayHandling resolveWrapperArrayHandling( ConfigurationService configService) { return coalesceSuppliedValues( () -> configService.getSetting( MappingSettings.WRAPPER_ARRAY_HANDLING, WrapperArrayHandling::interpretExternalSettingLeniently ), 
() -> resolveFallbackWrapperArrayHandling( configService ) ); } private static WrapperArrayHandling pickWrapperArrayHandling(Dialect dialect) { if ( dialect.supportsStandardArrays() && ( dialect.getPreferredSqlTypeCodeForArray() == SqlTypes.ARRAY || dialect.getPreferredSqlTypeCodeForArray() == SqlTypes.SQLXML ) ) { return WrapperArrayHandling.ALLOW; } else { return WrapperArrayHandling.LEGACY; } } private static WrapperArrayHandling resolveFallbackWrapperArrayHandling( ConfigurationService configService) { return configService.getSetting( JpaComplianceSettings.JPA_COMPLIANCE, BOOLEAN, false ) ? WrapperArrayHandling.PICK // JPA compliance was enabled. Use PICK : WrapperArrayHandling.DISALLOW; } }
apache/manifoldcf
36,215
connectors/csws/connector/src/main/java/org/apache/manifoldcf/authorities/authorities/csws/CswsAuthority.java
/* $Id: CswsAuthority.java 988245 2010-08-23 18:39:35Z kwright $ */ /** * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.manifoldcf.authorities.authorities.csws; import com.opentext.livelink.service.memberservice.MemberRight; import org.apache.manifoldcf.core.interfaces.*; import org.apache.manifoldcf.agents.interfaces.*; import org.apache.manifoldcf.authorities.interfaces.*; import org.apache.manifoldcf.authorities.system.Logging; import org.apache.manifoldcf.connectorcommon.interfaces.*; import com.opentext.livelink.service.memberservice.User; import com.opentext.livelink.service.memberservice.Member; import com.opentext.livelink.service.memberservice.MemberPrivileges; import org.apache.manifoldcf.csws.*; import java.io.*; import java.util.*; /** This is the Csws implementation of the IAuthorityConnector interface. * * Access tokens for livelink are simply user and usergroup node identifiers. Therefore, * this class retrieves those using the standard livelink call, being sure to map anything * that looks like an active directory user name to something that looks like a Csws * domain/username form. 
* */ public class CswsAuthority extends org.apache.manifoldcf.authorities.authorities.BaseAuthorityConnector { public static final String _rcsid = "@(#)$Id: CswsAuthority.java 988245 2010-08-23 18:39:35Z kwright $"; //Forward to the javascript to check the configuration parameters. private static final String EDIT_CONFIGURATION_JS = "editConfiguration.js"; //Forward to the HTML template to edit the configuration parameters. private static final String EDIT_CONFIGURATION_SERVER_HTML = "editConfiguration_Server.html"; private static final String EDIT_CONFIGURATION_CACHE_HTML = "editConfiguration_Cache.html"; //Forward to the HTML template to view the configuration parameters. private static final String VIEW_CONFIGURATION_HTML = "viewConfiguration.html"; // Signal that we have set up connection parameters properly private boolean hasSessionParameters = false; // Session expiration time private long expirationTime = -1L; // Idle session expiration interval private final static long expirationInterval = 300000L; // Data from the parameters private String serverProtocol = null; private String serverName = null; private int serverPort = -1; private String serverUsername = null; private String serverPassword = null; private String authenticationServicePath = null; //private String documentManagementServicePath = null; //private String contentServiceServicePath = null; private String memberServiceServicePath = null; //private String searchServiceServicePath = null; private String serverHTTPNTLMDomain = null; private String serverHTTPNTLMUsername = null; private String serverHTTPNTLMPassword = null; private IKeystoreManager serverHTTPSKeystore = null; // Data required for maintaining Csws connection private CswsSession cswsSession = null; // Cache variables private String cacheLifetime = null; private String cacheLRUsize = null; private long responseLifetime = 60000L; private int LRUsize = 1000; /** Cache manager. 
*/ protected ICacheManager cacheManager = null; // Csws does not have "deny" permissions, and there is no such thing as a document with no tokens, so it is safe to not have a local "deny" token. // However, people feel that a suspenders-and-belt approach is called for, so this restriction has been added. // Csws tokens are numbers, "SYSTEM", or "GUEST", so they can't collide with the standard form. /** Constructor. */ public CswsAuthority() { } /** Set thread context. */ @Override public void setThreadContext(IThreadContext tc) throws ManifoldCFException { super.setThreadContext(tc); cacheManager = CacheManagerFactory.make(tc); } /** Clear thread context. */ @Override public void clearThreadContext() { super.clearThreadContext(); cacheManager = null; } /** Connect. The configuration parameters are included. *@param configParams are the configuration parameters for this connection. */ @Override public void connect(ConfigParams configParams) { super.connect(configParams); } /** Initialize the parameters, including the ones needed for caching. 
*/ protected void getSessionParameters() throws ManifoldCFException { if (!hasSessionParameters) { // Server parameters serverProtocol = params.getParameter(CswsParameters.serverProtocol); serverName = params.getParameter(CswsParameters.serverName); String serverPortString = params.getParameter(CswsParameters.serverPort); serverUsername = params.getParameter(CswsParameters.serverUsername); serverPassword = params.getObfuscatedParameter(CswsParameters.serverPassword); authenticationServicePath = params.getParameter(CswsParameters.authenticationPath); //documentManagementServicePath = params.getParameter(CswsParameters.documentManagementPath); //contentServiceServicePath = params.getParameter(CswsParameters.contentServicePath); memberServiceServicePath = params.getParameter(CswsParameters.memberServicePath); //searchServiceServicePath = params.getParameter(CswsParameters.searchServicePath); serverHTTPNTLMDomain = params.getParameter(CswsParameters.serverHTTPNTLMDomain); serverHTTPNTLMUsername = params.getParameter(CswsParameters.serverHTTPNTLMUsername); serverHTTPNTLMPassword = params.getObfuscatedParameter(CswsParameters.serverHTTPNTLMPassword); // Server parameter processing if (serverProtocol == null || serverProtocol.length() == 0) serverProtocol = "http"; if (serverPortString == null) serverPort = 2099; else serverPort = new Integer(serverPortString).intValue(); if (serverHTTPNTLMDomain != null && serverHTTPNTLMDomain.length() == 0) serverHTTPNTLMDomain = null; if (serverHTTPNTLMUsername == null || serverHTTPNTLMUsername.length() == 0) { serverHTTPNTLMUsername = null; serverHTTPNTLMPassword = null; } // Set up server ssl if indicated String serverHTTPSKeystoreData = params.getParameter(CswsParameters.serverHTTPSKeystore); if (serverHTTPSKeystoreData != null) serverHTTPSKeystore = KeystoreManagerFactory.make("",serverHTTPSKeystoreData); cacheLifetime = params.getParameter(CswsParameters.cacheLifetime); if (cacheLifetime == null) cacheLifetime = "1"; cacheLRUsize 
= params.getParameter(CswsParameters.cacheLRUSize); if (cacheLRUsize == null) cacheLRUsize = "1000"; try { responseLifetime = Long.parseLong(this.cacheLifetime) * 60L * 1000L; LRUsize = Integer.parseInt(this.cacheLRUsize); } catch (NumberFormatException e) { throw new ManifoldCFException("Cache lifetime or Cache LRU size must be an integer: "+e.getMessage(),e); } if (Logging.authorityConnectors.isDebugEnabled()) { String passwordExists = (serverPassword!=null && serverPassword.length() > 0)?"password exists":""; Logging.authorityConnectors.debug("Csws: Csws connection parameters: Server='"+serverName+"'; port='"+serverPort+"'; user name='"+serverUsername+"'; "+passwordExists); } hasSessionParameters = true; } } /** Set up a session. */ protected void getSession() throws ManifoldCFException, ServiceInterruption { getSessionParameters(); if (cswsSession == null) { // Construct the various URLs we need final String baseURL = serverProtocol + "://" + serverName + ":" + serverPort; final String authenticationServiceURL = baseURL + authenticationServicePath; //final String documentManagementServiceURL = baseURL + documentManagementServicePath; //final String contentServiceServiceURL = baseURL + contentServiceServicePath; final String memberServiceServiceURL = baseURL + memberServiceServicePath; //final String searchServiceServiceURL = baseURL + searchServiceServicePath; if (Logging.authorityConnectors.isDebugEnabled()) { Logging.authorityConnectors.debug("Csws: Csws session created."); } // Construct a new csws session object for setting up this session cswsSession = new CswsSession(serverUsername, serverPassword, serverHTTPSKeystore, 1000L * 60L * 15L, authenticationServiceURL, null, null, memberServiceServiceURL, null); } expirationTime = System.currentTimeMillis() + expirationInterval; } // All methods below this line will ONLY be called if a connect() call succeeded // on this instance! /** Check connection for sanity. 
*/ @Override public String check() throws ManifoldCFException { try { // Reestablish the session cswsSession = null; getSession(); final User user = cswsSession.getUserByLoginName(this.serverUsername); if (user != null) { return super.check(); } return "Connection failed: User authentication failed"; } catch (ServiceInterruption e) { return "Temporary service interruption: "+e.getMessage(); } catch (ManifoldCFException e) { return "Connection failed: "+e.getMessage(); } } /** This method is periodically called for all connectors that are connected but not * in active use. */ @Override public void poll() throws ManifoldCFException { if (cswsSession == null) return; long currentTime = System.currentTimeMillis(); if (currentTime >= expirationTime) { expirationTime = -1L; cswsSession = null; } } /** This method is called to assess whether to count this connector instance should * actually be counted as being connected. *@return true if the connector instance is actually connected. */ @Override public boolean isConnected() { return cswsSession != null; } /** Close the connection. Call this before discarding the repository connector. */ @Override public void disconnect() throws ManifoldCFException { hasSessionParameters = false; cswsSession = null; expirationTime = -1L; serverProtocol = null; serverName = null; serverPort = -1; serverUsername = null; serverPassword = null; authenticationServicePath = null; //documentManagementServicePath = null; //contentServiceServicePath = null; memberServiceServicePath = null; //searchServiceServicePath = null; serverHTTPNTLMDomain = null; serverHTTPNTLMUsername = null; serverHTTPNTLMPassword = null; serverHTTPSKeystore = null; cacheLifetime = null; cacheLRUsize = null; super.disconnect(); } /** Obtain the access tokens for a given user name. *@param userName is the user name or identifier. *@return the response tokens (according to the current authority). 
* (Should throws an exception only when a condition cannot be properly described within the authorization response object.) */ @Override public AuthorizationResponse getAuthorizationResponse(String userName) throws ManifoldCFException { // We need the session parameters here getSessionParameters(); // Construct a cache description object ICacheDescription objectDescription = new AuthorizationResponseDescription(userName, serverProtocol,serverName,serverPort, serverUsername,serverPassword, authenticationServicePath, memberServiceServicePath, serverHTTPSKeystore, responseLifetime,LRUsize); // Enter the cache ICacheHandle ch = cacheManager.enterCache(new ICacheDescription[]{objectDescription},null,null); try { ICacheCreateHandle createHandle = cacheManager.enterCreateSection(ch); try { // Lookup the object AuthorizationResponse response = (AuthorizationResponse)cacheManager.lookupObject(createHandle,objectDescription); if (response != null) return response; // Create the object. response = getAuthorizationResponseUncached(userName); // Save it in the cache cacheManager.saveObject(createHandle,objectDescription,response); // And return it... return response; } finally { cacheManager.leaveCreateSection(createHandle); } } finally { cacheManager.leaveCache(ch); } } /** Uncached method to get access tokens for a user name. */ protected AuthorizationResponse getAuthorizationResponseUncached(String userName) throws ManifoldCFException { try { getSession(); // First, do what's necessary to map the user name that comes in to a reasonable // Csws domain\\user combination. 
if (Logging.authorityConnectors.isDebugEnabled()) { Logging.authorityConnectors.debug("Authentication user name = '"+userName+"'"); } String domainAndUser = userName; if (Logging.authorityConnectors.isDebugEnabled()) { Logging.authorityConnectors.debug("Csws: Csws user name = '"+domainAndUser+"'"); } ArrayList list = new ArrayList(); // Find out if the specified user is a member of the Guest group, or is a member // of the System group. // Get information about the current user. This is how we will determine if the // user exists, and also what permissions s/he has. final User user = cswsSession.getUserByLoginName(domainAndUser); if (user == null) { if (Logging.authorityConnectors.isDebugEnabled()) Logging.authorityConnectors.debug("Csws: Csws user '"+domainAndUser+"' does not exist"); return RESPONSE_USERNOTFOUND; } if (user.isDeleted()) { if (Logging.authorityConnectors.isDebugEnabled()) Logging.authorityConnectors.debug("Csws: Csws user '"+domainAndUser+"' has been deleted"); // Since the user cannot become undeleted, then this should be treated as 'user does not exist'. return RESPONSE_USERNOTFOUND; } final MemberPrivileges memberPrivileges = user.getPrivileges(); if (memberPrivileges.isPublicAccessEnabled()) { // if ((privs & LAPI_USERS.PRIV_PERM_WORLD) == LAPI_USERS.PRIV_PERM_WORLD) ?? list.add("GUEST"); } if (memberPrivileges. isCanAdministerSystem()) { // if ((privs & LAPI_USERS.PRIV_PERM_BYPASS) == LAPI_USERS.PRIV_PERM_BYPASS) list.add("SYSTEM"); } final Member member = cswsSession.getMemberByLoginName(domainAndUser); if (member == null) { if (Logging.authorityConnectors.isDebugEnabled()) Logging.authorityConnectors.debug("Csws: Csws member '"+domainAndUser+"' does not exist"); return RESPONSE_USERNOTFOUND; } final List<? 
extends MemberRight> memberRights = cswsSession.listRightsByMemberId(member.getID()); if (memberRights == null) { if (Logging.authorityConnectors.isDebugEnabled()) Logging.authorityConnectors.debug("Csws: Csws error looking up user rights for '"+domainAndUser+"' - user does not exist"); return RESPONSE_USERNOTFOUND; } // We need also to add in support for the special rights objects. These are: // -1: RIGHT_WORLD // -2: RIGHT_SYSTEM // -3: RIGHT_OWNER // -4: RIGHT_GROUP // // RIGHT_WORLD means guest access. // RIGHT_SYSTEM is "Public Access". // RIGHT_OWNER is access by the owner of the object. // RIGHT_GROUP is access by a member of the base group containing the owner // // These objects are returned by the corresponding GetObjectRights() call made during // the ingestion process. We have to figure out how to map these to things that are // the equivalent of acls. // Idea: // 1) RIGHT_WORLD is based on some property of the user. // 2) RIGHT_SYSTEM is based on some property of the user. // 3) RIGHT_OWNER and RIGHT_GROUP are managed solely in the ingestion side of the world. // NOTE: It turns out that -1 and -2 are in fact returned as part of the list of // rights requested above. They get mapped to special keywords already in the above // code, so it *may* be reasonable to filter them from here. It's not a real problem because // it's effectively just a duplicate of what we are doing. final String[] rval = new String[memberRights.size()]; int j = 0; for (final MemberRight memberRight : memberRights) { rval[j++] = Long.toString(memberRight.getID()); } return new AuthorizationResponse(rval,AuthorizationResponse.RESPONSE_OK); } catch (ServiceInterruption e) { Logging.authorityConnectors.warn("Csws: Server seems to be down: "+e.getMessage(),e); return RESPONSE_UNREACHABLE; } } /** Obtain the default access tokens for a given user name. *@param userName is the user name or identifier. *@return the default response tokens, presuming that the connect method fails. 
*/ @Override public AuthorizationResponse getDefaultAuthorizationResponse(String userName) { // The default response if the getConnection method fails return RESPONSE_UNREACHABLE; } // UI support methods. // // These support methods are involved in setting up authority connection configuration information. The configuration methods cannot assume that the // current authority object is connected. That is why they receive a thread context argument. /** Output the configuration header section. * This method is called in the head section of the connector's configuration page. Its purpose is to add the required tabs to the list, and to output any * javascript methods that might be needed by the configuration editing HTML. *@param threadContext is the local thread context. *@param out is the output to which any HTML should be sent. *@param parameters are the configuration parameters, as they currently exist, for this connection being configured. *@param tabsArray is an array of tab names. Add to this array any tab names that are specific to the connector. */ @Override public void outputConfigurationHeader(IThreadContext threadContext, IHTTPOutput out, Locale locale, ConfigParams parameters, List<String> tabsArray) throws ManifoldCFException, IOException { tabsArray.add(Messages.getString(locale,"CswsConnector.Server")); tabsArray.add(Messages.getString(locale,"CswsConnector.Cache")); Messages.outputResourceWithVelocity(out, locale, EDIT_CONFIGURATION_JS, null, true); } /** Output the configuration body section. * This method is called in the body section of the authority connector's configuration page. Its purpose is to present the required form elements for editing. * The coder can presume that the HTML that is output from this configuration will be within appropriate &lt;html&gt;, &lt;body&gt;, and &lt;form&gt; tags. The name of the * form is "editconnection". *@param threadContext is the local thread context. *@param out is the output to which any HTML should be sent. 
*@param parameters are the configuration parameters, as they currently exist, for this connection being configured. *@param tabName is the current tab name. */ @Override public void outputConfigurationBody(IThreadContext threadContext, IHTTPOutput out, Locale locale, ConfigParams parameters, String tabName) throws ManifoldCFException, IOException { Map<String, Object> velocityContext = new HashMap<>(); velocityContext.put("TabName",tabName); fillInServerTab(velocityContext, out, parameters); fillInCacheTab(velocityContext, out, parameters); Messages.outputResourceWithVelocity(out, locale, EDIT_CONFIGURATION_SERVER_HTML, velocityContext); Messages.outputResourceWithVelocity(out, locale, EDIT_CONFIGURATION_CACHE_HTML, velocityContext); } /** Fill in Server tab */ protected static void fillInServerTab(Map<String,Object> velocityContext, IHTTPOutput out, ConfigParams parameters) { // LAPI parameters String serverProtocol = parameters.getParameter(CswsParameters.serverProtocol); if (serverProtocol == null) serverProtocol = "http"; String serverName = parameters.getParameter(CswsParameters.serverName); if (serverName == null) serverName = "localhost"; String serverPort = parameters.getParameter(CswsParameters.serverPort); if (serverPort == null) serverPort = "2099"; String serverUserName = parameters.getParameter(CswsParameters.serverUsername); if(serverUserName == null) serverUserName = ""; String serverPassword = parameters.getObfuscatedParameter(CswsParameters.serverPassword); if (serverPassword == null) serverPassword = ""; else serverPassword = out.mapPasswordToKey(serverPassword); String authenticationServicePath = parameters.getParameter(CswsParameters.authenticationPath); if (authenticationServicePath == null) authenticationServicePath = CswsParameters.authenticationPathDefault; String memberServiceServicePath = parameters.getParameter(CswsParameters.memberServicePath); if (memberServiceServicePath == null) memberServiceServicePath = 
CswsParameters.memberServicePathDefault; String serverHTTPNTLMDomain = parameters.getParameter(CswsParameters.serverHTTPNTLMDomain); if(serverHTTPNTLMDomain == null) serverHTTPNTLMDomain = ""; String serverHTTPNTLMUserName = parameters.getParameter(CswsParameters.serverHTTPNTLMUsername); if(serverHTTPNTLMUserName == null) serverHTTPNTLMUserName = ""; String serverHTTPNTLMPassword = parameters.getObfuscatedParameter(CswsParameters.serverHTTPNTLMPassword); if (serverHTTPNTLMPassword == null) serverHTTPNTLMPassword = ""; else serverHTTPNTLMPassword = out.mapPasswordToKey(serverHTTPNTLMPassword); String serverHTTPSKeystore = parameters.getParameter(CswsParameters.serverHTTPSKeystore); IKeystoreManager localServerHTTPSKeystore; Map<String,String> serverCertificatesMap = null; String message = null; try { if (serverHTTPSKeystore == null) localServerHTTPSKeystore = KeystoreManagerFactory.make(""); else localServerHTTPSKeystore = KeystoreManagerFactory.make("",serverHTTPSKeystore); // List the individual certificates in the store, with a delete button for each String[] contents = localServerHTTPSKeystore.getContents(); if (contents.length > 0) { serverCertificatesMap = new HashMap<>(); int i = 0; while (i < contents.length) { String alias = contents[i]; String description = localServerHTTPSKeystore.getDescription(alias); if (description.length() > 128) description = description.substring(0,125) + "..."; serverCertificatesMap.put(alias, description); i++; } } } catch (ManifoldCFException e) { message = e.getMessage(); Logging.authorityConnectors.warn(e); } velocityContext.put("SERVERPROTOCOL",serverProtocol); velocityContext.put("SERVERNAME",serverName); velocityContext.put("SERVERPORT",serverPort); velocityContext.put("SERVERUSERNAME",serverUserName); velocityContext.put("SERVERPASSWORD",serverPassword); velocityContext.put("AUTHENTICATIONSERVICEPATH", authenticationServicePath); velocityContext.put("MEMBERSERVICESERVICEPATH", memberServiceServicePath); 
velocityContext.put("SERVERHTTPNTLMDOMAIN",serverHTTPNTLMDomain); velocityContext.put("SERVERHTTPNTLMUSERNAME",serverHTTPNTLMUserName); velocityContext.put("SERVERHTTPNTLMPASSWORD",serverHTTPNTLMPassword); if(serverHTTPSKeystore != null) velocityContext.put("SERVERHTTPSKEYSTORE",serverHTTPSKeystore); if(serverCertificatesMap != null) velocityContext.put("SERVERCERTIFICATESMAP", serverCertificatesMap); if(message != null) velocityContext.put("MESSAGE", message); } /** Fill in Cache tab */ private void fillInCacheTab(Map<String, Object> velocityContext, IHTTPOutput out, ConfigParams parameters) { String cacheLifetime = parameters.getParameter(CswsParameters.cacheLifetime); if (cacheLifetime == null) cacheLifetime = "1"; String cacheLRUsize = parameters.getParameter(CswsParameters.cacheLRUSize); if (cacheLRUsize == null) cacheLRUsize = "1000"; velocityContext.put("CACHELIFETIME",cacheLifetime); velocityContext.put("CACHELRUSIZE",cacheLRUsize); } /** Process a configuration post. * This method is called at the start of the authority connector's configuration page, whenever there is a possibility that form data for a connection has been * posted. Its purpose is to gather form information and modify the configuration parameters accordingly. * The name of the posted form is "editconnection". *@param threadContext is the local thread context. *@param variableContext is the set of variables available from the post, including binary file post information. *@param parameters are the configuration parameters, as they currently exist, for this connection being configured. *@return null if all is well, or a string error message if there is an error that should prevent saving of the connection (and cause a redirection to an error page). 
*/ @Override public String processConfigurationPost(IThreadContext threadContext, IPostParameters variableContext, Locale locale, ConfigParams parameters) throws ManifoldCFException { // Server parameters String serverProtocol = variableContext.getParameter("serverprotocol"); if (serverProtocol != null) parameters.setParameter(CswsParameters.serverProtocol,serverProtocol); String serverName = variableContext.getParameter("servername"); if (serverName != null) parameters.setParameter(CswsParameters.serverName,serverName); String serverPort = variableContext.getParameter("serverport"); if (serverPort != null) parameters.setParameter(CswsParameters.serverPort,serverPort); String serverUserName = variableContext.getParameter("serverusername"); if (serverUserName != null) parameters.setParameter(CswsParameters.serverUsername,serverUserName); String serverPassword = variableContext.getParameter("serverpassword"); if (serverPassword != null) parameters.setObfuscatedParameter(CswsParameters.serverPassword,variableContext.mapKeyToPassword(serverPassword)); String authenticationServicePath = variableContext.getParameter("authenticationservicepath"); if (authenticationServicePath != null) parameters.setParameter(CswsParameters.authenticationPath, authenticationServicePath); String memberServiceServicePath = variableContext.getParameter("memberserviceservicepath"); if (memberServiceServicePath != null) parameters.setParameter(CswsParameters.memberServicePath, memberServiceServicePath); String serverHTTPSKeystoreValue = variableContext.getParameter("serverhttpskeystoredata"); final String serverConfigOp = variableContext.getParameter("serverconfigop"); if (serverConfigOp != null) { if (serverConfigOp.equals("Delete")) { String alias = variableContext.getParameter("serverkeystorealias"); final IKeystoreManager mgr; if (serverHTTPSKeystoreValue != null) mgr = KeystoreManagerFactory.make("",serverHTTPSKeystoreValue); else mgr = KeystoreManagerFactory.make(""); mgr.remove(alias); 
serverHTTPSKeystoreValue = mgr.getString(); } else if (serverConfigOp.equals("Add")) { String alias = IDFactory.make(threadContext); byte[] certificateValue = variableContext.getBinaryBytes("servercertificate"); final IKeystoreManager mgr; if (serverHTTPSKeystoreValue != null) mgr = KeystoreManagerFactory.make("",serverHTTPSKeystoreValue); else mgr = KeystoreManagerFactory.make(""); java.io.InputStream is = new java.io.ByteArrayInputStream(certificateValue); String certError = null; try { mgr.importCertificate(alias,is); } catch (Throwable e) { certError = e.getMessage(); } finally { try { is.close(); } catch (IOException e) { // Eat this exception } } if (certError != null) { return "Illegal certificate: "+certError; } serverHTTPSKeystoreValue = mgr.getString(); } } parameters.setParameter(CswsParameters.serverHTTPSKeystore,serverHTTPSKeystoreValue); // Cache parameters String cacheLifetime = variableContext.getParameter("cachelifetime"); if (cacheLifetime != null) parameters.setParameter(CswsParameters.cacheLifetime,cacheLifetime); String cacheLRUsize = variableContext.getParameter("cachelrusize"); if (cacheLRUsize != null) parameters.setParameter(CswsParameters.cacheLRUSize,cacheLRUsize); return null; } /** View configuration. * This method is called in the body section of the authority connector's view configuration page. Its purpose is to present the connection information to the user. * The coder can presume that the HTML that is output from this configuration will be within appropriate &lt;html&gt; and &lt;body&gt; tags. *@param threadContext is the local thread context. *@param out is the output to which any HTML should be sent. *@param parameters are the configuration parameters, as they currently exist, for this connection being configured. 
*/ @Override public void viewConfiguration(IThreadContext threadContext, IHTTPOutput out, Locale locale, ConfigParams parameters) throws ManifoldCFException, IOException { Map<String, Object> paramMap = new HashMap<>(); Map<String,String> configMap = new HashMap<>(); Iterator iter = parameters.listParameters(); while (iter.hasNext()) { String param = (String)iter.next(); String value = parameters.getParameter(param); if (param.length() >= "password".length() && param.substring(param.length()-"password".length()).equalsIgnoreCase("password")) { configMap.put(org.apache.manifoldcf.ui.util.Encoder.bodyEscape(param),"********"); } else if (param.length() >="keystore".length() && param.substring(param.length()-"keystore".length()).equalsIgnoreCase("keystore") || param.length() > "truststore".length() && param.substring(param.length()-"truststore".length()).equalsIgnoreCase("truststore")) { IKeystoreManager kmanager = KeystoreManagerFactory.make("",value); configMap.put(org.apache.manifoldcf.ui.util.Encoder.bodyEscape(param),"=&lt;"+Integer.toString(kmanager.getContents().length)+Messages.getBodyString(locale,"CswsConnector.certificates")+"&gt;"); } else { configMap.put(org.apache.manifoldcf.ui.util.Encoder.bodyEscape(param), org.apache.manifoldcf.ui.util.Encoder.bodyEscape(value)); } } paramMap.put("CONFIGMAP",configMap); Messages.outputResourceWithVelocity(out, locale, VIEW_CONFIGURATION_HTML, paramMap); } protected static StringSet emptyStringSet = new StringSet(); /** This is the cache object descriptor for cached access tokens from * this connector. 
*/
protected static class AuthorizationResponseDescription extends org.apache.manifoldcf.core.cachemanager.BaseDescription {

  /** The user name associated with the access tokens */
  protected final String userName;

  // The server connection parameters that, together with the user name,
  // uniquely identify a cached authorization response.
  protected final String serverProtocol;
  protected final String serverName;
  protected final int serverPort;
  protected final String serverUsername;
  protected final String serverPassword;
  protected final String authenticationServicePath;
  protected final String memberServicePath;
  /** Serialized form of the HTTPS keystore, or null when none was configured. */
  protected final String serverHTTPSKeystore;

  /** How long (ms) a cached response remains valid after creation. */
  protected long responseLifetime;

  /** The expiration time; computed lazily on first use (-1 = not yet computed). */
  protected long expirationTime = -1;

  /** Constructor. */
  public AuthorizationResponseDescription(String userName, String serverProtocol,
    String serverName, int serverPort,
    String serverUsername, String serverPassword,
    String authenticationServicePath, String memberServicePath,
    IKeystoreManager serverHTTPSKeystore,
    long responseLifetime, int LRUsize)
    throws ManifoldCFException {
    super("CswsAuthority",LRUsize);
    this.userName = userName;
    this.serverProtocol = serverProtocol;
    this.serverName = serverName;
    this.serverPort = serverPort;
    this.serverUsername = serverUsername;
    this.serverPassword = serverPassword;
    this.authenticationServicePath = authenticationServicePath;
    this.memberServicePath = memberServicePath;
    // The keystore is flattened to its serialized string so equals/hashCode can
    // compare it; getString() may throw ManifoldCFException, hence the ctor's throws.
    if (serverHTTPSKeystore != null)
      this.serverHTTPSKeystore = serverHTTPSKeystore.getString();
    else
      this.serverHTTPSKeystore = null;
    this.responseLifetime = responseLifetime;
  }

  /** Return the invalidation keys for this object.
  */
  public StringSet getObjectKeys() {
    return emptyStringSet;
  }

  /** Get the critical section name, used for synchronizing the creation of the object */
  public String getCriticalSectionName() {
    // NOTE(review): the section name embeds serverPassword in clear text; if these
    // names are ever logged this would leak credentials — confirm before changing,
    // since altering the name changes lock granularity.
    return getClass().getName() + "-" + userName + "-" + serverProtocol + "-" +
      serverName + "-" + Integer.toString(serverPort) + "-" + serverUsername + "-" +
      serverPassword + "-" + authenticationServicePath + "-" + memberServicePath + "-" +
      ((serverHTTPSKeystore==null)?"":serverHTTPSKeystore);
  }

  /** Return the object expiration interval */
  public long getObjectExpirationTime(long currentTime) {
    if (expirationTime == -1)
      expirationTime = currentTime + responseLifetime;
    return expirationTime;
  }

  public int hashCode() {
    // Integer.hashCode(serverPort) replaces the deprecated boxing constructor
    // new Integer(serverPort).hashCode(); the resulting value is identical
    // (an int's hash code is the int itself).
    return userName.hashCode() + serverProtocol.hashCode() + serverName.hashCode() +
      Integer.hashCode(serverPort) + serverUsername.hashCode() + serverPassword.hashCode() +
      authenticationServicePath.hashCode() + memberServicePath.hashCode() +
      ((serverHTTPSKeystore==null)?0:serverHTTPSKeystore.hashCode());
  }

  public boolean equals(Object o) {
    if (!(o instanceof AuthorizationResponseDescription))
      return false;
    AuthorizationResponseDescription ard = (AuthorizationResponseDescription)o;
    // responseLifetime/expirationTime are deliberately excluded: identity is the
    // connection coordinates plus user, not the cache bookkeeping.
    return ard.userName.equals(userName) && ard.serverProtocol.equals(serverProtocol) &&
      ard.serverName.equals(serverName) && ard.serverPort == serverPort &&
      ard.serverUsername.equals(serverUsername) && ard.serverPassword.equals(serverPassword) &&
      ard.authenticationServicePath.equals(authenticationServicePath) &&
      ard.memberServicePath.equals(memberServicePath) &&
      ((ard.serverHTTPSKeystore != null && serverHTTPSKeystore != null && ard.serverHTTPSKeystore.equals(serverHTTPSKeystore)) ||
       ((ard.serverHTTPSKeystore == null || serverHTTPSKeystore == null) && ard.serverHTTPSKeystore == serverHTTPSKeystore));
  }

}

}
apache/jackrabbit
36,148
jackrabbit-data/src/main/java/org/apache/jackrabbit/core/data/db/DbDataStore.java
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.jackrabbit.core.data.db; import org.apache.commons.io.IOUtils; import org.apache.commons.io.input.CountingInputStream; import org.apache.jackrabbit.core.data.AbstractDataStore; import org.apache.jackrabbit.core.data.DataIdentifier; import org.apache.jackrabbit.core.data.DataRecord; import org.apache.jackrabbit.core.data.DataStoreException; import org.apache.jackrabbit.core.data.MultiDataStoreAware; import org.apache.jackrabbit.core.util.db.CheckSchemaOperation; import org.apache.jackrabbit.core.util.db.ConnectionFactory; import org.apache.jackrabbit.core.util.db.ConnectionHelper; import org.apache.jackrabbit.core.util.db.DatabaseAware; import org.apache.jackrabbit.core.util.db.DbUtility; import org.apache.jackrabbit.core.util.db.StreamWrapper; import org.apache.jackrabbit.util.Text; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import java.io.BufferedInputStream; import java.io.ByteArrayInputStream; import java.io.File; import java.io.FileOutputStream; import java.io.IOException; import java.io.InputStream; import java.io.OutputStream; import java.lang.ref.WeakReference; import java.security.DigestInputStream; import java.security.MessageDigest; import 
java.security.NoSuchAlgorithmException; import java.sql.ResultSet; import java.sql.SQLException; import java.util.ArrayList; import java.util.Collections; import java.util.Iterator; import java.util.List; import java.util.Map; import java.util.Properties; import java.util.UUID; import java.util.WeakHashMap; import javax.jcr.RepositoryException; import javax.sql.DataSource; /** * A data store implementation that stores the records in a database using JDBC. * * Configuration: * <pre> * &lt;DataStore class="org.apache.jackrabbit.core.data.db.DbDataStore"&gt; * &lt;param name="{@link #setUrl(String) url}" value="jdbc:postgresql:test"/&gt; * &lt;param name="{@link #setUser(String) user}" value="sa"/&gt; * &lt;param name="{@link #setPassword(String) password}" value="sa"/&gt; * &lt;param name="{@link #setDatabaseType(String) databaseType}" value="postgresql"/&gt; * &lt;param name="{@link #setDriver(String) driver}" value="org.postgresql.Driver"/&gt; * &lt;param name="{@link #setMinRecordLength(int) minRecordLength}" value="1024"/&gt; * &lt;param name="{@link #setMaxConnections(int) maxConnections}" value="2"/&gt; * &lt;param name="{@link #setCopyWhenReading(boolean) copyWhenReading}" value="true"/&gt; * &lt;param name="{@link #setTablePrefix(String) tablePrefix}" value=""/&gt; * &lt;param name="{@link #setSchemaObjectPrefix(String) schemaObjectPrefix}" value=""/&gt; * &lt;param name="{@link #setSchemaCheckEnabled(boolean) schemaCheckEnabled}" value="true"/&gt; * &lt;/DataStore&gt; * </pre> * <p> * Only URL, user name and password usually need to be set. * The remaining settings are generated using the database URL sub-protocol from the * database type resource file. * <p> * JNDI can be used to get the connection. In this case, use the javax.naming.InitialContext as the driver, * and the JNDI name as the URL. If the user and password are configured in the JNDI resource, * they should not be configured here. 
Example JNDI settings:
 * <pre>
 * &lt;param name="driver" value="javax.naming.InitialContext" /&gt;
 * &lt;param name="url" value="java:comp/env/jdbc/Test" /&gt;
 * </pre>
 * <p>
 * For Microsoft SQL Server 2005, there is a problem reading large BLOBs. You will need to use
 * the JDBC driver version 1.2 or newer, and append ;responseBuffering=adaptive to the database URL.
 * Don't append ;selectMethod=cursor, otherwise it can still run out of memory.
 * Example database URL: jdbc:sqlserver://localhost:4220;DatabaseName=test;responseBuffering=adaptive
 * <p>
 * By default, the data is copied to a temp file when reading, to avoid problems when reading multiple
 * blobs at the same time.
 * <p>
 * The tablePrefix can be used to specify a schema and / or catalog name:
 * &lt;param name="tablePrefix" value="ds."&gt;
 */
public class DbDataStore extends AbstractDataStore
        implements DatabaseAware, MultiDataStoreAware {

    /**
     * The default value for the minimum object size.
     */
    public static final int DEFAULT_MIN_RECORD_LENGTH = 100;

    /**
     * Write to a temporary file to get the length (slow, but always works).
     * This is the default setting.
     */
    public static final String STORE_TEMP_FILE = "tempFile";

    /**
     * Call PreparedStatement.setBinaryStream(..., -1)
     */
    public static final String STORE_SIZE_MINUS_ONE = "-1";

    /**
     * Call PreparedStatement.setBinaryStream(..., Integer.MAX_VALUE)
     */
    public static final String STORE_SIZE_MAX = "max";

    /**
     * The digest algorithm used to uniquely identify records.
     * Overridable via the "ds.digest.algorithm" system property; defaults to SHA-256.
     */
    protected static final String DIGEST = System.getProperty("ds.digest.algorithm", "SHA-256");

    /**
     * The prefix used for temporary objects.
     */
    protected static final String TEMP_PREFIX = "TEMP_";

    /**
     * Logger instance
     */
    private static Logger log = LoggerFactory.getLogger(DbDataStore.class);

    /**
     * The minimum modified date. If a file is accessed (read or write) with a modified date
     * older than this value, the modified date is updated to the current time.
     */
    protected long minModifiedDate;

    /**
     * The database URL used.
     */
    protected String url;

    /**
     * The database driver.
     */
    protected String driver;

    /**
     * The user name.
     */
    protected String user;

    /**
     * The password
     */
    protected String password;

    /**
     * The database type used.
     */
    protected String databaseType;

    /**
     * The minimum size of an object that should be stored in this data store.
     */
    protected int minRecordLength = DEFAULT_MIN_RECORD_LENGTH;

    /**
     * The prefix for the datastore table, empty by default.
     */
    protected String tablePrefix = "";

    /**
     * The prefix of the table names. By default it is empty.
     */
    protected String schemaObjectPrefix = "";

    /**
     * Whether the schema check must be done during initialization.
     */
    private boolean schemaCheckEnabled = true;

    /**
     * The logical name of the DataSource to use.
     */
    protected String dataSourceName;

    /**
     * This is the property 'table'
     * in the [databaseType].properties file, initialized with the default value.
     */
    protected String tableSQL = "DATASTORE";

    /**
     * This is the property 'createTable'
     * in the [databaseType].properties file, initialized with the default value.
     */
    protected String createTableSQL =
        "CREATE TABLE ${tablePrefix}${table}(ID VARCHAR(255) PRIMARY KEY, LENGTH BIGINT, LAST_MODIFIED BIGINT, DATA BLOB)";

    /**
     * This is the property 'insertTemp'
     * in the [databaseType].properties file, initialized with the default value.
     */
    protected String insertTempSQL =
        "INSERT INTO ${tablePrefix}${table} VALUES(?, 0, ?, NULL)";

    /**
     * This is the property 'updateData'
     * in the [databaseType].properties file, initialized with the default value.
     */
    protected String updateDataSQL =
        "UPDATE ${tablePrefix}${table} SET DATA=? WHERE ID=?";

    /**
     * This is the property 'updateLastModified'
     * in the [databaseType].properties file, initialized with the default value.
     */
    protected String updateLastModifiedSQL =
        "UPDATE ${tablePrefix}${table} SET LAST_MODIFIED=? WHERE ID=? AND LAST_MODIFIED<?";

    /**
     * This is the property 'update'
     * in the [databaseType].properties file, initialized with the default value.
     */
    protected String updateSQL =
        "UPDATE ${tablePrefix}${table} SET ID=?, LENGTH=?, LAST_MODIFIED=? " +
        "WHERE ID=? AND LAST_MODIFIED=?";

    /**
     * This is the property 'delete'
     * in the [databaseType].properties file, initialized with the default value.
     */
    protected String deleteSQL =
        "DELETE FROM ${tablePrefix}${table} WHERE ID=?";

    /**
     * This is the property 'deleteOlder'
     * in the [databaseType].properties file, initialized with the default value.
     */
    protected String deleteOlderSQL =
        "DELETE FROM ${tablePrefix}${table} WHERE LAST_MODIFIED<?";

    /**
     * This is the property 'selectMeta'
     * in the [databaseType].properties file, initialized with the default value.
     */
    protected String selectMetaSQL =
        "SELECT LENGTH, LAST_MODIFIED FROM ${tablePrefix}${table} WHERE ID=?";

    /**
     * This is the property 'selectAll'
     * in the [databaseType].properties file, initialized with the default value.
     */
    protected String selectAllSQL =
        "SELECT ID FROM ${tablePrefix}${table}";

    /**
     * This is the property 'selectData'
     * in the [databaseType].properties file, initialized with the default value.
     */
    protected String selectDataSQL =
        "SELECT ID, DATA FROM ${tablePrefix}${table} WHERE ID=?";

    /**
     * The stream storing mechanism used.
     * One of STORE_TEMP_FILE (default), STORE_SIZE_MINUS_ONE, STORE_SIZE_MAX.
     */
    protected String storeStream = STORE_TEMP_FILE;

    /**
     * Copy the stream to a temp file before returning it.
     * Enabled by default to support concurrent reads.
     */
    protected boolean copyWhenReading = true;

    /**
     * All data identifiers that are currently in use are in this set until they are garbage collected.
     * Weak keys: entries vanish automatically once no caller references the identifier.
     */
    protected Map<DataIdentifier, WeakReference<DataIdentifier>> inUse =
        Collections.synchronizedMap(new WeakHashMap<DataIdentifier, WeakReference<DataIdentifier>>());

    /**
     * The temporary identifiers that are currently in use.
*/ protected List<String> temporaryInUse = Collections.synchronizedList(new ArrayList<String>()); /** * The {@link ConnectionHelper} set in the {@link #init(String)} method. * */ protected ConnectionHelper conHelper; /** * The repositories {@link ConnectionFactory}. */ private ConnectionFactory connectionFactory; public void setConnectionFactory(ConnectionFactory connnectionFactory) { this.connectionFactory = connnectionFactory; } public DataRecord addRecord(InputStream stream) throws DataStoreException { InputStream fileInput = null; String tempId = null; ResultSet rs = null; try { long tempModified; while (true) { try { tempModified = System.currentTimeMillis(); String id = UUID.randomUUID().toString(); tempId = TEMP_PREFIX + id; temporaryInUse.add(tempId); // SELECT LENGTH, LAST_MODIFIED FROM DATASTORE WHERE ID=? rs = conHelper.query(selectMetaSQL, tempId); boolean hasNext = rs.next(); DbUtility.close(rs); rs = null; if (hasNext) { // re-try in the very, very unlikely event that the row already exists continue; } // INSERT INTO DATASTORE VALUES(?, 0, ?, NULL) conHelper.exec(insertTempSQL, tempId, tempModified); break; } catch (Exception e) { throw convert("Can not insert new record", e); } finally { DbUtility.close(rs); // prevent that rs.close() is called again rs = null; } } MessageDigest digest = getDigest(); DigestInputStream dIn = new DigestInputStream(stream, digest); CountingInputStream in = new CountingInputStream(dIn); StreamWrapper wrapper; if (STORE_SIZE_MINUS_ONE.equals(storeStream)) { wrapper = new StreamWrapper(in, -1); } else if (STORE_SIZE_MAX.equals(storeStream)) { wrapper = new StreamWrapper(in, Integer.MAX_VALUE); } else if (STORE_TEMP_FILE.equals(storeStream)) { File temp = moveToTempFile(in); long length = temp.length(); wrapper = new StreamWrapper(new ResettableTempFileInputStream(temp), length); } else { throw new DataStoreException("Unsupported stream store algorithm: " + storeStream); } // UPDATE DATASTORE SET DATA=? WHERE ID=? 
conHelper.exec(updateDataSQL, wrapper, tempId); long length = in.getByteCount(); DataIdentifier identifier = new DataIdentifier(encodeHexString(digest.digest())); usesIdentifier(identifier); String id = identifier.toString(); long newModified; while (true) { newModified = System.currentTimeMillis(); if (checkExisting(tempId, length, identifier)) { touch(identifier, newModified); conHelper.exec(deleteSQL, tempId); break; } try { // UPDATE DATASTORE SET ID=?, LENGTH=?, LAST_MODIFIED=? // WHERE ID=? AND LAST_MODIFIED=? int count = conHelper.update(updateSQL, id, length, newModified, tempId, tempModified); // If update count is 0, the last modified time of the // temporary row was changed - which means we need to // re-try using a new last modified date (a later one) // because we need to ensure the new last modified date // is _newer_ than the old (otherwise the garbage // collection could delete rows) if (count != 0) { // update was successful break; } } catch (SQLException e) { // duplicate key (the row already exists) - repeat // we use exception handling for flow control here, which is bad, // but the alternative is to use UPDATE ... WHERE ... (SELECT ...) // which could cause a deadlock in some databases - also, // duplicate key will only occur if somebody else concurrently // added the same record (which is very unlikely) } // SELECT LENGTH, LAST_MODIFIED FROM DATASTORE WHERE ID=? 
rs = conHelper.query(selectMetaSQL, tempId); if (!rs.next()) { // the row was deleted, which is unexpected / not allowed String msg = DIGEST + " temporary entry deleted: " + " id=" + tempId + " length=" + length; log.error(msg); throw new DataStoreException(msg); } tempModified = rs.getLong(2); DbUtility.close(rs); rs = null; } usesIdentifier(identifier); DbDataRecord record = new DbDataRecord(this, identifier, length, newModified); return record; } catch (Exception e) { throw convert("Can not insert new record", e); } finally { if (tempId != null) { temporaryInUse.remove(tempId); } DbUtility.close(rs); if (fileInput != null) { try { fileInput.close(); } catch (IOException e) { throw convert("Can not close temporary file", e); } } } } /** * Check if a row with this ID already exists. * * @return true if the row exists and the length matches * @throw DataStoreException if a row exists, but the length is different */ private boolean checkExisting(String tempId, long length, DataIdentifier identifier) throws DataStoreException, SQLException { String id = identifier.toString(); // SELECT LENGTH, LAST_MODIFIED FROM DATASTORE WHERE ID=? ResultSet rs = null; try { rs = conHelper.query(selectMetaSQL, id); if (rs.next()) { long oldLength = rs.getLong(1); long lastModified = rs.getLong(2); if (oldLength != length) { String msg = DIGEST + " collision: temp=" + tempId + " id=" + id + " length=" + length + " oldLength=" + oldLength; log.error(msg); throw new DataStoreException(msg); } DbUtility.close(rs); rs = null; touch(identifier, lastModified); // row already exists conHelper.exec(deleteSQL, tempId); return true; } } finally { DbUtility.close(rs); } return false; } /** * Creates a temp file and copies the data there. * The input stream is closed afterwards. 
 *
     * @param in the input stream
     * @return the file
     * @throws IOException
     */
    private File moveToTempFile(InputStream in) throws IOException {
        File temp = File.createTempFile("dbRecord", null);
        writeToFileAndClose(in, temp);
        return temp;
    }

    // Copies in -> file; both streams are closed best-effort (close failures are
    // deliberately swallowed by closeQuietly, copy failures still propagate).
    private void writeToFileAndClose(InputStream in, File file) throws IOException {
        OutputStream out = new FileOutputStream(file);
        try {
            IOUtils.copy(in, out);
        } finally {
            IOUtils.closeQuietly(out);
            IOUtils.closeQuietly(in);
        }
    }

    // Unconditionally deletes the row for this identifier.
    public synchronized void deleteRecord(DataIdentifier identifier) throws DataStoreException {
        try {
            conHelper.exec(deleteSQL, identifier.toString());
        } catch (Exception e) {
            throw convert("Can not delete record", e);
        }
    }

    // Garbage collection support: first bump the modified date of every record
    // known to be in use (so they survive), then delete everything older than min.
    public synchronized int deleteAllOlderThan(long min) throws DataStoreException {
        try {
            ArrayList<String> touch = new ArrayList<String>();
            ArrayList<DataIdentifier> ids = new ArrayList<DataIdentifier>(inUse.keySet());
            for (DataIdentifier identifier: ids) {
                if (identifier != null) {
                    touch.add(identifier.toString());
                }
            }
            touch.addAll(temporaryInUse);
            for (String key : touch) {
                updateLastModifiedDate(key, 0);
            }
            // DELETE FROM DATASTORE WHERE LAST_MODIFIED<?
            log.debug(deleteOlderSQL + " - Scanstart [" + min + "]");
            return conHelper.update(deleteOlderSQL, min);
        } catch (Exception e) {
            throw convert("Can not delete records", e);
        }
    }

    // Returns all non-temporary identifiers; the list is materialized eagerly,
    // so the iterator does not hold a database resource.
    public Iterator<DataIdentifier> getAllIdentifiers() throws DataStoreException {
        ArrayList<DataIdentifier> list = new ArrayList<DataIdentifier>();
        ResultSet rs = null;
        try {
            // SELECT ID FROM DATASTORE
            rs = conHelper.query(selectAllSQL);
            while (rs.next()) {
                String id = rs.getString(1);
                if (!id.startsWith(TEMP_PREFIX)) {
                    DataIdentifier identifier = new DataIdentifier(id);
                    list.add(identifier);
                }
            }
            log.debug("Found " + list.size() + " identifiers.");
            return list.iterator();
        } catch (Exception e) {
            throw convert("Can not read records", e);
        } finally {
            DbUtility.close(rs);
        }
    }

    public int getMinRecordLength() {
        return minRecordLength;
    }

    /**
     * Set the minimum object length.
 * The maximum value is around 32000.
     *
     * @param minRecordLength the length
     */
    public void setMinRecordLength(int minRecordLength) {
        this.minRecordLength = minRecordLength;
    }

    // Looks up length and last-modified for the identifier; returns null when the
    // record does not exist.  Reading also refreshes the modified date if it is
    // older than minModifiedDate (see touch()).
    public DataRecord getRecordIfStored(DataIdentifier identifier) throws DataStoreException {
        usesIdentifier(identifier);
        ResultSet rs = null;
        try {
            String id = identifier.toString();
            // SELECT LENGTH, LAST_MODIFIED FROM DATASTORE WHERE ID = ?
            rs = conHelper.query(selectMetaSQL, id);
            if (!rs.next()) {
                return null;
            }
            long length = rs.getLong(1);
            long lastModified = rs.getLong(2);
            DbUtility.close(rs);
            rs = null;
            lastModified = touch(identifier, lastModified);
            return new DbDataRecord(this, identifier, length, lastModified);
        } catch (Exception e) {
            throw convert("Can not read identifier " + identifier, e);
        } finally {
            DbUtility.close(rs);
        }
    }

    /**
     * Open the input stream. This method sets those fields of the caller
     * that need to be closed once the input stream is read.
     *
     * @param inputStream the database input stream object
     * @param identifier data identifier
     * @throws DataStoreException if the data store could not be accessed,
     *          or if the given identifier is invalid
     */
    InputStream openStream(DbInputStream inputStream, DataIdentifier identifier) throws DataStoreException {
        ResultSet rs = null;
        try {
            // SELECT ID, DATA FROM DATASTORE WHERE ID = ?
            rs = conHelper.query(selectDataSQL, identifier.toString());
            if (!rs.next()) {
                throw new DataStoreException("Record not found: " + identifier);
            }
            InputStream stream = rs.getBinaryStream(2);
            if (stream == null) {
                // NULL BLOB: serve an empty stream; the ResultSet can be closed now.
                stream = new ByteArrayInputStream(new byte[0]);
                DbUtility.close(rs);
            } else if (copyWhenReading) {
                // If we copy while reading, create a temp file and close the stream
                File temp = moveToTempFile(stream);
                stream = new BufferedInputStream(new TempFileInputStream(temp));
                DbUtility.close(rs);
            } else {
                // Ownership of the open ResultSet transfers to the caller's
                // DbInputStream, which must close it when the stream is consumed.
                stream = new BufferedInputStream(stream);
                inputStream.setResultSet(rs);
            }
            return stream;
        } catch (Exception e) {
            DbUtility.close(rs);
            throw convert("Retrieving database resource ", e);
        }
    }

    public synchronized void init(String homeDir) throws DataStoreException {
        try {
            initDatabaseType();
            conHelper = createConnectionHelper(getDataSource());
            if (isSchemaCheckEnabled()) {
                createCheckSchemaOperation().run();
            }
        } catch (Exception e) {
            throw convert("Can not init data store, driver=" + driver + " url=" + url + " user=" + user +
                    " schemaObjectPrefix=" + schemaObjectPrefix + " tableSQL=" + tableSQL +
                    " createTableSQL=" + createTableSQL, e);
        }
    }

    // JNDI lookup when a dataSourceName is configured, direct JDBC otherwise.
    private DataSource getDataSource() throws Exception {
        if (getDataSourceName() == null || "".equals(getDataSourceName())) {
            return connectionFactory.getDataSource(getDriver(), getUrl(), getUser(), getPassword());
        } else {
            return connectionFactory.getDataSource(dataSourceName);
        }
    }

    /**
     * This method is called from the {@link #init(String)} method of this class and returns a
     * {@link ConnectionHelper} instance which is assigned to the {@code conHelper} field. Subclasses may
     * override it to return a specialized connection helper.
* * @param dataSrc the {@link DataSource} of this persistence manager * @return a {@link ConnectionHelper} * @throws Exception on error */ protected ConnectionHelper createConnectionHelper(DataSource dataSrc) throws Exception { return new ConnectionHelper(dataSrc, false); } /** * This method is called from {@link #init(String)} after the * {@link #createConnectionHelper(DataSource)} method, and returns a default {@link CheckSchemaOperation}. * * @return a new {@link CheckSchemaOperation} instance */ protected final CheckSchemaOperation createCheckSchemaOperation() { String tableName = tablePrefix + schemaObjectPrefix + tableSQL; return new CheckSchemaOperation(conHelper, new ByteArrayInputStream(createTableSQL.getBytes()), tableName); } protected void initDatabaseType() throws DataStoreException { boolean failIfNotFound = false; if (databaseType == null) { if (dataSourceName != null) { try { databaseType = connectionFactory.getDataBaseType(dataSourceName); } catch (RepositoryException e) { throw new DataStoreException(e); } } else { if (!url.startsWith("jdbc:")) { return; } int start = "jdbc:".length(); int end = url.indexOf(':', start); databaseType = url.substring(start, end); } } else { failIfNotFound = true; } InputStream in = DbDataStore.class.getResourceAsStream(databaseType + ".properties"); if (in == null) { if (failIfNotFound) { String msg = "Configuration error: The resource '" + databaseType + ".properties' could not be found;" + " Please verify the databaseType property"; log.debug(msg); throw new DataStoreException(msg); } else { return; } } Properties prop = new Properties(); try { try { prop.load(in); } finally { in.close(); } } catch (IOException e) { String msg = "Configuration error: Could not read properties '" + databaseType + ".properties'"; log.debug(msg); throw new DataStoreException(msg, e); } if (driver == null) { driver = getProperty(prop, "driver", driver); } tableSQL = getProperty(prop, "table", tableSQL); createTableSQL = 
getProperty(prop, "createTable", createTableSQL); insertTempSQL = getProperty(prop, "insertTemp", insertTempSQL); updateDataSQL = getProperty(prop, "updateData", updateDataSQL); updateLastModifiedSQL = getProperty(prop, "updateLastModified", updateLastModifiedSQL); updateSQL = getProperty(prop, "update", updateSQL); deleteSQL = getProperty(prop, "delete", deleteSQL); deleteOlderSQL = getProperty(prop, "deleteOlder", deleteOlderSQL); selectMetaSQL = getProperty(prop, "selectMeta", selectMetaSQL); selectAllSQL = getProperty(prop, "selectAll", selectAllSQL); selectDataSQL = getProperty(prop, "selectData", selectDataSQL); storeStream = getProperty(prop, "storeStream", storeStream); if (!STORE_SIZE_MINUS_ONE.equals(storeStream) && !STORE_TEMP_FILE.equals(storeStream) && !STORE_SIZE_MAX.equals(storeStream)) { String msg = "Unsupported Stream store mechanism: " + storeStream + " supported are: " + STORE_SIZE_MINUS_ONE + ", " + STORE_TEMP_FILE + ", " + STORE_SIZE_MAX; log.debug(msg); throw new DataStoreException(msg); } } /** * Get the expanded property value. The following placeholders are supported: * ${table}: the table name (the default is DATASTORE) and * ${tablePrefix}: tablePrefix plus schemaObjectPrefix as set in the configuration * * @param prop the properties object * @param key the key * @param defaultValue the default value * @return the property value (placeholders are replaced) */ protected String getProperty(Properties prop, String key, String defaultValue) { String sql = prop.getProperty(key, defaultValue); sql = Text.replace(sql, "${table}", tableSQL).trim(); sql = Text.replace(sql, "${tablePrefix}", tablePrefix + schemaObjectPrefix).trim(); return sql; } /** * Convert an exception to a data store exception. 
 *
     * @param cause the message
     * @param e the root cause
     * @return the data store exception
     */
    protected DataStoreException convert(String cause, Exception e) {
        log.warn(cause, e);
        if (e instanceof DataStoreException) {
            return (DataStoreException) e;
        } else {
            return new DataStoreException(cause, e);
        }
    }

    public void updateModifiedDateOnAccess(long before) {
        log.debug("Update modifiedDate on access before " + before);
        minModifiedDate = before;
    }

    /**
     * Update the modified date of an entry if required.
     *
     * @param identifier the entry identifier
     * @param lastModified the current last modified date
     * @return the new modified date
     */
    long touch(DataIdentifier identifier, long lastModified) throws DataStoreException {
        usesIdentifier(identifier);
        return updateLastModifiedDate(identifier.toString(), lastModified);
    }

    // Only rows whose modified date is older than minModifiedDate are refreshed;
    // note that when minModifiedDate is 0 (never set) this is a no-op.
    private long updateLastModifiedDate(String key, long lastModified) throws DataStoreException {
        if (lastModified < minModifiedDate) {
            long now = System.currentTimeMillis();
            try {
                // UPDATE DATASTORE SET LAST_MODIFIED = ? WHERE ID = ? AND LAST_MODIFIED < ?
                conHelper.update(updateLastModifiedSQL, now, key, now);
                return now;
            } catch (Exception e) {
                throw convert("Can not update lastModified", e);
            }
        }
        return lastModified;
    }

    /**
     * Get the database type (if set).
     * @return the database type
     */
    public String getDatabaseType() {
        return databaseType;
    }

    /**
     * Set the database type. By default the sub-protocol of the JDBC database URL is used if it is not set.
     * It must match the resource file [databaseType].properties. Example: mysql.
     *
     * @param databaseType
     */
    public void setDatabaseType(String databaseType) {
        this.databaseType = databaseType;
    }

    /**
     * Get the database driver
     *
     * @return the driver
     */
    public String getDriver() {
        return driver;
    }

    /**
     * Set the database driver class name.
     * If not set, the default driver class name for the database type is used,
     * as set in the [databaseType].properties resource; key 'driver'.
     *
     * @param driver
     */
    public void setDriver(String driver) {
        this.driver = driver;
    }

    /**
     * Get the password.
     *
     * @return the password
     */
    public String getPassword() {
        return password;
    }

    /**
     * Set the password.
     *
     * @param password
     */
    public void setPassword(String password) {
        this.password = password;
    }

    /**
     * Get the database URL.
     *
     * @return the URL
     */
    public String getUrl() {
        return url;
    }

    /**
     * Set the database URL.
     * Example: jdbc:postgresql:test
     *
     * @param url
     */
    public void setUrl(String url) {
        this.url = url;
    }

    /**
     * Get the user name.
     *
     * @return the user name
     */
    public String getUser() {
        return user;
    }

    /**
     * Set the user name.
     *
     * @param user
     */
    public void setUser(String user) {
        this.user = user;
    }

    /**
     * @return whether the schema check is enabled
     */
    public final boolean isSchemaCheckEnabled() {
        return schemaCheckEnabled;
    }

    /**
     * @param enabled set whether the schema check is enabled
     */
    public final void setSchemaCheckEnabled(boolean enabled) {
        schemaCheckEnabled = enabled;
    }

    public synchronized void close() throws DataStoreException {
        // nothing to do
    }

    // Records that the identifier is in use; the WeakReference lets the entry be
    // collected once no caller references the identifier any more.
    protected void usesIdentifier(DataIdentifier identifier) {
        inUse.put(identifier, new WeakReference<DataIdentifier>(identifier));
    }

    public void clearInUse() {
        inUse.clear();
    }

    // A new MessageDigest per call: MessageDigest instances are stateful and not
    // thread-safe, so they must not be cached and shared.
    protected synchronized MessageDigest getDigest() throws DataStoreException {
        try {
            return MessageDigest.getInstance(DIGEST);
        } catch (NoSuchAlgorithmException e) {
            throw convert("No such algorithm: " + DIGEST, e);
        }
    }

    /**
     * Get the maximum number of concurrent connections.
     *
     * @deprecated
     * @return the maximum number of connections.
     */
    @Deprecated
    public int getMaxConnections() {
        return -1;
    }

    /**
     * Set the maximum number of concurrent connections in the pool.
     * At least 3 connections are required if the garbage collection process is used.
     *
     * @deprecated
     * @param maxConnections the new value
     */
    @Deprecated
    public void setMaxConnections(int maxConnections) {
        // no effect
    }

    /**
     * Is a stream copied to a temporary file before returning?
     *
     * @return the setting
     */
    public boolean getCopyWhenReading() {
        return copyWhenReading;
    }

    /**
     * The the copy setting. If enabled,
     * a stream is always copied to a temporary file when reading a stream.
     *
     * @param copyWhenReading the new setting
     */
    public void setCopyWhenReading(boolean copyWhenReading) {
        this.copyWhenReading = copyWhenReading;
    }

    /**
     * Get the table prefix.
     *
     * @return the table prefix.
     */
    public String getTablePrefix() {
        return tablePrefix;
    }

    /**
     * Set the new table prefix. The default is empty.
     * The table name is constructed like this:
     * ${tablePrefix}${schemaObjectPrefix}${tableName}
     *
     * @param tablePrefix the new value
     */
    public void setTablePrefix(String tablePrefix) {
        this.tablePrefix = tablePrefix;
    }

    /**
     * Get the schema prefix.
     *
     * @return the schema object prefix
     */
    public String getSchemaObjectPrefix() {
        return schemaObjectPrefix;
    }

    /**
     * Set the schema object prefix. The default is empty.
     * The table name is constructed like this:
     * ${tablePrefix}${schemaObjectPrefix}${tableName}
     *
     * @param schemaObjectPrefix the new prefix
     */
    public void setSchemaObjectPrefix(String schemaObjectPrefix) {
        this.schemaObjectPrefix = schemaObjectPrefix;
    }

    public String getDataSourceName() {
        return dataSourceName;
    }

    public void setDataSourceName(String dataSourceName) {
        this.dataSourceName = dataSourceName;
    }

}
googleapis/google-cloud-java
36,151
java-datalineage/proto-google-cloud-datalineage-v1/src/main/java/com/google/cloud/datacatalog/lineage/v1/ListRunsResponse.java
/* * Copyright 2025 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ // Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/cloud/datacatalog/lineage/v1/lineage.proto // Protobuf Java Version: 3.25.8 package com.google.cloud.datacatalog.lineage.v1; /** * * * <pre> * Response message for * [ListRuns][google.cloud.datacatalog.lineage.v1.ListRuns]. * </pre> * * Protobuf type {@code google.cloud.datacatalog.lineage.v1.ListRunsResponse} */ public final class ListRunsResponse extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.cloud.datacatalog.lineage.v1.ListRunsResponse) ListRunsResponseOrBuilder { private static final long serialVersionUID = 0L; // Use ListRunsResponse.newBuilder() to construct. 
private ListRunsResponse(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private ListRunsResponse() { runs_ = java.util.Collections.emptyList(); nextPageToken_ = ""; } @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance(UnusedPrivateParameter unused) { return new ListRunsResponse(); } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.datacatalog.lineage.v1.LineageProto .internal_static_google_cloud_datacatalog_lineage_v1_ListRunsResponse_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.datacatalog.lineage.v1.LineageProto .internal_static_google_cloud_datacatalog_lineage_v1_ListRunsResponse_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.datacatalog.lineage.v1.ListRunsResponse.class, com.google.cloud.datacatalog.lineage.v1.ListRunsResponse.Builder.class); } public static final int RUNS_FIELD_NUMBER = 1; @SuppressWarnings("serial") private java.util.List<com.google.cloud.datacatalog.lineage.v1.Run> runs_; /** * * * <pre> * The runs from the specified project and location. * </pre> * * <code>repeated .google.cloud.datacatalog.lineage.v1.Run runs = 1;</code> */ @java.lang.Override public java.util.List<com.google.cloud.datacatalog.lineage.v1.Run> getRunsList() { return runs_; } /** * * * <pre> * The runs from the specified project and location. * </pre> * * <code>repeated .google.cloud.datacatalog.lineage.v1.Run runs = 1;</code> */ @java.lang.Override public java.util.List<? extends com.google.cloud.datacatalog.lineage.v1.RunOrBuilder> getRunsOrBuilderList() { return runs_; } /** * * * <pre> * The runs from the specified project and location. 
* </pre> * * <code>repeated .google.cloud.datacatalog.lineage.v1.Run runs = 1;</code> */ @java.lang.Override public int getRunsCount() { return runs_.size(); } /** * * * <pre> * The runs from the specified project and location. * </pre> * * <code>repeated .google.cloud.datacatalog.lineage.v1.Run runs = 1;</code> */ @java.lang.Override public com.google.cloud.datacatalog.lineage.v1.Run getRuns(int index) { return runs_.get(index); } /** * * * <pre> * The runs from the specified project and location. * </pre> * * <code>repeated .google.cloud.datacatalog.lineage.v1.Run runs = 1;</code> */ @java.lang.Override public com.google.cloud.datacatalog.lineage.v1.RunOrBuilder getRunsOrBuilder(int index) { return runs_.get(index); } public static final int NEXT_PAGE_TOKEN_FIELD_NUMBER = 2; @SuppressWarnings("serial") private volatile java.lang.Object nextPageToken_ = ""; /** * * * <pre> * The token to specify as `page_token` in the next call to get the next page. * If this field is omitted, there are no subsequent pages. * </pre> * * <code>string next_page_token = 2;</code> * * @return The nextPageToken. */ @java.lang.Override public java.lang.String getNextPageToken() { java.lang.Object ref = nextPageToken_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); nextPageToken_ = s; return s; } } /** * * * <pre> * The token to specify as `page_token` in the next call to get the next page. * If this field is omitted, there are no subsequent pages. * </pre> * * <code>string next_page_token = 2;</code> * * @return The bytes for nextPageToken. 
*/ @java.lang.Override public com.google.protobuf.ByteString getNextPageTokenBytes() { java.lang.Object ref = nextPageToken_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); nextPageToken_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { for (int i = 0; i < runs_.size(); i++) { output.writeMessage(1, runs_.get(i)); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 2, nextPageToken_); } getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; for (int i = 0; i < runs_.size(); i++) { size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, runs_.get(i)); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, nextPageToken_); } size += getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof com.google.cloud.datacatalog.lineage.v1.ListRunsResponse)) { return super.equals(obj); } com.google.cloud.datacatalog.lineage.v1.ListRunsResponse other = (com.google.cloud.datacatalog.lineage.v1.ListRunsResponse) obj; if (!getRunsList().equals(other.getRunsList())) return false; if (!getNextPageToken().equals(other.getNextPageToken())) return false; if 
(!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); if (getRunsCount() > 0) { hash = (37 * hash) + RUNS_FIELD_NUMBER; hash = (53 * hash) + getRunsList().hashCode(); } hash = (37 * hash) + NEXT_PAGE_TOKEN_FIELD_NUMBER; hash = (53 * hash) + getNextPageToken().hashCode(); hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static com.google.cloud.datacatalog.lineage.v1.ListRunsResponse parseFrom( java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.datacatalog.lineage.v1.ListRunsResponse parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.datacatalog.lineage.v1.ListRunsResponse parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.datacatalog.lineage.v1.ListRunsResponse parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.datacatalog.lineage.v1.ListRunsResponse parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.datacatalog.lineage.v1.ListRunsResponse parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } 
public static com.google.cloud.datacatalog.lineage.v1.ListRunsResponse parseFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.datacatalog.lineage.v1.ListRunsResponse parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.datacatalog.lineage.v1.ListRunsResponse parseDelimitedFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); } public static com.google.cloud.datacatalog.lineage.v1.ListRunsResponse parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.datacatalog.lineage.v1.ListRunsResponse parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.datacatalog.lineage.v1.ListRunsResponse parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder( com.google.cloud.datacatalog.lineage.v1.ListRunsResponse prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public 
Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * * * <pre> * Response message for * [ListRuns][google.cloud.datacatalog.lineage.v1.ListRuns]. * </pre> * * Protobuf type {@code google.cloud.datacatalog.lineage.v1.ListRunsResponse} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:google.cloud.datacatalog.lineage.v1.ListRunsResponse) com.google.cloud.datacatalog.lineage.v1.ListRunsResponseOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.datacatalog.lineage.v1.LineageProto .internal_static_google_cloud_datacatalog_lineage_v1_ListRunsResponse_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.datacatalog.lineage.v1.LineageProto .internal_static_google_cloud_datacatalog_lineage_v1_ListRunsResponse_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.datacatalog.lineage.v1.ListRunsResponse.class, com.google.cloud.datacatalog.lineage.v1.ListRunsResponse.Builder.class); } // Construct using com.google.cloud.datacatalog.lineage.v1.ListRunsResponse.newBuilder() private Builder() {} private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); } @java.lang.Override public Builder clear() { super.clear(); bitField0_ = 0; if (runsBuilder_ == null) { runs_ = java.util.Collections.emptyList(); } else { runs_ = null; runsBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000001); nextPageToken_ = ""; return this; } @java.lang.Override public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return 
com.google.cloud.datacatalog.lineage.v1.LineageProto .internal_static_google_cloud_datacatalog_lineage_v1_ListRunsResponse_descriptor; } @java.lang.Override public com.google.cloud.datacatalog.lineage.v1.ListRunsResponse getDefaultInstanceForType() { return com.google.cloud.datacatalog.lineage.v1.ListRunsResponse.getDefaultInstance(); } @java.lang.Override public com.google.cloud.datacatalog.lineage.v1.ListRunsResponse build() { com.google.cloud.datacatalog.lineage.v1.ListRunsResponse result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public com.google.cloud.datacatalog.lineage.v1.ListRunsResponse buildPartial() { com.google.cloud.datacatalog.lineage.v1.ListRunsResponse result = new com.google.cloud.datacatalog.lineage.v1.ListRunsResponse(this); buildPartialRepeatedFields(result); if (bitField0_ != 0) { buildPartial0(result); } onBuilt(); return result; } private void buildPartialRepeatedFields( com.google.cloud.datacatalog.lineage.v1.ListRunsResponse result) { if (runsBuilder_ == null) { if (((bitField0_ & 0x00000001) != 0)) { runs_ = java.util.Collections.unmodifiableList(runs_); bitField0_ = (bitField0_ & ~0x00000001); } result.runs_ = runs_; } else { result.runs_ = runsBuilder_.build(); } } private void buildPartial0(com.google.cloud.datacatalog.lineage.v1.ListRunsResponse result) { int from_bitField0_ = bitField0_; if (((from_bitField0_ & 0x00000002) != 0)) { result.nextPageToken_ = nextPageToken_; } } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) { 
return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof com.google.cloud.datacatalog.lineage.v1.ListRunsResponse) { return mergeFrom((com.google.cloud.datacatalog.lineage.v1.ListRunsResponse) other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(com.google.cloud.datacatalog.lineage.v1.ListRunsResponse other) { if (other == com.google.cloud.datacatalog.lineage.v1.ListRunsResponse.getDefaultInstance()) return this; if (runsBuilder_ == null) { if (!other.runs_.isEmpty()) { if (runs_.isEmpty()) { runs_ = other.runs_; bitField0_ = (bitField0_ & ~0x00000001); } else { ensureRunsIsMutable(); runs_.addAll(other.runs_); } onChanged(); } } else { if (!other.runs_.isEmpty()) { if (runsBuilder_.isEmpty()) { runsBuilder_.dispose(); runsBuilder_ = null; runs_ = other.runs_; bitField0_ = (bitField0_ & ~0x00000001); runsBuilder_ = com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ? 
getRunsFieldBuilder() : null; } else { runsBuilder_.addAllMessages(other.runs_); } } } if (!other.getNextPageToken().isEmpty()) { nextPageToken_ = other.nextPageToken_; bitField0_ |= 0x00000002; onChanged(); } this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { com.google.cloud.datacatalog.lineage.v1.Run m = input.readMessage( com.google.cloud.datacatalog.lineage.v1.Run.parser(), extensionRegistry); if (runsBuilder_ == null) { ensureRunsIsMutable(); runs_.add(m); } else { runsBuilder_.addMessage(m); } break; } // case 10 case 18: { nextPageToken_ = input.readStringRequireUtf8(); bitField0_ |= 0x00000002; break; } // case 18 default: { if (!super.parseUnknownField(input, extensionRegistry, tag)) { done = true; // was an endgroup tag } break; } // default: } // switch (tag) } // while (!done) } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.unwrapIOException(); } finally { onChanged(); } // finally return this; } private int bitField0_; private java.util.List<com.google.cloud.datacatalog.lineage.v1.Run> runs_ = java.util.Collections.emptyList(); private void ensureRunsIsMutable() { if (!((bitField0_ & 0x00000001) != 0)) { runs_ = new java.util.ArrayList<com.google.cloud.datacatalog.lineage.v1.Run>(runs_); bitField0_ |= 0x00000001; } } private com.google.protobuf.RepeatedFieldBuilderV3< com.google.cloud.datacatalog.lineage.v1.Run, com.google.cloud.datacatalog.lineage.v1.Run.Builder, com.google.cloud.datacatalog.lineage.v1.RunOrBuilder> runsBuilder_; /** * * * <pre> * 
The runs from the specified project and location. * </pre> * * <code>repeated .google.cloud.datacatalog.lineage.v1.Run runs = 1;</code> */ public java.util.List<com.google.cloud.datacatalog.lineage.v1.Run> getRunsList() { if (runsBuilder_ == null) { return java.util.Collections.unmodifiableList(runs_); } else { return runsBuilder_.getMessageList(); } } /** * * * <pre> * The runs from the specified project and location. * </pre> * * <code>repeated .google.cloud.datacatalog.lineage.v1.Run runs = 1;</code> */ public int getRunsCount() { if (runsBuilder_ == null) { return runs_.size(); } else { return runsBuilder_.getCount(); } } /** * * * <pre> * The runs from the specified project and location. * </pre> * * <code>repeated .google.cloud.datacatalog.lineage.v1.Run runs = 1;</code> */ public com.google.cloud.datacatalog.lineage.v1.Run getRuns(int index) { if (runsBuilder_ == null) { return runs_.get(index); } else { return runsBuilder_.getMessage(index); } } /** * * * <pre> * The runs from the specified project and location. * </pre> * * <code>repeated .google.cloud.datacatalog.lineage.v1.Run runs = 1;</code> */ public Builder setRuns(int index, com.google.cloud.datacatalog.lineage.v1.Run value) { if (runsBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureRunsIsMutable(); runs_.set(index, value); onChanged(); } else { runsBuilder_.setMessage(index, value); } return this; } /** * * * <pre> * The runs from the specified project and location. * </pre> * * <code>repeated .google.cloud.datacatalog.lineage.v1.Run runs = 1;</code> */ public Builder setRuns( int index, com.google.cloud.datacatalog.lineage.v1.Run.Builder builderForValue) { if (runsBuilder_ == null) { ensureRunsIsMutable(); runs_.set(index, builderForValue.build()); onChanged(); } else { runsBuilder_.setMessage(index, builderForValue.build()); } return this; } /** * * * <pre> * The runs from the specified project and location. 
* </pre> * * <code>repeated .google.cloud.datacatalog.lineage.v1.Run runs = 1;</code> */ public Builder addRuns(com.google.cloud.datacatalog.lineage.v1.Run value) { if (runsBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureRunsIsMutable(); runs_.add(value); onChanged(); } else { runsBuilder_.addMessage(value); } return this; } /** * * * <pre> * The runs from the specified project and location. * </pre> * * <code>repeated .google.cloud.datacatalog.lineage.v1.Run runs = 1;</code> */ public Builder addRuns(int index, com.google.cloud.datacatalog.lineage.v1.Run value) { if (runsBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureRunsIsMutable(); runs_.add(index, value); onChanged(); } else { runsBuilder_.addMessage(index, value); } return this; } /** * * * <pre> * The runs from the specified project and location. * </pre> * * <code>repeated .google.cloud.datacatalog.lineage.v1.Run runs = 1;</code> */ public Builder addRuns(com.google.cloud.datacatalog.lineage.v1.Run.Builder builderForValue) { if (runsBuilder_ == null) { ensureRunsIsMutable(); runs_.add(builderForValue.build()); onChanged(); } else { runsBuilder_.addMessage(builderForValue.build()); } return this; } /** * * * <pre> * The runs from the specified project and location. * </pre> * * <code>repeated .google.cloud.datacatalog.lineage.v1.Run runs = 1;</code> */ public Builder addRuns( int index, com.google.cloud.datacatalog.lineage.v1.Run.Builder builderForValue) { if (runsBuilder_ == null) { ensureRunsIsMutable(); runs_.add(index, builderForValue.build()); onChanged(); } else { runsBuilder_.addMessage(index, builderForValue.build()); } return this; } /** * * * <pre> * The runs from the specified project and location. * </pre> * * <code>repeated .google.cloud.datacatalog.lineage.v1.Run runs = 1;</code> */ public Builder addAllRuns( java.lang.Iterable<? 
extends com.google.cloud.datacatalog.lineage.v1.Run> values) { if (runsBuilder_ == null) { ensureRunsIsMutable(); com.google.protobuf.AbstractMessageLite.Builder.addAll(values, runs_); onChanged(); } else { runsBuilder_.addAllMessages(values); } return this; } /** * * * <pre> * The runs from the specified project and location. * </pre> * * <code>repeated .google.cloud.datacatalog.lineage.v1.Run runs = 1;</code> */ public Builder clearRuns() { if (runsBuilder_ == null) { runs_ = java.util.Collections.emptyList(); bitField0_ = (bitField0_ & ~0x00000001); onChanged(); } else { runsBuilder_.clear(); } return this; } /** * * * <pre> * The runs from the specified project and location. * </pre> * * <code>repeated .google.cloud.datacatalog.lineage.v1.Run runs = 1;</code> */ public Builder removeRuns(int index) { if (runsBuilder_ == null) { ensureRunsIsMutable(); runs_.remove(index); onChanged(); } else { runsBuilder_.remove(index); } return this; } /** * * * <pre> * The runs from the specified project and location. * </pre> * * <code>repeated .google.cloud.datacatalog.lineage.v1.Run runs = 1;</code> */ public com.google.cloud.datacatalog.lineage.v1.Run.Builder getRunsBuilder(int index) { return getRunsFieldBuilder().getBuilder(index); } /** * * * <pre> * The runs from the specified project and location. * </pre> * * <code>repeated .google.cloud.datacatalog.lineage.v1.Run runs = 1;</code> */ public com.google.cloud.datacatalog.lineage.v1.RunOrBuilder getRunsOrBuilder(int index) { if (runsBuilder_ == null) { return runs_.get(index); } else { return runsBuilder_.getMessageOrBuilder(index); } } /** * * * <pre> * The runs from the specified project and location. * </pre> * * <code>repeated .google.cloud.datacatalog.lineage.v1.Run runs = 1;</code> */ public java.util.List<? 
extends com.google.cloud.datacatalog.lineage.v1.RunOrBuilder> getRunsOrBuilderList() { if (runsBuilder_ != null) { return runsBuilder_.getMessageOrBuilderList(); } else { return java.util.Collections.unmodifiableList(runs_); } } /** * * * <pre> * The runs from the specified project and location. * </pre> * * <code>repeated .google.cloud.datacatalog.lineage.v1.Run runs = 1;</code> */ public com.google.cloud.datacatalog.lineage.v1.Run.Builder addRunsBuilder() { return getRunsFieldBuilder() .addBuilder(com.google.cloud.datacatalog.lineage.v1.Run.getDefaultInstance()); } /** * * * <pre> * The runs from the specified project and location. * </pre> * * <code>repeated .google.cloud.datacatalog.lineage.v1.Run runs = 1;</code> */ public com.google.cloud.datacatalog.lineage.v1.Run.Builder addRunsBuilder(int index) { return getRunsFieldBuilder() .addBuilder(index, com.google.cloud.datacatalog.lineage.v1.Run.getDefaultInstance()); } /** * * * <pre> * The runs from the specified project and location. * </pre> * * <code>repeated .google.cloud.datacatalog.lineage.v1.Run runs = 1;</code> */ public java.util.List<com.google.cloud.datacatalog.lineage.v1.Run.Builder> getRunsBuilderList() { return getRunsFieldBuilder().getBuilderList(); } private com.google.protobuf.RepeatedFieldBuilderV3< com.google.cloud.datacatalog.lineage.v1.Run, com.google.cloud.datacatalog.lineage.v1.Run.Builder, com.google.cloud.datacatalog.lineage.v1.RunOrBuilder> getRunsFieldBuilder() { if (runsBuilder_ == null) { runsBuilder_ = new com.google.protobuf.RepeatedFieldBuilderV3< com.google.cloud.datacatalog.lineage.v1.Run, com.google.cloud.datacatalog.lineage.v1.Run.Builder, com.google.cloud.datacatalog.lineage.v1.RunOrBuilder>( runs_, ((bitField0_ & 0x00000001) != 0), getParentForChildren(), isClean()); runs_ = null; } return runsBuilder_; } private java.lang.Object nextPageToken_ = ""; /** * * * <pre> * The token to specify as `page_token` in the next call to get the next page. 
* If this field is omitted, there are no subsequent pages. * </pre> * * <code>string next_page_token = 2;</code> * * @return The nextPageToken. */ public java.lang.String getNextPageToken() { java.lang.Object ref = nextPageToken_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); nextPageToken_ = s; return s; } else { return (java.lang.String) ref; } } /** * * * <pre> * The token to specify as `page_token` in the next call to get the next page. * If this field is omitted, there are no subsequent pages. * </pre> * * <code>string next_page_token = 2;</code> * * @return The bytes for nextPageToken. */ public com.google.protobuf.ByteString getNextPageTokenBytes() { java.lang.Object ref = nextPageToken_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); nextPageToken_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * * * <pre> * The token to specify as `page_token` in the next call to get the next page. * If this field is omitted, there are no subsequent pages. * </pre> * * <code>string next_page_token = 2;</code> * * @param value The nextPageToken to set. * @return This builder for chaining. */ public Builder setNextPageToken(java.lang.String value) { if (value == null) { throw new NullPointerException(); } nextPageToken_ = value; bitField0_ |= 0x00000002; onChanged(); return this; } /** * * * <pre> * The token to specify as `page_token` in the next call to get the next page. * If this field is omitted, there are no subsequent pages. * </pre> * * <code>string next_page_token = 2;</code> * * @return This builder for chaining. 
*/ public Builder clearNextPageToken() { nextPageToken_ = getDefaultInstance().getNextPageToken(); bitField0_ = (bitField0_ & ~0x00000002); onChanged(); return this; } /** * * * <pre> * The token to specify as `page_token` in the next call to get the next page. * If this field is omitted, there are no subsequent pages. * </pre> * * <code>string next_page_token = 2;</code> * * @param value The bytes for nextPageToken to set. * @return This builder for chaining. */ public Builder setNextPageTokenBytes(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); nextPageToken_ = value; bitField0_ |= 0x00000002; onChanged(); return this; } @java.lang.Override public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:google.cloud.datacatalog.lineage.v1.ListRunsResponse) } // @@protoc_insertion_point(class_scope:google.cloud.datacatalog.lineage.v1.ListRunsResponse) private static final com.google.cloud.datacatalog.lineage.v1.ListRunsResponse DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new com.google.cloud.datacatalog.lineage.v1.ListRunsResponse(); } public static com.google.cloud.datacatalog.lineage.v1.ListRunsResponse getDefaultInstance() { return DEFAULT_INSTANCE; } private static final com.google.protobuf.Parser<ListRunsResponse> PARSER = new com.google.protobuf.AbstractParser<ListRunsResponse>() { @java.lang.Override public ListRunsResponse parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { Builder builder = newBuilder(); try { builder.mergeFrom(input, extensionRegistry); } 
catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(builder.buildPartial()); } catch (com.google.protobuf.UninitializedMessageException e) { throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException(e) .setUnfinishedMessage(builder.buildPartial()); } return builder.buildPartial(); } }; public static com.google.protobuf.Parser<ListRunsResponse> parser() { return PARSER; } @java.lang.Override public com.google.protobuf.Parser<ListRunsResponse> getParserForType() { return PARSER; } @java.lang.Override public com.google.cloud.datacatalog.lineage.v1.ListRunsResponse getDefaultInstanceForType() { return DEFAULT_INSTANCE; } }
apache/geode
35,771
geode-wan/src/distributedTest/java/org/apache/geode/internal/cache/wan/wancommand/CreateGatewayReceiverCommandDUnitTest.java
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more contributor license
 * agreements. See the NOTICE file distributed with this work for additional information regarding
 * copyright ownership. The ASF licenses this file to You under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance with the License. You may obtain a
 * copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software distributed under the License
 * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
 * or implied. See the License for the specific language governing permissions and limitations under
 * the License.
 */
package org.apache.geode.internal.cache.wan.wancommand;

import static org.apache.geode.cache.wan.GatewayReceiverFactory.A_GATEWAY_RECEIVER_ALREADY_EXISTS_ON_THIS_MEMBER;
import static org.apache.geode.distributed.ConfigurationProperties.BIND_ADDRESS;
import static org.apache.geode.distributed.ConfigurationProperties.DISTRIBUTED_SYSTEM_ID;
import static org.apache.geode.distributed.ConfigurationProperties.GROUPS;
import static org.apache.geode.distributed.ConfigurationProperties.SERVER_BIND_ADDRESS;
import static org.apache.geode.internal.cache.wan.wancommand.WANCommandUtils.getMember;
import static org.apache.geode.internal.cache.wan.wancommand.WANCommandUtils.verifyGatewayReceiverProfile;
import static org.apache.geode.internal.cache.wan.wancommand.WANCommandUtils.verifyGatewayReceiverServerLocations;
import static org.apache.geode.internal.cache.wan.wancommand.WANCommandUtils.verifyReceiverCreationWithAttributes;
import static org.apache.geode.management.internal.i18n.CliStrings.CREATE_GATEWAYRECEIVER;
import static org.apache.geode.management.internal.i18n.CliStrings.GROUP;
import static org.apache.geode.test.junit.rules.VMProvider.invokeInEveryMember;
import static org.assertj.core.api.Assertions.assertThat;

import java.net.InetAddress;
import java.util.ArrayList;
import java.util.List;
import java.util.Properties;

import junitparams.Parameters;
import org.junit.Before;
import org.junit.Rule;
import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.junit.runner.RunWith;

import org.apache.geode.cache.Cache;
import org.apache.geode.cache.wan.GatewayReceiver;
import org.apache.geode.distributed.DistributedMember;
import org.apache.geode.internal.inet.LocalHostUtil;
import org.apache.geode.management.internal.i18n.CliStrings;
import org.apache.geode.test.dunit.rules.ClusterStartupRule;
import org.apache.geode.test.dunit.rules.MemberVM;
import org.apache.geode.test.junit.categories.WanTest;
import org.apache.geode.test.junit.rules.GfshCommandRule;
import org.apache.geode.test.junit.runners.GeodeParamsRunner;

/**
 * DUnit tests for the gfsh 'create gateway-receiver' command.
 *
 * <p>Each test starts one locator plus up to three servers in separate VMs, runs the command
 * through a gfsh connection to the locator, and then verifies the resulting GatewayReceiver
 * attributes (start/end port, bind address, ping interval, socket buffer size, transport
 * filters, hostname-for-senders) directly inside each server VM.
 */
@Category(WanTest.class)
@RunWith(GeodeParamsRunner.class)
public class CreateGatewayReceiverCommandDUnitTest {

  // Member names assigned by ClusterStartupRule for server VMs 1-3.
  private static final String SERVER_1 = "server-1";
  private static final String SERVER_2 = "server-2";
  private static final String SERVER_3 = "server-3";

  private MemberVM locatorSite1;
  private MemberVM server1;
  private MemberVM server2;
  private MemberVM server3;

  // 4 VMs: one locator + up to three servers started per test.
  @Rule
  public ClusterStartupRule clusterStartupRule = new ClusterStartupRule(4);

  @Rule
  public GfshCommandRule gfsh = new GfshCommandRule();

  /**
   * Starts the locator for WAN site 1 (distributed-system-id = 1) and connects gfsh to it.
   * Servers are started inside each test so tests can control count and properties.
   */
  @Before
  public void before() throws Exception {
    Properties props = new Properties();
    props.setProperty(DISTRIBUTED_SYSTEM_ID, "" + 1);
    locatorSite1 = clusterStartupRule.startLocatorVM(0, props);

    // Connect Gfsh to locator.
    gfsh.connectAndVerify(locatorSite1);
  }

  /**
   * A member may host at most one GatewayReceiver: the second create command must report an
   * error status with an IllegalStateException message.
   */
  @Test
  public void twoGatewayReceiversCannotCoexist() {
    Integer locator1Port = locatorSite1.getPort();
    server1 = clusterStartupRule.startServerVM(1, locator1Port);
    String command = CREATE_GATEWAYRECEIVER;

    // First creation succeeds.
    gfsh.executeAndAssertThat(command).statusIsSuccess()
        .tableHasColumnWithExactValuesInAnyOrder("Member", SERVER_1)
        .tableHasColumnWithValuesContaining("Message",
            "GatewayReceiver created on member \"" + SERVER_1 + "\"");

    // Second creation on the same member fails.
    gfsh.executeAndAssertThat(command).statusIsError()
        .tableHasColumnWithExactValuesInAnyOrder("Member", SERVER_1)
        .tableHasColumnWithValuesContaining("Status", "ERROR")
        .tableHasColumnWithValuesContaining("Message",
            "java.lang.IllegalStateException: "
                + A_GATEWAY_RECEIVER_ALREADY_EXISTS_ON_THIS_MEMBER);
  }

  @Test
  public void commandSucceedsIfAnyReceiverFailsToCreateEvenWithoutSkipOption() {
    // Create a receiver on one server (but not all) so that the command to create receivers on all
    // will fail on one (but not all). Despite the per-member ERROR entry, the overall command
    // status is still success when --if-not-exists is absent, as verified below.
    Integer locator1Port = locatorSite1.getPort();
    server1 = clusterStartupRule.startServerVM(1, locator1Port);
    server2 = clusterStartupRule.startServerVM(2, locator1Port);
    String createOnS1 = CREATE_GATEWAYRECEIVER + " --member=" + server1.getName();

    gfsh.executeAndAssertThat(createOnS1).statusIsSuccess()
        .tableHasColumnWithExactValuesInAnyOrder("Member", SERVER_1)
        .tableHasColumnWithValuesContaining("Message",
            "GatewayReceiver created on member \"" + SERVER_1 + "\"");

    // Cluster-wide create: ERROR on server-1 (already exists), OK on server-2.
    gfsh.executeAndAssertThat(CREATE_GATEWAYRECEIVER).statusIsSuccess()
        .tableHasColumnWithExactValuesInAnyOrder("Member", SERVER_1, SERVER_2)
        .tableHasColumnWithExactValuesInAnyOrder("Status", "ERROR", "OK")
        .tableHasColumnWithValuesContaining("Message",
            "java.lang.IllegalStateException: "
                + A_GATEWAY_RECEIVER_ALREADY_EXISTS_ON_THIS_MEMBER,
            "GatewayReceiver created on member \"" + SERVER_2 + "\"");
  }

  @Test
  public void commandSucceedsWhenReceiversAlreadyExistWhenSkipOptionIsPresent() {
    // Create a receiver on one server (but not all) so that the cluster-wide create would hit an
    // already-existing receiver on that member. With --if-not-exists present the existing member
    // is reported as "Skipping" rather than as an error.
    Integer locator1Port = locatorSite1.getPort();
    server1 = clusterStartupRule.startServerVM(1, locator1Port);
    server2 = clusterStartupRule.startServerVM(2, locator1Port);
    String createOnS1 = CREATE_GATEWAYRECEIVER + " --member=" + server1.getName();
    String createOnBoth = CREATE_GATEWAYRECEIVER + " --if-not-exists";

    gfsh.executeAndAssertThat(createOnS1).statusIsSuccess()
        .tableHasColumnWithExactValuesInAnyOrder("Member", SERVER_1)
        .tableHasColumnWithValuesContaining("Message",
            "GatewayReceiver created on member \"" + SERVER_1 + "\"");

    gfsh.executeAndAssertThat(createOnBoth).statusIsSuccess()
        .tableHasColumnWithExactValuesInAnyOrder("Member", SERVER_1, SERVER_2)
        .tableHasColumnWithValuesContaining("Message",
            "Skipping: " + A_GATEWAY_RECEIVER_ALREADY_EXISTS_ON_THIS_MEMBER,
            "GatewayReceiver created on member \"" + SERVER_2 + "\"");
  }

  /**
   * GatewayReceiver with given attributes. Error scenario where the user tries to create more than
   * one receiver per member.
   */
  @Test
  public void testCreateGatewayReceiverErrorWhenGatewayReceiverAlreadyExists() {
    Integer locator1Port = locatorSite1.getPort();
    server1 = clusterStartupRule.startServerVM(1, locator1Port);
    server2 = clusterStartupRule.startServerVM(2, locator1Port);
    server3 = clusterStartupRule.startServerVM(3, locator1Port);

    // Initial Creation should succeed
    String command =
        CliStrings.CREATE_GATEWAYRECEIVER + " --" + CliStrings.CREATE_GATEWAYRECEIVER__BINDADDRESS
            + "=localhost" + " --" + CliStrings.CREATE_GATEWAYRECEIVER__STARTPORT + "=10000"
            + " --" + CliStrings.CREATE_GATEWAYRECEIVER__ENDPORT + "=11000"
            + " --" + CliStrings.CREATE_GATEWAYRECEIVER__MAXTIMEBETWEENPINGS + "=100000"
            + " --" + CliStrings.CREATE_GATEWAYRECEIVER__SOCKETBUFFERSIZE + "=512000";
    gfsh.executeAndAssertThat(command).statusIsSuccess()
        .tableHasColumnWithExactValuesInAnyOrder("Member", SERVER_1, SERVER_2, SERVER_3)
        .tableHasColumnWithValuesContaining("Message",
            "GatewayReceiver created on member \"" + SERVER_1 + "\"",
            "GatewayReceiver created on member \"" + SERVER_2 + "\"",
            "GatewayReceiver created on member \"" + SERVER_3 + "\"");

    invokeInEveryMember(
        () -> verifyReceiverCreationWithAttributes(!GatewayReceiver.DEFAULT_MANUAL_START, 10000,
            11000, "localhost", 100000, 512000, null,
            GatewayReceiver.DEFAULT_HOSTNAME_FOR_SENDERS),
        server1, server2, server3);

    // This should fail as there's already a gateway receiver created on the member.
    // NOTE: the expected messages carry a leading space, matching the command's output format.
    gfsh.executeAndAssertThat(command).statusIsError()
        .tableHasColumnWithExactValuesInAnyOrder("Member", SERVER_1, SERVER_2, SERVER_3)
        .tableHasColumnWithExactValuesInAnyOrder("Status", "ERROR", "ERROR", "ERROR")
        .tableHasColumnWithExactValuesInAnyOrder("Message",
            " java.lang.IllegalStateException: "
                + A_GATEWAY_RECEIVER_ALREADY_EXISTS_ON_THIS_MEMBER,
            " java.lang.IllegalStateException: "
                + A_GATEWAY_RECEIVER_ALREADY_EXISTS_ON_THIS_MEMBER,
            " java.lang.IllegalStateException: "
                + A_GATEWAY_RECEIVER_ALREADY_EXISTS_ON_THIS_MEMBER);
  }

  /**
   * GatewayReceiver with all default attributes
   */
  @Test
  public void testCreateGatewayReceiverWithDefault() throws Exception {
    Integer locator1Port = locatorSite1.getPort();
    server1 = clusterStartupRule.startServerVM(1, locator1Port);
    server2 = clusterStartupRule.startServerVM(2, locator1Port);
    server3 = clusterStartupRule.startServerVM(3, locator1Port);

    // Default attributes.
    gfsh.executeAndAssertThat(CliStrings.CREATE_GATEWAYRECEIVER).statusIsSuccess()
        .tableHasColumnWithExactValuesInAnyOrder("Member", SERVER_1, SERVER_2, SERVER_3)
        .tableHasColumnWithValuesContaining("Message",
            "GatewayReceiver created on member \"" + SERVER_1 + "\"",
            "GatewayReceiver created on member \"" + SERVER_2 + "\"",
            "GatewayReceiver created on member \"" + SERVER_3 + "\"");

    // If neither bind-address or hostname-for-senders is set, profile
    // uses AcceptorImpl.getExternalAddress() to derive canonical hostname
    // when the Profile (and ServerLocation) are created
    String hostname = getHostName();

    invokeInEveryMember(() -> {
      verifyGatewayReceiverProfile(hostname);
      verifyGatewayReceiverServerLocations(locator1Port, hostname);
      verifyReceiverCreationWithAttributes(!GatewayReceiver.DEFAULT_MANUAL_START,
          GatewayReceiver.DEFAULT_START_PORT, GatewayReceiver.DEFAULT_END_PORT,
          GatewayReceiver.DEFAULT_BIND_ADDRESS, GatewayReceiver.DEFAULT_MAXIMUM_TIME_BETWEEN_PINGS,
          GatewayReceiver.DEFAULT_SOCKET_BUFFER_SIZE, null,
          GatewayReceiver.DEFAULT_HOSTNAME_FOR_SENDERS);
    }, server1, server2, server3);
  }

  /**
   * GatewayReceiver with given attributes
   */
  @Test
  public void testCreateGatewayReceiver() {
    Integer locator1Port = locatorSite1.getPort();
    server1 = clusterStartupRule.startServerVM(1, locator1Port);
    server2 = clusterStartupRule.startServerVM(2, locator1Port);
    server3 = clusterStartupRule.startServerVM(3, locator1Port);

    // manual-start=true, so the receiver is created but not running (first arg false below).
    String command =
        CliStrings.CREATE_GATEWAYRECEIVER + " --" + CliStrings.CREATE_GATEWAYRECEIVER__MANUALSTART
            + "=true" + " --" + CliStrings.CREATE_GATEWAYRECEIVER__BINDADDRESS + "=localhost"
            + " --" + CliStrings.CREATE_GATEWAYRECEIVER__STARTPORT + "=10000"
            + " --" + CliStrings.CREATE_GATEWAYRECEIVER__ENDPORT + "=11000"
            + " --" + CliStrings.CREATE_GATEWAYRECEIVER__MAXTIMEBETWEENPINGS + "=100000"
            + " --" + CliStrings.CREATE_GATEWAYRECEIVER__SOCKETBUFFERSIZE + "=512000";
    gfsh.executeAndAssertThat(command).statusIsSuccess()
        .tableHasColumnWithExactValuesInAnyOrder("Member", SERVER_1, SERVER_2, SERVER_3)
        .tableHasColumnWithValuesContaining("Message",
            "GatewayReceiver created on member \"" + SERVER_1 + "\"",
            "GatewayReceiver created on member \"" + SERVER_2 + "\"",
            "GatewayReceiver created on member \"" + SERVER_3 + "\"");

    invokeInEveryMember(() -> verifyReceiverCreationWithAttributes(false, 10000, 11000,
        "localhost", 100000, 512000, null, GatewayReceiver.DEFAULT_HOSTNAME_FOR_SENDERS),
        server1, server2, server3);
  }

  /**
   * GatewayReceiver with hostnameForSenders
   */
  @Test
  public void testCreateGatewayReceiverWithHostnameForSenders() throws Exception {
    Integer locator1Port = locatorSite1.getPort();
    server1 = clusterStartupRule.startServerVM(1, locator1Port);
    server2 = clusterStartupRule.startServerVM(2, locator1Port);
    server3 = clusterStartupRule.startServerVM(3, locator1Port);

    String hostnameForSenders = getHostName();
    String command =
        CliStrings.CREATE_GATEWAYRECEIVER + " --" + CliStrings.CREATE_GATEWAYRECEIVER__MANUALSTART
            + "=false" + " --" + CliStrings.CREATE_GATEWAYRECEIVER__HOSTNAMEFORSENDERS + "="
            + hostnameForSenders + " --" + CliStrings.CREATE_GATEWAYRECEIVER__STARTPORT + "=10000"
            + " --" + CliStrings.CREATE_GATEWAYRECEIVER__ENDPORT + "=11000"
            + " --" + CliStrings.CREATE_GATEWAYRECEIVER__MAXTIMEBETWEENPINGS + "=100000"
            + " --" + CliStrings.CREATE_GATEWAYRECEIVER__SOCKETBUFFERSIZE + "=512000";
    gfsh.executeAndAssertThat(command).statusIsSuccess()
        .tableHasColumnWithExactValuesInAnyOrder("Member", SERVER_1, SERVER_2, SERVER_3)
        .tableHasColumnWithValuesContaining("Message",
            "GatewayReceiver created on member \"" + SERVER_1 + "\"",
            "GatewayReceiver created on member \"" + SERVER_2 + "\"",
            "GatewayReceiver created on member \"" + SERVER_3 + "\"");

    invokeInEveryMember(() -> {
      // verify hostname-for-senders is used when configured
      verifyGatewayReceiverProfile(hostnameForSenders);
      verifyGatewayReceiverServerLocations(locator1Port, hostnameForSenders);
      verifyReceiverCreationWithAttributes(true, 10000, 11000, "", 100000, 512000, null,
          hostnameForSenders);
    }, server1, server2, server3);
  }

  /**
   * GatewayReceiver with all default attributes and bind-address / server-bind-address in
   * gemfire-properties
   */
  @Test
  @Parameters({BIND_ADDRESS, SERVER_BIND_ADDRESS})
  public void testCreateGatewayReceiverWithDefaultsAndAddressProperties(String addressPropertyKey)
      throws Exception {
    String receiverGroup = "receiverGroup";
    Integer locator1Port = locatorSite1.getPort();
    String expectedBindAddress = getBindAddress();

    Properties props = new Properties();
    props.setProperty(GROUPS, receiverGroup);
    props.setProperty(addressPropertyKey, expectedBindAddress);
    server1 = clusterStartupRule.startServerVM(1, props, locator1Port);
    server2 = clusterStartupRule.startServerVM(2, props, locator1Port);
    server3 = clusterStartupRule.startServerVM(3, props, locator1Port);

    String command = CliStrings.CREATE_GATEWAYRECEIVER + " --" + GROUP + "=" + receiverGroup;
    gfsh.executeAndAssertThat(command).statusIsSuccess()
        .tableHasColumnWithExactValuesInAnyOrder("Member", SERVER_1, SERVER_2, SERVER_3)
        .tableHasColumnWithValuesContaining("Message",
            "GatewayReceiver created on member \"" + SERVER_1 + "\"",
            "GatewayReceiver created on member \"" + SERVER_2 + "\"",
            "GatewayReceiver created on member \"" + SERVER_3 + "\"");

    invokeInEveryMember(() -> {
      // verify bind-address used when provided as a gemfire property
      verifyGatewayReceiverProfile(expectedBindAddress);
      verifyGatewayReceiverServerLocations(locator1Port, expectedBindAddress);
      verifyReceiverCreationWithAttributes(!GatewayReceiver.DEFAULT_MANUAL_START,
          GatewayReceiver.DEFAULT_START_PORT, GatewayReceiver.DEFAULT_END_PORT,
          GatewayReceiver.DEFAULT_BIND_ADDRESS, GatewayReceiver.DEFAULT_MAXIMUM_TIME_BETWEEN_PINGS,
          GatewayReceiver.DEFAULT_SOCKET_BUFFER_SIZE, null,
          GatewayReceiver.DEFAULT_HOSTNAME_FOR_SENDERS);
    }, server1, server2, server3);
  }

  /**
   * GatewayReceiver with all default attributes and both bind-address and server-bind-address set
   * in the gemfire properties
   */
  @Test
  public void testCreateGatewayReceiverWithDefaultsAndMultipleBindAddressProperties()
      throws Exception {
    String receiverGroup = "receiverGroup";
    Integer locator1Port = locatorSite1.getPort();
    String expectedBindAddress = getBindAddress();

    Properties props = new Properties();
    props.setProperty(GROUPS, receiverGroup);
    props.setProperty(BIND_ADDRESS, expectedBindAddress);
    props.setProperty(SERVER_BIND_ADDRESS, expectedBindAddress);
    server1 = clusterStartupRule.startServerVM(1, props, locator1Port);
    server2 = clusterStartupRule.startServerVM(2, props, locator1Port);
    server3 = clusterStartupRule.startServerVM(3, props, locator1Port);

    String command = CliStrings.CREATE_GATEWAYRECEIVER + " --" + GROUP + "=" + receiverGroup;
    gfsh.executeAndAssertThat(command).statusIsSuccess()
        .tableHasColumnWithExactValuesInAnyOrder("Member", SERVER_1, SERVER_2, SERVER_3)
        .tableHasColumnWithValuesContaining("Message",
            "GatewayReceiver created on member \"" + SERVER_1 + "\"",
            "GatewayReceiver created on member \"" + SERVER_2 + "\"",
            "GatewayReceiver created on member \"" + SERVER_3 + "\"");

    invokeInEveryMember(() -> {
      // verify server-bind-address used if provided as a gemfire property
      verifyGatewayReceiverProfile(expectedBindAddress);
      verifyGatewayReceiverServerLocations(locator1Port, expectedBindAddress);
      verifyReceiverCreationWithAttributes(!GatewayReceiver.DEFAULT_MANUAL_START,
          GatewayReceiver.DEFAULT_START_PORT, GatewayReceiver.DEFAULT_END_PORT,
          GatewayReceiver.DEFAULT_BIND_ADDRESS, GatewayReceiver.DEFAULT_MAXIMUM_TIME_BETWEEN_PINGS,
          GatewayReceiver.DEFAULT_SOCKET_BUFFER_SIZE, null,
          GatewayReceiver.DEFAULT_HOSTNAME_FOR_SENDERS);
    }, server1, server2, server3);
  }

  /**
   * GatewayReceiver with hostnameForSenders while server-bind-address is also set in the gemfire
   * properties
   */
  @Test
  public void testCreateGatewayReceiverWithHostnameForSendersAndServerBindAddressProperty()
      throws Exception {
    String receiverGroup = "receiverGroup";
    String hostnameForSenders = getHostName();
    String serverBindAddress = getBindAddress();
    Integer locator1Port = locatorSite1.getPort();

    Properties props = new Properties();
    props.setProperty(GROUPS, receiverGroup);
    props.setProperty(SERVER_BIND_ADDRESS, serverBindAddress);
    server1 = clusterStartupRule.startServerVM(1, props, locator1Port);
    server2 = clusterStartupRule.startServerVM(2, props, locator1Port);
    server3 = clusterStartupRule.startServerVM(3, props, locator1Port);

    String command =
        CliStrings.CREATE_GATEWAYRECEIVER + " --" + CliStrings.CREATE_GATEWAYRECEIVER__MANUALSTART
            + "=false" + " --" + CliStrings.CREATE_GATEWAYRECEIVER__HOSTNAMEFORSENDERS + "="
            + hostnameForSenders + " --" + CliStrings.CREATE_GATEWAYRECEIVER__STARTPORT + "=10000"
            + " --" + CliStrings.CREATE_GATEWAYRECEIVER__ENDPORT + "=11000"
            + " --" + CliStrings.CREATE_GATEWAYRECEIVER__MAXTIMEBETWEENPINGS + "=100000"
            + " --" + CliStrings.CREATE_GATEWAYRECEIVER__SOCKETBUFFERSIZE + "=512000"
            + " --" + GROUP + "=" + receiverGroup;
    gfsh.executeAndAssertThat(command).statusIsSuccess()
        .tableHasColumnWithExactValuesInAnyOrder("Member", SERVER_1, SERVER_2, SERVER_3)
        .tableHasColumnWithValuesContaining("Message",
            "GatewayReceiver created on member \"" + SERVER_1 + "\"",
            "GatewayReceiver created on member \"" + SERVER_2 + "\"",
            "GatewayReceiver created on member \"" + SERVER_3 + "\"");

    invokeInEveryMember(() -> {
      // verify hostname-for-senders takes precedence over the server-bind-address property:
      // the profile and server locations advertise hostnameForSenders, not serverBindAddress
      verifyGatewayReceiverProfile(hostnameForSenders);
      verifyGatewayReceiverServerLocations(locator1Port, hostnameForSenders);
      verifyReceiverCreationWithAttributes(true, 10000, 11000, "", 100000, 512000, null,
          hostnameForSenders);
    }, server1, server2, server3);
  }

  /**
   * GatewayReceiver with hostnameForSenders while bind-address is also set in the gemfire
   * properties
   */
  @Test
  public void testCreateGatewayReceiverWithHostnameForSendersAndBindAddressProperty()
      throws Exception {
    String receiverGroup = "receiverGroup";
    String hostnameForSenders = getHostName();
    Integer locator1Port = locatorSite1.getPort();
    String expectedBindAddress = getBindAddress();

    Properties props = new Properties();
    props.setProperty(GROUPS, receiverGroup);
    props.setProperty(BIND_ADDRESS, expectedBindAddress);
    server1 = clusterStartupRule.startServerVM(1, props, locator1Port);
    server2 = clusterStartupRule.startServerVM(2, props, locator1Port);
    server3 = clusterStartupRule.startServerVM(3, props, locator1Port);

    String command =
        CliStrings.CREATE_GATEWAYRECEIVER + " --" + CliStrings.CREATE_GATEWAYRECEIVER__MANUALSTART
            + "=false" + " --" + CliStrings.CREATE_GATEWAYRECEIVER__HOSTNAMEFORSENDERS + "="
            + hostnameForSenders + " --" + CliStrings.CREATE_GATEWAYRECEIVER__STARTPORT + "=10000"
            + " --" + CliStrings.CREATE_GATEWAYRECEIVER__ENDPORT + "=11000"
            + " --" + CliStrings.CREATE_GATEWAYRECEIVER__MAXTIMEBETWEENPINGS + "=100000"
            + " --" + CliStrings.CREATE_GATEWAYRECEIVER__SOCKETBUFFERSIZE + "=512000"
            + " --" + GROUP + "=" + receiverGroup;
    gfsh.executeAndAssertThat(command).statusIsSuccess()
        .tableHasColumnWithExactValuesInAnyOrder("Member", SERVER_1, SERVER_2, SERVER_3)
        .tableHasColumnWithValuesContaining("Message",
            "GatewayReceiver created on member \"" + SERVER_1 + "\"",
            "GatewayReceiver created on member \"" + SERVER_2 + "\"",
            "GatewayReceiver created on member \"" + SERVER_3 + "\"");

    invokeInEveryMember(() -> {
      // hostname-for-senders is advertised even when bind-address is configured
      verifyGatewayReceiverProfile(hostnameForSenders);
      verifyGatewayReceiverServerLocations(locator1Port, hostnameForSenders);
      verifyReceiverCreationWithAttributes(true, 10000, 11000, "", 100000, 512000, null,
          hostnameForSenders);
    }, server1, server2, server3);
  }

  /**
   * GatewayReceiver with given attributes and a single GatewayTransportFilter.
   */
  @Test
  public void testCreateGatewayReceiverWithGatewayTransportFilter() {
    Integer locator1Port = locatorSite1.getPort();
    server1 = clusterStartupRule.startServerVM(1, locator1Port);
    server2 = clusterStartupRule.startServerVM(2, locator1Port);
    server3 = clusterStartupRule.startServerVM(3, locator1Port);

    String command =
        CliStrings.CREATE_GATEWAYRECEIVER + " --" + CliStrings.CREATE_GATEWAYRECEIVER__MANUALSTART
            + "=false" + " --" + CliStrings.CREATE_GATEWAYRECEIVER__BINDADDRESS + "=localhost"
            + " --" + CliStrings.CREATE_GATEWAYRECEIVER__STARTPORT + "=10000"
            + " --" + CliStrings.CREATE_GATEWAYRECEIVER__ENDPORT + "=11000"
            + " --" + CliStrings.CREATE_GATEWAYRECEIVER__MAXTIMEBETWEENPINGS + "=100000"
            + " --" + CliStrings.CREATE_GATEWAYRECEIVER__SOCKETBUFFERSIZE + "=512000"
            + " --" + CliStrings.CREATE_GATEWAYRECEIVER__GATEWAYTRANSPORTFILTER
            + "=org.apache.geode.cache30.MyGatewayTransportFilter1";
    gfsh.executeAndAssertThat(command).statusIsSuccess()
        .tableHasColumnWithExactValuesInAnyOrder("Member", SERVER_1, SERVER_2, SERVER_3)
        .tableHasColumnWithValuesContaining("Message",
            "GatewayReceiver created on member \"" + SERVER_1 + "\"",
            "GatewayReceiver created on member \"" + SERVER_2 + "\"",
            "GatewayReceiver created on member \"" + SERVER_3 + "\"");

    List<String> transportFilters = new ArrayList<>();
    transportFilters.add("org.apache.geode.cache30.MyGatewayTransportFilter1");
    invokeInEveryMember(
        () -> verifyReceiverCreationWithAttributes(true, 10000, 11000, "localhost", 100000,
            512000, transportFilters, GatewayReceiver.DEFAULT_HOSTNAME_FOR_SENDERS),
        server1, server2, server3);
  }

  /**
   * GatewayReceiver with given attributes and multiple GatewayTransportFilters.
   */
  @Test
  public void testCreateGatewayReceiverWithMultipleGatewayTransportFilters() {
    Integer locator1Port = locatorSite1.getPort();
    server1 = clusterStartupRule.startServerVM(1, locator1Port);
    server2 = clusterStartupRule.startServerVM(2, locator1Port);
    server3 = clusterStartupRule.startServerVM(3, locator1Port);

    // Two comma-separated filter class names in a single option value.
    String command =
        CliStrings.CREATE_GATEWAYRECEIVER + " --" + CliStrings.CREATE_GATEWAYRECEIVER__BINDADDRESS
            + "=localhost" + " --" + CliStrings.CREATE_GATEWAYRECEIVER__STARTPORT + "=10000"
            + " --" + CliStrings.CREATE_GATEWAYRECEIVER__ENDPORT + "=11000"
            + " --" + CliStrings.CREATE_GATEWAYRECEIVER__MAXTIMEBETWEENPINGS + "=100000"
            + " --" + CliStrings.CREATE_GATEWAYRECEIVER__SOCKETBUFFERSIZE + "=512000"
            + " --" + CliStrings.CREATE_GATEWAYRECEIVER__GATEWAYTRANSPORTFILTER
            + "=org.apache.geode.cache30.MyGatewayTransportFilter1,org.apache.geode.cache30.MyGatewayTransportFilter2";
    gfsh.executeAndAssertThat(command).statusIsSuccess()
        .tableHasColumnWithExactValuesInAnyOrder("Member", SERVER_1, SERVER_2, SERVER_3)
        .tableHasColumnWithValuesContaining("Message",
            "GatewayReceiver created on member \"" + SERVER_1 + "\"",
            "GatewayReceiver created on member \"" + SERVER_2 + "\"",
            "GatewayReceiver created on member \"" + SERVER_3 + "\"");

    List<String> transportFilters = new ArrayList<>();
    transportFilters.add("org.apache.geode.cache30.MyGatewayTransportFilter1");
    transportFilters.add("org.apache.geode.cache30.MyGatewayTransportFilter2");
    invokeInEveryMember(() -> verifyReceiverCreationWithAttributes(
        !GatewayReceiver.DEFAULT_MANUAL_START, 10000, 11000, "localhost", 100000, 512000,
        transportFilters, GatewayReceiver.DEFAULT_HOSTNAME_FOR_SENDERS),
        server1, server2, server3);
  }

  /**
   * GatewayReceiver with given attributes on the given member.
   */
  @Test
  public void testCreateGatewayReceiverOnSingleMember() {
    Integer locator1Port = locatorSite1.getPort();
    server1 = clusterStartupRule.startServerVM(1, locator1Port);
    server2 = clusterStartupRule.startServerVM(2, locator1Port);
    server3 = clusterStartupRule.startServerVM(3, locator1Port);

    DistributedMember server1Member = getMember(server1.getVM());
    String command =
        CliStrings.CREATE_GATEWAYRECEIVER + " --" + CliStrings.CREATE_GATEWAYRECEIVER__MANUALSTART
            + "=true" + " --" + CliStrings.CREATE_GATEWAYRECEIVER__BINDADDRESS + "=localhost"
            + " --" + CliStrings.CREATE_GATEWAYRECEIVER__STARTPORT + "=10000"
            + " --" + CliStrings.CREATE_GATEWAYRECEIVER__ENDPORT + "=11000"
            + " --" + CliStrings.CREATE_GATEWAYRECEIVER__MAXTIMEBETWEENPINGS + "=100000"
            + " --" + CliStrings.CREATE_GATEWAYRECEIVER__SOCKETBUFFERSIZE + "=512000"
            + " --" + CliStrings.MEMBER + "=" + server1Member.getId();
    gfsh.executeAndAssertThat(command).statusIsSuccess()
        .tableHasColumnWithExactValuesInAnyOrder("Member", SERVER_1)
        .tableHasColumnWithValuesContaining("Message",
            "GatewayReceiver created on member \"" + SERVER_1 + "\"");

    // Receiver exists only on the targeted member.
    invokeInEveryMember(() -> verifyReceiverCreationWithAttributes(false, 10000, 11000,
        "localhost", 100000, 512000, null, GatewayReceiver.DEFAULT_HOSTNAME_FOR_SENDERS),
        server1);

    invokeInEveryMember(() -> {
      Cache cache = ClusterStartupRule.getCache();
      assertThat(cache).isNotNull();
      assertThat(cache.getGatewayReceivers()).isEmpty();
    }, server2, server3);
  }

  /**
   * GatewayReceiver with given attributes on multiple members.
   */
  @Test
  public void testCreateGatewayReceiverOnMultipleMembers() {
    Integer locator1Port = locatorSite1.getPort();
    server1 = clusterStartupRule.startServerVM(1, locator1Port);
    server2 = clusterStartupRule.startServerVM(2, locator1Port);
    server3 = clusterStartupRule.startServerVM(3, locator1Port);

    DistributedMember server1Member = getMember(server1.getVM());
    DistributedMember server2Member = getMember(server2.getVM());
    String command =
        CliStrings.CREATE_GATEWAYRECEIVER + " --" + CliStrings.CREATE_GATEWAYRECEIVER__MANUALSTART
            + "=true" + " --" + CliStrings.CREATE_GATEWAYRECEIVER__BINDADDRESS + "=localhost"
            + " --" + CliStrings.CREATE_GATEWAYRECEIVER__STARTPORT + "=10000"
            + " --" + CliStrings.CREATE_GATEWAYRECEIVER__ENDPORT + "=11000"
            + " --" + CliStrings.CREATE_GATEWAYRECEIVER__MAXTIMEBETWEENPINGS + "=100000"
            + " --" + CliStrings.CREATE_GATEWAYRECEIVER__SOCKETBUFFERSIZE + "=512000"
            + " --" + CliStrings.MEMBER + "=" + server1Member.getId() + ","
            + server2Member.getId();
    gfsh.executeAndAssertThat(command).statusIsSuccess()
        .tableHasColumnWithExactValuesInAnyOrder("Member", SERVER_1, SERVER_2)
        .tableHasColumnWithValuesContaining("Message",
            "GatewayReceiver created on member \"" + SERVER_1 + "\"",
            "GatewayReceiver created on member \"" + SERVER_2 + "\"");

    invokeInEveryMember(() -> verifyReceiverCreationWithAttributes(false, 10000, 11000,
        "localhost", 100000, 512000, null, GatewayReceiver.DEFAULT_HOSTNAME_FOR_SENDERS),
        server1, server2);

    invokeInEveryMember(() -> {
      Cache cache = ClusterStartupRule.getCache();
      assertThat(cache).isNotNull();
      assertThat(cache.getGatewayReceivers()).isEmpty();
    }, server3);
  }

  /**
   * GatewayReceiver with given attributes on the given group.
   */
  @Test
  public void testCreateGatewayReceiverOnGroup() {
    String groups = "receiverGroup1";
    int locator1Port = locatorSite1.getPort();
    server1 = startServerWithGroups(1, groups, locator1Port);
    server2 = startServerWithGroups(2, groups, locator1Port);
    server3 = startServerWithGroups(3, groups, locator1Port);

    String command =
        CliStrings.CREATE_GATEWAYRECEIVER + " --" + CliStrings.CREATE_GATEWAYRECEIVER__MANUALSTART
            + "=true" + " --" + CliStrings.CREATE_GATEWAYRECEIVER__BINDADDRESS + "=localhost"
            + " --" + CliStrings.CREATE_GATEWAYRECEIVER__STARTPORT + "=10000"
            + " --" + CliStrings.CREATE_GATEWAYRECEIVER__ENDPORT + "=11000"
            + " --" + CliStrings.CREATE_GATEWAYRECEIVER__MAXTIMEBETWEENPINGS + "=100000"
            + " --" + CliStrings.CREATE_GATEWAYRECEIVER__SOCKETBUFFERSIZE + "=512000"
            + " --" + GROUP + "=receiverGroup1";
    gfsh.executeAndAssertThat(command).statusIsSuccess()
        .tableHasColumnWithExactValuesInAnyOrder("Member", SERVER_1, SERVER_2, SERVER_3)
        .tableHasColumnWithValuesContaining("Message",
            "GatewayReceiver created on member \"" + SERVER_1 + "\"",
            "GatewayReceiver created on member \"" + SERVER_2 + "\"",
            "GatewayReceiver created on member \"" + SERVER_3 + "\"");

    invokeInEveryMember(() -> verifyReceiverCreationWithAttributes(false, 10000, 11000,
        "localhost", 100000, 512000, null, GatewayReceiver.DEFAULT_HOSTNAME_FOR_SENDERS),
        server1, server2, server3);
  }

  /**
   * GatewayReceiver with given attributes on the given group. Only 2 of 3 members are part of the
   * group.
   */
  @Test
  public void testCreateGatewayReceiverOnGroupScenario2() {
    String group1 = "receiverGroup1";
    String group2 = "receiverGroup2";
    int locator1Port = locatorSite1.getPort();
    server1 = startServerWithGroups(1, group1, locator1Port);
    server2 = startServerWithGroups(2, group1, locator1Port);
    server3 = startServerWithGroups(3, group2, locator1Port);

    String command =
        CliStrings.CREATE_GATEWAYRECEIVER + " --" + CliStrings.CREATE_GATEWAYRECEIVER__MANUALSTART
            + "=true" + " --" + CliStrings.CREATE_GATEWAYRECEIVER__BINDADDRESS + "=localhost"
            + " --" + CliStrings.CREATE_GATEWAYRECEIVER__STARTPORT + "=10000"
            + " --" + CliStrings.CREATE_GATEWAYRECEIVER__ENDPORT + "=11000"
            + " --" + CliStrings.CREATE_GATEWAYRECEIVER__MAXTIMEBETWEENPINGS + "=100000"
            + " --" + CliStrings.CREATE_GATEWAYRECEIVER__SOCKETBUFFERSIZE + "=512000"
            + " --" + GROUP + "=receiverGroup1";
    gfsh.executeAndAssertThat(command).statusIsSuccess()
        .tableHasColumnWithExactValuesInAnyOrder("Member", SERVER_1, SERVER_2)
        .tableHasColumnWithValuesContaining("Message",
            "GatewayReceiver created on member \"" + SERVER_1 + "\"",
            "GatewayReceiver created on member \"" + SERVER_2 + "\"");

    invokeInEveryMember(() -> verifyReceiverCreationWithAttributes(false, 10000, 11000,
        "localhost", 100000, 512000, null, GatewayReceiver.DEFAULT_HOSTNAME_FOR_SENDERS),
        server1, server2);

    // server-3 is in a different group, so no receiver is created there.
    invokeInEveryMember(() -> {
      Cache cache = ClusterStartupRule.getCache();
      assertThat(cache).isNotNull();
      assertThat(cache.getGatewayReceivers()).isEmpty();
    }, server3);
  }

  /**
   * GatewayReceiver with given attributes on multiple groups.
   */
  @Test
  public void testCreateGatewayReceiverOnMultipleGroups() {
    int locator1Port = locatorSite1.getPort();
    server1 = startServerWithGroups(1, "receiverGroup1", locator1Port);
    server2 = startServerWithGroups(2, "receiverGroup1", locator1Port);
    server3 = startServerWithGroups(3, "receiverGroup2", locator1Port);

    String command =
        CliStrings.CREATE_GATEWAYRECEIVER + " --" + CliStrings.CREATE_GATEWAYRECEIVER__MANUALSTART
            + "=true" + " --" + CliStrings.CREATE_GATEWAYRECEIVER__BINDADDRESS + "=localhost"
            + " --" + CliStrings.CREATE_GATEWAYRECEIVER__STARTPORT + "=10000"
            + " --" + CliStrings.CREATE_GATEWAYRECEIVER__ENDPORT + "=11000"
            + " --" + CliStrings.CREATE_GATEWAYRECEIVER__MAXTIMEBETWEENPINGS + "=100000"
            + " --" + CliStrings.CREATE_GATEWAYRECEIVER__SOCKETBUFFERSIZE + "=512000"
            + " --" + GROUP + "=receiverGroup1,receiverGroup2";
    gfsh.executeAndAssertThat(command).statusIsSuccess()
        .tableHasColumnWithExactValuesInAnyOrder("Member", SERVER_1, SERVER_2, SERVER_3)
        .tableHasColumnWithValuesContaining("Message",
            "GatewayReceiver created on member \"" + SERVER_1 + "\"",
            "GatewayReceiver created on member \"" + SERVER_2 + "\"",
            "GatewayReceiver created on member \"" + SERVER_3 + "\"");

    invokeInEveryMember(() -> verifyReceiverCreationWithAttributes(false, 10000, 11000,
        "localhost", 100000, 512000, null, GatewayReceiver.DEFAULT_HOSTNAME_FOR_SENDERS),
        server1, server2, server3);
  }

  // Canonical hostname of the local host, used as the expected advertised address.
  private String getHostName() throws Exception {
    return LocalHostUtil.getLocalHost().getCanonicalHostName();
  }

  // Numeric local-host address, used as a bind-address / server-bind-address property value.
  private String getBindAddress() throws Exception {
    return InetAddress.getLocalHost().getHostAddress();
  }

  // Starts server VM {index} with the given comma-separated group names.
  private MemberVM startServerWithGroups(int index, String groups, int locPort) {
    Properties props = new Properties();
    props.setProperty(GROUPS, groups);
    return clusterStartupRule.startServerVM(index, props, locPort);
  }
}
googleapis/google-cloud-java
36,186
java-shopping-merchant-conversions/proto-google-shopping-merchant-conversions-v1/src/main/java/com/google/shopping/merchant/conversions/v1/GoogleAnalyticsLink.java
/* * Copyright 2025 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ // Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/shopping/merchant/conversions/v1/conversionsources.proto // Protobuf Java Version: 3.25.8 package com.google.shopping.merchant.conversions.v1; /** * * * <pre> * "Google Analytics Link" sources can be used to get conversion data from an * existing Google Analytics property into the linked Merchant Center account. * </pre> * * Protobuf type {@code google.shopping.merchant.conversions.v1.GoogleAnalyticsLink} */ public final class GoogleAnalyticsLink extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.shopping.merchant.conversions.v1.GoogleAnalyticsLink) GoogleAnalyticsLinkOrBuilder { private static final long serialVersionUID = 0L; // Use GoogleAnalyticsLink.newBuilder() to construct. 
private GoogleAnalyticsLink(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private GoogleAnalyticsLink() { property_ = ""; } @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance(UnusedPrivateParameter unused) { return new GoogleAnalyticsLink(); } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.shopping.merchant.conversions.v1.ConversionSourcesProto .internal_static_google_shopping_merchant_conversions_v1_GoogleAnalyticsLink_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.shopping.merchant.conversions.v1.ConversionSourcesProto .internal_static_google_shopping_merchant_conversions_v1_GoogleAnalyticsLink_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.shopping.merchant.conversions.v1.GoogleAnalyticsLink.class, com.google.shopping.merchant.conversions.v1.GoogleAnalyticsLink.Builder.class); } private int bitField0_; public static final int PROPERTY_ID_FIELD_NUMBER = 1; private long propertyId_ = 0L; /** * * * <pre> * Required. Immutable. ID of the Google Analytics property the merchant is * linked to. * </pre> * * <code> * int64 property_id = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.field_behavior) = IMMUTABLE]; * </code> * * @return The propertyId. */ @java.lang.Override public long getPropertyId() { return propertyId_; } public static final int ATTRIBUTION_SETTINGS_FIELD_NUMBER = 2; private com.google.shopping.merchant.conversions.v1.AttributionSettings attributionSettings_; /** * * * <pre> * Output only. Attribution settings for the linked Google Analytics property. * </pre> * * <code> * .google.shopping.merchant.conversions.v1.AttributionSettings attribution_settings = 2 [(.google.api.field_behavior) = OUTPUT_ONLY]; * </code> * * @return Whether the attributionSettings field is set. 
*/ @java.lang.Override public boolean hasAttributionSettings() { return ((bitField0_ & 0x00000001) != 0); } /** * * * <pre> * Output only. Attribution settings for the linked Google Analytics property. * </pre> * * <code> * .google.shopping.merchant.conversions.v1.AttributionSettings attribution_settings = 2 [(.google.api.field_behavior) = OUTPUT_ONLY]; * </code> * * @return The attributionSettings. */ @java.lang.Override public com.google.shopping.merchant.conversions.v1.AttributionSettings getAttributionSettings() { return attributionSettings_ == null ? com.google.shopping.merchant.conversions.v1.AttributionSettings.getDefaultInstance() : attributionSettings_; } /** * * * <pre> * Output only. Attribution settings for the linked Google Analytics property. * </pre> * * <code> * .google.shopping.merchant.conversions.v1.AttributionSettings attribution_settings = 2 [(.google.api.field_behavior) = OUTPUT_ONLY]; * </code> */ @java.lang.Override public com.google.shopping.merchant.conversions.v1.AttributionSettingsOrBuilder getAttributionSettingsOrBuilder() { return attributionSettings_ == null ? com.google.shopping.merchant.conversions.v1.AttributionSettings.getDefaultInstance() : attributionSettings_; } public static final int PROPERTY_FIELD_NUMBER = 3; @SuppressWarnings("serial") private volatile java.lang.Object property_ = ""; /** * * * <pre> * Output only. Name of the Google Analytics property the merchant is linked * to. * </pre> * * <code>string property = 3 [(.google.api.field_behavior) = OUTPUT_ONLY];</code> * * @return The property. */ @java.lang.Override public java.lang.String getProperty() { java.lang.Object ref = property_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); property_ = s; return s; } } /** * * * <pre> * Output only. Name of the Google Analytics property the merchant is linked * to. 
* </pre> * * <code>string property = 3 [(.google.api.field_behavior) = OUTPUT_ONLY];</code> * * @return The bytes for property. */ @java.lang.Override public com.google.protobuf.ByteString getPropertyBytes() { java.lang.Object ref = property_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); property_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { if (propertyId_ != 0L) { output.writeInt64(1, propertyId_); } if (((bitField0_ & 0x00000001) != 0)) { output.writeMessage(2, getAttributionSettings()); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(property_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 3, property_); } getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (propertyId_ != 0L) { size += com.google.protobuf.CodedOutputStream.computeInt64Size(1, propertyId_); } if (((bitField0_ & 0x00000001) != 0)) { size += com.google.protobuf.CodedOutputStream.computeMessageSize(2, getAttributionSettings()); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(property_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(3, property_); } size += getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof com.google.shopping.merchant.conversions.v1.GoogleAnalyticsLink)) { return 
super.equals(obj); } com.google.shopping.merchant.conversions.v1.GoogleAnalyticsLink other = (com.google.shopping.merchant.conversions.v1.GoogleAnalyticsLink) obj; if (getPropertyId() != other.getPropertyId()) return false; if (hasAttributionSettings() != other.hasAttributionSettings()) return false; if (hasAttributionSettings()) { if (!getAttributionSettings().equals(other.getAttributionSettings())) return false; } if (!getProperty().equals(other.getProperty())) return false; if (!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); hash = (37 * hash) + PROPERTY_ID_FIELD_NUMBER; hash = (53 * hash) + com.google.protobuf.Internal.hashLong(getPropertyId()); if (hasAttributionSettings()) { hash = (37 * hash) + ATTRIBUTION_SETTINGS_FIELD_NUMBER; hash = (53 * hash) + getAttributionSettings().hashCode(); } hash = (37 * hash) + PROPERTY_FIELD_NUMBER; hash = (53 * hash) + getProperty().hashCode(); hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static com.google.shopping.merchant.conversions.v1.GoogleAnalyticsLink parseFrom( java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.shopping.merchant.conversions.v1.GoogleAnalyticsLink parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.shopping.merchant.conversions.v1.GoogleAnalyticsLink parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.shopping.merchant.conversions.v1.GoogleAnalyticsLink parseFrom( 
com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.shopping.merchant.conversions.v1.GoogleAnalyticsLink parseFrom( byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.shopping.merchant.conversions.v1.GoogleAnalyticsLink parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.shopping.merchant.conversions.v1.GoogleAnalyticsLink parseFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.shopping.merchant.conversions.v1.GoogleAnalyticsLink parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } public static com.google.shopping.merchant.conversions.v1.GoogleAnalyticsLink parseDelimitedFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); } public static com.google.shopping.merchant.conversions.v1.GoogleAnalyticsLink parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException( PARSER, input, extensionRegistry); } public static com.google.shopping.merchant.conversions.v1.GoogleAnalyticsLink parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return 
com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.shopping.merchant.conversions.v1.GoogleAnalyticsLink parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder( com.google.shopping.merchant.conversions.v1.GoogleAnalyticsLink prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * * * <pre> * "Google Analytics Link" sources can be used to get conversion data from an * existing Google Analytics property into the linked Merchant Center account. 
* </pre> * * Protobuf type {@code google.shopping.merchant.conversions.v1.GoogleAnalyticsLink} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:google.shopping.merchant.conversions.v1.GoogleAnalyticsLink) com.google.shopping.merchant.conversions.v1.GoogleAnalyticsLinkOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.shopping.merchant.conversions.v1.ConversionSourcesProto .internal_static_google_shopping_merchant_conversions_v1_GoogleAnalyticsLink_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.shopping.merchant.conversions.v1.ConversionSourcesProto .internal_static_google_shopping_merchant_conversions_v1_GoogleAnalyticsLink_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.shopping.merchant.conversions.v1.GoogleAnalyticsLink.class, com.google.shopping.merchant.conversions.v1.GoogleAnalyticsLink.Builder.class); } // Construct using com.google.shopping.merchant.conversions.v1.GoogleAnalyticsLink.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) { getAttributionSettingsFieldBuilder(); } } @java.lang.Override public Builder clear() { super.clear(); bitField0_ = 0; propertyId_ = 0L; attributionSettings_ = null; if (attributionSettingsBuilder_ != null) { attributionSettingsBuilder_.dispose(); attributionSettingsBuilder_ = null; } property_ = ""; return this; } @java.lang.Override public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return 
com.google.shopping.merchant.conversions.v1.ConversionSourcesProto .internal_static_google_shopping_merchant_conversions_v1_GoogleAnalyticsLink_descriptor; } @java.lang.Override public com.google.shopping.merchant.conversions.v1.GoogleAnalyticsLink getDefaultInstanceForType() { return com.google.shopping.merchant.conversions.v1.GoogleAnalyticsLink.getDefaultInstance(); } @java.lang.Override public com.google.shopping.merchant.conversions.v1.GoogleAnalyticsLink build() { com.google.shopping.merchant.conversions.v1.GoogleAnalyticsLink result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public com.google.shopping.merchant.conversions.v1.GoogleAnalyticsLink buildPartial() { com.google.shopping.merchant.conversions.v1.GoogleAnalyticsLink result = new com.google.shopping.merchant.conversions.v1.GoogleAnalyticsLink(this); if (bitField0_ != 0) { buildPartial0(result); } onBuilt(); return result; } private void buildPartial0( com.google.shopping.merchant.conversions.v1.GoogleAnalyticsLink result) { int from_bitField0_ = bitField0_; if (((from_bitField0_ & 0x00000001) != 0)) { result.propertyId_ = propertyId_; } int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000002) != 0)) { result.attributionSettings_ = attributionSettingsBuilder_ == null ? 
attributionSettings_ : attributionSettingsBuilder_.build(); to_bitField0_ |= 0x00000001; } if (((from_bitField0_ & 0x00000004) != 0)) { result.property_ = property_; } result.bitField0_ |= to_bitField0_; } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof com.google.shopping.merchant.conversions.v1.GoogleAnalyticsLink) { return mergeFrom((com.google.shopping.merchant.conversions.v1.GoogleAnalyticsLink) other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom( com.google.shopping.merchant.conversions.v1.GoogleAnalyticsLink other) { if (other == com.google.shopping.merchant.conversions.v1.GoogleAnalyticsLink.getDefaultInstance()) return this; if (other.getPropertyId() != 0L) { setPropertyId(other.getPropertyId()); } if (other.hasAttributionSettings()) { mergeAttributionSettings(other.getAttributionSettings()); } if (!other.getProperty().isEmpty()) { property_ = other.property_; bitField0_ |= 0x00000004; onChanged(); } this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @java.lang.Override 
public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 8: { propertyId_ = input.readInt64(); bitField0_ |= 0x00000001; break; } // case 8 case 18: { input.readMessage( getAttributionSettingsFieldBuilder().getBuilder(), extensionRegistry); bitField0_ |= 0x00000002; break; } // case 18 case 26: { property_ = input.readStringRequireUtf8(); bitField0_ |= 0x00000004; break; } // case 26 default: { if (!super.parseUnknownField(input, extensionRegistry, tag)) { done = true; // was an endgroup tag } break; } // default: } // switch (tag) } // while (!done) } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.unwrapIOException(); } finally { onChanged(); } // finally return this; } private int bitField0_; private long propertyId_; /** * * * <pre> * Required. Immutable. ID of the Google Analytics property the merchant is * linked to. * </pre> * * <code> * int64 property_id = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.field_behavior) = IMMUTABLE]; * </code> * * @return The propertyId. */ @java.lang.Override public long getPropertyId() { return propertyId_; } /** * * * <pre> * Required. Immutable. ID of the Google Analytics property the merchant is * linked to. * </pre> * * <code> * int64 property_id = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.field_behavior) = IMMUTABLE]; * </code> * * @param value The propertyId to set. * @return This builder for chaining. */ public Builder setPropertyId(long value) { propertyId_ = value; bitField0_ |= 0x00000001; onChanged(); return this; } /** * * * <pre> * Required. Immutable. 
ID of the Google Analytics property the merchant is * linked to. * </pre> * * <code> * int64 property_id = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.field_behavior) = IMMUTABLE]; * </code> * * @return This builder for chaining. */ public Builder clearPropertyId() { bitField0_ = (bitField0_ & ~0x00000001); propertyId_ = 0L; onChanged(); return this; } private com.google.shopping.merchant.conversions.v1.AttributionSettings attributionSettings_; private com.google.protobuf.SingleFieldBuilderV3< com.google.shopping.merchant.conversions.v1.AttributionSettings, com.google.shopping.merchant.conversions.v1.AttributionSettings.Builder, com.google.shopping.merchant.conversions.v1.AttributionSettingsOrBuilder> attributionSettingsBuilder_; /** * * * <pre> * Output only. Attribution settings for the linked Google Analytics property. * </pre> * * <code> * .google.shopping.merchant.conversions.v1.AttributionSettings attribution_settings = 2 [(.google.api.field_behavior) = OUTPUT_ONLY]; * </code> * * @return Whether the attributionSettings field is set. */ public boolean hasAttributionSettings() { return ((bitField0_ & 0x00000002) != 0); } /** * * * <pre> * Output only. Attribution settings for the linked Google Analytics property. * </pre> * * <code> * .google.shopping.merchant.conversions.v1.AttributionSettings attribution_settings = 2 [(.google.api.field_behavior) = OUTPUT_ONLY]; * </code> * * @return The attributionSettings. */ public com.google.shopping.merchant.conversions.v1.AttributionSettings getAttributionSettings() { if (attributionSettingsBuilder_ == null) { return attributionSettings_ == null ? com.google.shopping.merchant.conversions.v1.AttributionSettings.getDefaultInstance() : attributionSettings_; } else { return attributionSettingsBuilder_.getMessage(); } } /** * * * <pre> * Output only. Attribution settings for the linked Google Analytics property. 
* </pre> * * <code> * .google.shopping.merchant.conversions.v1.AttributionSettings attribution_settings = 2 [(.google.api.field_behavior) = OUTPUT_ONLY]; * </code> */ public Builder setAttributionSettings( com.google.shopping.merchant.conversions.v1.AttributionSettings value) { if (attributionSettingsBuilder_ == null) { if (value == null) { throw new NullPointerException(); } attributionSettings_ = value; } else { attributionSettingsBuilder_.setMessage(value); } bitField0_ |= 0x00000002; onChanged(); return this; } /** * * * <pre> * Output only. Attribution settings for the linked Google Analytics property. * </pre> * * <code> * .google.shopping.merchant.conversions.v1.AttributionSettings attribution_settings = 2 [(.google.api.field_behavior) = OUTPUT_ONLY]; * </code> */ public Builder setAttributionSettings( com.google.shopping.merchant.conversions.v1.AttributionSettings.Builder builderForValue) { if (attributionSettingsBuilder_ == null) { attributionSettings_ = builderForValue.build(); } else { attributionSettingsBuilder_.setMessage(builderForValue.build()); } bitField0_ |= 0x00000002; onChanged(); return this; } /** * * * <pre> * Output only. Attribution settings for the linked Google Analytics property. 
* </pre> * * <code> * .google.shopping.merchant.conversions.v1.AttributionSettings attribution_settings = 2 [(.google.api.field_behavior) = OUTPUT_ONLY]; * </code> */ public Builder mergeAttributionSettings( com.google.shopping.merchant.conversions.v1.AttributionSettings value) { if (attributionSettingsBuilder_ == null) { if (((bitField0_ & 0x00000002) != 0) && attributionSettings_ != null && attributionSettings_ != com.google.shopping.merchant.conversions.v1.AttributionSettings .getDefaultInstance()) { getAttributionSettingsBuilder().mergeFrom(value); } else { attributionSettings_ = value; } } else { attributionSettingsBuilder_.mergeFrom(value); } if (attributionSettings_ != null) { bitField0_ |= 0x00000002; onChanged(); } return this; } /** * * * <pre> * Output only. Attribution settings for the linked Google Analytics property. * </pre> * * <code> * .google.shopping.merchant.conversions.v1.AttributionSettings attribution_settings = 2 [(.google.api.field_behavior) = OUTPUT_ONLY]; * </code> */ public Builder clearAttributionSettings() { bitField0_ = (bitField0_ & ~0x00000002); attributionSettings_ = null; if (attributionSettingsBuilder_ != null) { attributionSettingsBuilder_.dispose(); attributionSettingsBuilder_ = null; } onChanged(); return this; } /** * * * <pre> * Output only. Attribution settings for the linked Google Analytics property. * </pre> * * <code> * .google.shopping.merchant.conversions.v1.AttributionSettings attribution_settings = 2 [(.google.api.field_behavior) = OUTPUT_ONLY]; * </code> */ public com.google.shopping.merchant.conversions.v1.AttributionSettings.Builder getAttributionSettingsBuilder() { bitField0_ |= 0x00000002; onChanged(); return getAttributionSettingsFieldBuilder().getBuilder(); } /** * * * <pre> * Output only. Attribution settings for the linked Google Analytics property. 
* </pre> * * <code> * .google.shopping.merchant.conversions.v1.AttributionSettings attribution_settings = 2 [(.google.api.field_behavior) = OUTPUT_ONLY]; * </code> */ public com.google.shopping.merchant.conversions.v1.AttributionSettingsOrBuilder getAttributionSettingsOrBuilder() { if (attributionSettingsBuilder_ != null) { return attributionSettingsBuilder_.getMessageOrBuilder(); } else { return attributionSettings_ == null ? com.google.shopping.merchant.conversions.v1.AttributionSettings.getDefaultInstance() : attributionSettings_; } } /** * * * <pre> * Output only. Attribution settings for the linked Google Analytics property. * </pre> * * <code> * .google.shopping.merchant.conversions.v1.AttributionSettings attribution_settings = 2 [(.google.api.field_behavior) = OUTPUT_ONLY]; * </code> */ private com.google.protobuf.SingleFieldBuilderV3< com.google.shopping.merchant.conversions.v1.AttributionSettings, com.google.shopping.merchant.conversions.v1.AttributionSettings.Builder, com.google.shopping.merchant.conversions.v1.AttributionSettingsOrBuilder> getAttributionSettingsFieldBuilder() { if (attributionSettingsBuilder_ == null) { attributionSettingsBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< com.google.shopping.merchant.conversions.v1.AttributionSettings, com.google.shopping.merchant.conversions.v1.AttributionSettings.Builder, com.google.shopping.merchant.conversions.v1.AttributionSettingsOrBuilder>( getAttributionSettings(), getParentForChildren(), isClean()); attributionSettings_ = null; } return attributionSettingsBuilder_; } private java.lang.Object property_ = ""; /** * * * <pre> * Output only. Name of the Google Analytics property the merchant is linked * to. * </pre> * * <code>string property = 3 [(.google.api.field_behavior) = OUTPUT_ONLY];</code> * * @return The property. 
*/ public java.lang.String getProperty() { java.lang.Object ref = property_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); property_ = s; return s; } else { return (java.lang.String) ref; } } /** * * * <pre> * Output only. Name of the Google Analytics property the merchant is linked * to. * </pre> * * <code>string property = 3 [(.google.api.field_behavior) = OUTPUT_ONLY];</code> * * @return The bytes for property. */ public com.google.protobuf.ByteString getPropertyBytes() { java.lang.Object ref = property_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); property_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * * * <pre> * Output only. Name of the Google Analytics property the merchant is linked * to. * </pre> * * <code>string property = 3 [(.google.api.field_behavior) = OUTPUT_ONLY];</code> * * @param value The property to set. * @return This builder for chaining. */ public Builder setProperty(java.lang.String value) { if (value == null) { throw new NullPointerException(); } property_ = value; bitField0_ |= 0x00000004; onChanged(); return this; } /** * * * <pre> * Output only. Name of the Google Analytics property the merchant is linked * to. * </pre> * * <code>string property = 3 [(.google.api.field_behavior) = OUTPUT_ONLY];</code> * * @return This builder for chaining. */ public Builder clearProperty() { property_ = getDefaultInstance().getProperty(); bitField0_ = (bitField0_ & ~0x00000004); onChanged(); return this; } /** * * * <pre> * Output only. Name of the Google Analytics property the merchant is linked * to. * </pre> * * <code>string property = 3 [(.google.api.field_behavior) = OUTPUT_ONLY];</code> * * @param value The bytes for property to set. * @return This builder for chaining. 
*/ public Builder setPropertyBytes(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); property_ = value; bitField0_ |= 0x00000004; onChanged(); return this; } @java.lang.Override public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:google.shopping.merchant.conversions.v1.GoogleAnalyticsLink) } // @@protoc_insertion_point(class_scope:google.shopping.merchant.conversions.v1.GoogleAnalyticsLink) private static final com.google.shopping.merchant.conversions.v1.GoogleAnalyticsLink DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new com.google.shopping.merchant.conversions.v1.GoogleAnalyticsLink(); } public static com.google.shopping.merchant.conversions.v1.GoogleAnalyticsLink getDefaultInstance() { return DEFAULT_INSTANCE; } private static final com.google.protobuf.Parser<GoogleAnalyticsLink> PARSER = new com.google.protobuf.AbstractParser<GoogleAnalyticsLink>() { @java.lang.Override public GoogleAnalyticsLink parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { Builder builder = newBuilder(); try { builder.mergeFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(builder.buildPartial()); } catch (com.google.protobuf.UninitializedMessageException e) { throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException(e) .setUnfinishedMessage(builder.buildPartial()); } return 
builder.buildPartial(); } }; public static com.google.protobuf.Parser<GoogleAnalyticsLink> parser() { return PARSER; } @java.lang.Override public com.google.protobuf.Parser<GoogleAnalyticsLink> getParserForType() { return PARSER; } @java.lang.Override public com.google.shopping.merchant.conversions.v1.GoogleAnalyticsLink getDefaultInstanceForType() { return DEFAULT_INSTANCE; } }
oracle/graal
36,462
truffle/src/com.oracle.truffle.api.strings/src/com/oracle/truffle/api/strings/IndexOfCodePointSet.java
/* * Copyright (c) 2022, 2023, Oracle and/or its affiliates. All rights reserved. * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER. * * The Universal Permissive License (UPL), Version 1.0 * * Subject to the condition set forth below, permission is hereby granted to any * person obtaining a copy of this software, associated documentation and/or * data (collectively the "Software"), free of charge and under any and all * copyright rights in the Software, and any and all patent rights owned or * freely licensable by each licensor hereunder covering either (i) the * unmodified Software as contributed to or provided by such licensor, or (ii) * the Larger Works (as defined below), to deal in both * * (a) the Software, and * * (b) any piece of software and/or hardware listed in the lrgrwrks.txt file if * one is included with the Software each a "Larger Work" to which the Software * is contributed by such licensors), * * without restriction, including without limitation the rights to copy, create * derivative works of, display, perform, and distribute the Software and make, * use, sell, offer for sale, import, export, have made, and have sold the * Software and the Larger Work(s), and to sublicense the foregoing rights on * either these or other terms. * * This license is subject to the following condition: * * The above copyright notice and either this complete permission notice or at a * minimum a reference to the UPL must be included in all copies or substantial * portions of the Software. * * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE * SOFTWARE. */ package com.oracle.truffle.api.strings; import static com.oracle.truffle.api.strings.TStringUnsafe.byteArrayBaseOffset; import java.util.ArrayList; import java.util.Arrays; import com.oracle.truffle.api.ArrayUtils; import com.oracle.truffle.api.CompilerAsserts; import com.oracle.truffle.api.CompilerDirectives; import com.oracle.truffle.api.CompilerDirectives.CompilationFinal; import com.oracle.truffle.api.dsl.Cached; import com.oracle.truffle.api.dsl.Specialization; import com.oracle.truffle.api.nodes.Node; import com.oracle.truffle.api.profiles.InlinedBranchProfile; import com.oracle.truffle.api.strings.IndexOfCodePointSetFactory.AnyMatchNodeGen; import com.oracle.truffle.api.strings.IndexOfCodePointSetFactory.IndexOfAnyRangeNodeGen; import com.oracle.truffle.api.strings.IndexOfCodePointSetFactory.IndexOfAnyValueNodeGen; import com.oracle.truffle.api.strings.IndexOfCodePointSetFactory.IndexOfBitSetNodeGen; import com.oracle.truffle.api.strings.IndexOfCodePointSetFactory.IndexOfRangesNodeGen; import com.oracle.truffle.api.strings.IndexOfCodePointSetFactory.IndexOfStringNodeGen; import com.oracle.truffle.api.strings.IndexOfCodePointSetFactory.IndexOfTableNodeGen; import com.oracle.truffle.api.strings.IndexOfCodePointSetFactory.NoMatchNodeGen; import com.oracle.truffle.api.strings.TruffleString.Encoding; final class IndexOfCodePointSet { private static final int[] EMPTY_RANGES = {}; private static final int[] ASCII_RANGE = {0, 0x7f}; private static final int[] LATIN_RANGE = {0, 0xff}; private static final int[] BMP_WITHOUT_SURROGATES = {0x0000, 0xd7ff, 0xe000, 0xffff}; private static final int[] ALL_WITHOUT_SURROGATES = {0x0000, 0xd7ff, 0xe000, 0x10ffff}; private static final int[] ALL = {0x0000, 
0x10ffff}; private static final int TABLE_SIZE = 16; static IndexOfNode[] fromRanges(int[] ranges, Encoding encoding) { checkRangesArray(ranges, encoding); return extractIndexOfNodes(ranges, encoding); } static void checkRangesArray(int[] ranges, Encoding encoding) { if ((ranges.length & 1) != 0) { throw new IllegalArgumentException("ranges must have an even number of elements"); } int maxCodePoint = Encodings.maxCodePoint(encoding); int lastHi = -2; for (int i = 0; i < ranges.length; i += 2) { int lo = ranges[i]; int hi = ranges[i + 1]; checkIllegalCodepoint(lo, maxCodePoint); checkIllegalCodepoint(hi, maxCodePoint); if (lo > hi) { throw new IllegalArgumentException(String.format("range [0x%x - 0x%x] out of order", lo, hi)); } if (lo == lastHi + 1) { throw new IllegalArgumentException(String.format("ranges [0x%x - 0x%x] and [0x%x - 0x%x] are directly adjacent and must be merged into one", ranges[i - 2], lastHi, lo, hi)); } if (lastHi >= lo) { throw new IllegalArgumentException("ranges are not sorted"); } lastHi = hi; } } private static IndexOfNode[] extractIndexOfNodes(int[] ranges, Encoding encoding) { if (encoding == Encoding.US_ASCII || encoding == Encoding.ISO_8859_1 || encoding == Encoding.BYTES || getMax(ranges) <= 0x7f) { return extractIndexOfNodes1ByteEncoding(ranges); } else if (encoding == Encoding.UTF_8) { if (isSingleValue(ranges)) { int codepoint = getMin(ranges); byte[] encoded = Encodings.utf8Encode(codepoint); int codeRange = Encodings.isUTF16Surrogate(codepoint) ? TSCodeRange.getBrokenMultiByte() : TSCodeRange.getValidMultiByte(); int codepointLength = Encodings.isUTF16Surrogate(codepoint) ? 
encoded.length : 1; return new IndexOfNode[]{IndexOfStringNodeGen.create(TSCodeRange.getBrokenMultiByte(), TruffleString.createFromByteArray(encoded, encoded.length, 0, Encoding.UTF_8, codepointLength, codeRange))}; } else { IndexOfNode ascii = extractIndexOfNodeFixedWidth(TSCodeRange.get7Bit(), ranges, ASCII_RANGE); IndexOfRangesNode nonAscii = IndexOfRangesNodeGen.create(TSCodeRange.getBrokenMultiByte(), ranges); return ascii.codeEquals(nonAscii) ? new IndexOfNode[]{nonAscii} : new IndexOfNode[]{ascii, nonAscii}; } } else { ArrayList<IndexOfNode> nodes = new ArrayList<>(); nodes.add(extractIndexOfNodeFixedWidth(TSCodeRange.get7Bit(), ranges, ASCII_RANGE)); addOrReplaceLast(nodes, extractIndexOfNodeFixedWidth(TSCodeRange.get8Bit(), ranges, LATIN_RANGE)); addOrReplaceLast(nodes, extractIndexOfNodeFixedWidth(TSCodeRange.get16Bit(), ranges, BMP_WITHOUT_SURROGATES)); if (encoding == Encoding.UTF_16) { if (!Arrays.equals(intersect(ranges, BMP_WITHOUT_SURROGATES), ranges)) { if (isSingleValue(ranges)) { int codepoint = getMin(ranges); if (Encodings.isUTF16Surrogate(codepoint)) { addOrReplaceLast(nodes, IndexOfAnyValueNodeGen.create(TSCodeRange.getBrokenMultiByte(), new int[]{codepoint})); } else { assert codepoint > 0xffff; byte[] encoded = Encodings.utf16Encode(codepoint); addOrReplaceLast(nodes, IndexOfStringNodeGen.create(TSCodeRange.getBrokenMultiByte(), TruffleString.createFromByteArray(encoded, encoded.length >> 1, 1, Encoding.UTF_16, 1, TSCodeRange.getValidMultiByte()))); } } else { addOrReplaceLast(nodes, IndexOfRangesNodeGen.create(TSCodeRange.getBrokenMultiByte(), ranges)); } } } else if (encoding == Encoding.UTF_32) { addOrReplaceLast(nodes, extractIndexOfNodeFixedWidth(TSCodeRange.getValidFixedWidth(), ranges, ALL_WITHOUT_SURROGATES)); addOrReplaceLast(nodes, extractIndexOfNodeFixedWidth(TSCodeRange.getBrokenFixedWidth(), ranges, ALL)); } else { throw new UnsupportedOperationException(); } return nodes.toArray(IndexOfNode[]::new); } } private static void 
addOrReplaceLast(ArrayList<IndexOfNode> nodes, IndexOfNode node) { if (nodes.get(nodes.size() - 1).codeEquals(node)) { assert TSCodeRange.isMoreRestrictiveThan(nodes.get(nodes.size() - 1).maxCodeRange, node.maxCodeRange); nodes.remove(nodes.size() - 1); } nodes.add(node); } private static void checkIllegalCodepoint(int c, int maxCodePoint) { if (Integer.toUnsignedLong(c) > maxCodePoint) { throw new IllegalArgumentException(String.format("illegal codepoint value 0x%x", c)); } } private static IndexOfNode[] extractIndexOfNodes1ByteEncoding(int[] ranges) { IndexOfNode ascii = extractIndexOfNodeFixedWidth(TSCodeRange.get7Bit(), ranges, ASCII_RANGE); IndexOfNode latin = extractIndexOfNodeFixedWidth(TSCodeRange.get8Bit(), ranges, LATIN_RANGE); return ascii.codeEquals(latin) ? new IndexOfNode[]{latin} : new IndexOfNode[]{ascii, latin}; } private static int[] intersect(int[] rangesA, int[] rangesB) { if (isEmpty(rangesA) || getMin(rangesB) <= getMin(rangesA) && getHi(rangesB, 0) >= getMax(rangesA)) { return rangesA; } if (size(rangesB) == 1) { return intersectSingleRange(rangesA, rangesB[0], rangesB[1]); } assert size(rangesB) == 2; return intersectTwoRanges(rangesA, rangesB[0], rangesB[1], rangesB[2], rangesB[3]); } private static int[] intersectSingleRange(int[] ranges, int lo, int hi) { int size = size(ranges); int iLo = findFirstIntersection(ranges, lo, 0); int iHi = findLastIntersection(ranges, hi, size - 1); if (iHi < iLo) { return EMPTY_RANGES; } int[] intersection = Arrays.copyOfRange(ranges, iLo << 1, (iHi + 1) << 1); intersection[0] = Math.max(intersection[0], lo); intersection[intersection.length - 1] = Math.min(intersection[intersection.length - 1], hi); return intersection; } private static int findFirstIntersection(int[] ranges, int lo, int startIndex) { int iLo = startIndex; while (iLo < size(ranges) && getHi(ranges, iLo) < lo) { iLo++; } return iLo; } private static int findLastIntersection(int[] ranges, int hi, int startIndex) { int iHi = startIndex; while 
(iHi >= 0 && getLo(ranges, iHi) > hi) { iHi--; } return iHi; } private static int[] intersectTwoRanges(int[] ranges, int lo0, int hi0, int lo1, int hi1) { if (hi1 < getMin(ranges) || lo0 > getMax(ranges)) { return EMPTY_RANGES; } int size = size(ranges); int iLo0 = findFirstIntersection(ranges, lo0, 0); int iLo1 = findFirstIntersection(ranges, lo1, 0); int iHi0 = findLastIntersection(ranges, hi0, size - 1); int iHi1 = findLastIntersection(ranges, hi1, size - 1); int size0 = Math.max(0, iHi0 + 1 - iLo0); int size1 = Math.max(0, iHi1 + 1 - iLo1); int intersectionSize = size0 + size1; if (intersectionSize == 0) { return EMPTY_RANGES; } int[] intersection = new int[intersectionSize << 1]; System.arraycopy(ranges, iLo0 << 1, intersection, 0, size0 << 1); System.arraycopy(ranges, iLo1 << 1, intersection, size0 << 1, size1 << 1); if (size0 != 0) { intersection[0] = Math.max(intersection[0], lo0); intersection[(size0 << 1) - 1] = Math.min(intersection[(size0 << 1) - 1], hi0); } if (size1 != 0) { intersection[(size0 << 1)] = Math.max(intersection[(size0 << 1)], lo1); intersection[intersection.length - 1] = Math.min(intersection[intersection.length - 1], hi1); } return intersection; } private static IndexOfNode extractIndexOfNodeFixedWidth(int maxCodeRange, int[] ranges, int[] bounds) { int[] intersection = intersect(ranges, bounds); if (intersection.length == 0) { return NoMatchNodeGen.create(maxCodeRange); } if (Arrays.equals(intersection, bounds)) { return AnyMatchNodeGen.create(maxCodeRange); } int valueCount = valueCount(intersection); if (valueCount <= 4) { return IndexOfAnyValueNodeGen.create(maxCodeRange, toValues(intersection, valueCount)); } else if (size(intersection) <= 2) { return IndexOfAnyRangeNodeGen.create(maxCodeRange, intersection); } else if (getMax(intersection) <= 0xff) { byte[] tables = generateTable(intersection); if (tables != null) { return IndexOfTableNodeGen.create(maxCodeRange, tables); } else { return IndexOfBitSetNode.fromRanges(maxCodeRange, 
intersection); } } return IndexOfRangesNodeGen.create(maxCodeRange, intersection); } private static boolean isEmpty(int[] ranges) { return ranges.length == 0; } /** * Returns the number of ranges in the given list of ranges. */ private static int size(int[] ranges) { return ranges.length >> 1; } /** * Returns the lower bound of range {@code i}. */ private static int getLo(int[] ranges, int i) { return ranges[i << 1]; } /** * Returns the upper bound of range {@code i}. */ private static int getHi(int[] ranges, int i) { return ranges[(i << 1) + 1]; } /** * Returns the minimum value contained in the given list of ranges. */ private static int getMin(int[] ranges) { return ranges[0]; } /** * Returns the maximum value contained in the given list of ranges. */ private static int getMax(int[] ranges) { return ranges[ranges.length - 1]; } /** * Returns {@code true} if the given list of range contains only one value, i.e. it consists of * only one single-value range. */ private static boolean isSingleValue(int[] ranges) { return ranges.length == 2 && ranges[0] == ranges[1]; } /** * Returns the number of values contained in the given list of ranges. */ private static int valueCount(int[] ranges) { int count = 0; for (int i = 0; i < ranges.length; i += 2) { count += (ranges[i + 1] - ranges[i]) + 1; } return count; } /** * Returns {@code true} if the given list of ranges contains value {@code v}. */ private static boolean contains(int[] ranges, int v) { for (int i = 0; i < ranges.length; i += 2) { if (ranges[i] <= v && v <= ranges[i + 1]) { return true; } } return false; } /** * Converts the given list of ranges to an array of values, e.g. * {@code [1-3, 5-6] -> [1, 2, 3, 5, 6]}. 
*/ private static int[] toValues(int[] ranges, int valueCount) { int[] values = new int[valueCount]; int index = 0; for (int i = 0; i < ranges.length; i += 2) { for (int j = ranges[i]; j <= ranges[i + 1]; j++) { values[index++] = j; } } return values; } abstract static class IndexOfNode extends Node { final byte maxCodeRange; IndexOfNode(int maxCodeRange) { assert TSCodeRange.isCodeRange(maxCodeRange); this.maxCodeRange = (byte) maxCodeRange; } abstract int execute(Node location, byte[] arrayA, long offsetA, int lengthA, int strideA, int codeRangeA, int fromIndex, int toIndex, Encoding encoding); @Specialization int doWithConditionProfile(Node location, byte[] arrayA, long offsetA, int lengthA, int strideA, int codeRangeA, int fromIndex, int toIndex, Encoding encoding, @Cached InlinedBranchProfile branchProfile) { branchProfile.enter(this); return runSearch(location, arrayA, offsetA, lengthA, strideA, codeRangeA, fromIndex, toIndex, encoding); } @SuppressWarnings("unused") int runSearch(Node location, byte[] arrayA, long offsetA, int lengthA, int strideA, int codeRangeA, int fromIndex, int toIndex, Encoding encoding) { throw CompilerDirectives.shouldNotReachHere(); } @SuppressWarnings("unused") boolean codeEquals(IndexOfNode other) { throw CompilerDirectives.shouldNotReachHere(); } @SuppressWarnings("unused") IndexOfNode shallowCopy() { throw CompilerDirectives.shouldNotReachHere(); } final byte getMaxCodeRange() { return maxCodeRange; } final boolean isFast() { return this instanceof OptimizedIndexOfNode; } } abstract static class OptimizedIndexOfNode extends IndexOfNode { OptimizedIndexOfNode(int maxCodeRange) { super(maxCodeRange); } } abstract static class ScalarIndexOfNode extends IndexOfNode { ScalarIndexOfNode(int maxCodeRange) { super(maxCodeRange); } @Override int runSearch(Node location, byte[] arrayA, long offsetA, int lengthA, int strideA, int codeRangeA, int fromIndex, int toIndex, Encoding encoding) { CompilerAsserts.partialEvaluationConstant(this); 
CompilerAsserts.partialEvaluationConstant(encoding); int codepointLength = 1; // iterate codepoints for (int i = fromIndex; i < toIndex; i += codepointLength) { final int codepoint; if (encoding == Encoding.US_ASCII || encoding == Encoding.ISO_8859_1 || encoding == Encoding.BYTES || TSCodeRange.isFixedWidth(codeRangeA)) { // fixed-width encoding: just read the next array element codepoint = TStringOps.readValue(arrayA, offsetA, lengthA, strideA, i); } else if (encoding == Encoding.UTF_8) { // utf-8 decode if (TSCodeRange.isValid(codeRangeA)) { int firstByte = TStringOps.readS0(arrayA, offsetA, lengthA, i); codepointLength = firstByte <= 0x7f ? 1 : Encodings.utf8CodePointLength(firstByte); codepoint = Encodings.utf8DecodeValid(arrayA, offsetA, lengthA, i); } else { codepointLength = Encodings.utf8GetCodePointLength(arrayA, offsetA, lengthA, i, DecodingErrorHandler.DEFAULT); codepoint = Encodings.utf8DecodeBroken(arrayA, offsetA, lengthA, i, TruffleString.ErrorHandling.BEST_EFFORT); } } else { // utf-16 decode assert encoding == Encoding.UTF_16; if (TSCodeRange.isValid(codeRangeA)) { codepointLength = Encodings.isUTF16HighSurrogate(TStringOps.readS1(arrayA, offsetA, lengthA, i)) ? 2 : 1; codepoint = Encodings.utf16DecodeValid(arrayA, offsetA, lengthA, i); } else { codepointLength = Encodings.utf16BrokenGetCodePointByteLength(arrayA, offsetA, lengthA, i, TruffleString.ErrorHandling.BEST_EFFORT) >> 1; codepoint = Encodings.utf16DecodeBroken(arrayA, offsetA, lengthA, i, TruffleString.ErrorHandling.BEST_EFFORT); } } // check if the decoded codepoint is contained in the codepoint set if (match(codepoint)) { return i; } } return -1; } @SuppressWarnings("unused") boolean match(int codepoint) { throw CompilerDirectives.shouldNotReachHere(); } } /** * No match possible. 
*/ abstract static class NoMatch extends OptimizedIndexOfNode { NoMatch(int maxCodeRange) { super(maxCodeRange); } @Override int runSearch(Node location, byte[] arrayA, long offsetA, int lengthA, int strideA, int codeRangeA, int fromIndex, int toIndex, Encoding encoding) { return -1; } @Override boolean codeEquals(IndexOfNode other) { return other instanceof NoMatch; } @Override IndexOfNode shallowCopy() { return NoMatchNodeGen.create(maxCodeRange); } } /** * Will always match immediately. */ abstract static class AnyMatch extends OptimizedIndexOfNode { AnyMatch(int maxCodeRange) { super(maxCodeRange); } @Override int runSearch(Node location, byte[] arrayA, long offsetA, int lengthA, int strideA, int codeRangeA, int fromIndex, int toIndex, Encoding encoding) { return fromIndex; } @Override boolean codeEquals(IndexOfNode other) { return other instanceof AnyMatch; } @Override IndexOfNode shallowCopy() { return AnyMatchNodeGen.create(maxCodeRange); } } /** * Match any of up to four values, without decoding. */ abstract static class IndexOfAnyValueNode extends OptimizedIndexOfNode { @CompilationFinal(dimensions = 1) final int[] values; IndexOfAnyValueNode(int maxCodeRange, int[] values) { super(maxCodeRange); this.values = values; } @Override int runSearch(Node location, byte[] arrayA, long offsetA, int lengthA, int strideA, int codeRangeA, int fromIndex, int toIndex, Encoding encoding) { return TStringOps.indexOfAnyInt(location, arrayA, offsetA, strideA, fromIndex, toIndex, values); } @Override boolean codeEquals(IndexOfNode other) { return other instanceof IndexOfAnyValueNode && Arrays.equals(values, ((IndexOfAnyValueNode) other).values); } @Override IndexOfNode shallowCopy() { return IndexOfAnyValueNodeGen.create(maxCodeRange, values); } } /** * Match any of up to two ranges, without decoding. 
*/ abstract static class IndexOfAnyRangeNode extends OptimizedIndexOfNode { @CompilationFinal(dimensions = 1) final int[] ranges; IndexOfAnyRangeNode(int maxCodeRange, int[] ranges) { super(maxCodeRange); this.ranges = ranges; } @Override int runSearch(Node location, byte[] arrayA, long offsetA, int lengthA, int strideA, int codeRangeA, int fromIndex, int toIndex, Encoding encoding) { return TStringOps.indexOfAnyIntRange(location, arrayA, offsetA, strideA, fromIndex, toIndex, ranges); } @Override boolean codeEquals(IndexOfNode other) { return other instanceof IndexOfAnyRangeNode && Arrays.equals(ranges, ((IndexOfAnyRangeNode) other).ranges); } @Override IndexOfNode shallowCopy() { return IndexOfAnyRangeNodeGen.create(maxCodeRange, ranges); } } /** * Optimized search for bit set. */ abstract static class IndexOfTableNode extends OptimizedIndexOfNode { @CompilationFinal(dimensions = 1) final byte[] tables; IndexOfTableNode(int maxCodeRange, byte[] tables) { super(maxCodeRange); assert tables.length == TABLE_SIZE * 2; this.tables = tables; } @Override int runSearch(Node location, byte[] arrayA, long offsetA, int lengthA, int strideA, int codeRangeA, int fromIndex, int toIndex, Encoding encoding) { return TStringOps.indexOfTable(location, arrayA, offsetA, strideA, fromIndex, toIndex, tables); } @Override boolean codeEquals(IndexOfNode other) { return other instanceof IndexOfTableNode && Arrays.equals(tables, ((IndexOfTableNode) other).tables); } @Override IndexOfNode shallowCopy() { return IndexOfTableNodeGen.create(maxCodeRange, tables); } } abstract static class IndexOfStringNode extends OptimizedIndexOfNode { final TruffleString str; IndexOfStringNode(int maxCodeRange, TruffleString string) { super(maxCodeRange); this.str = string; } @Override int runSearch(Node location, byte[] arrayA, long offsetA, int lengthA, int strideA, int codeRangeA, int fromIndex, int toIndex, Encoding encoding) { assert str.isManaged() && str.isMaterialized() && str.offset() == 0; return 
TStringOps.indexOfStringWithOrMaskWithStride(location, arrayA, offsetA, lengthA, strideA, (byte[]) str.data(), byteArrayBaseOffset(), str.length(), str.stride(), fromIndex, toIndex, null); } @Override boolean codeEquals(IndexOfNode other) { return other instanceof IndexOfStringNode && str.equals(((IndexOfStringNode) other).str); } @Override IndexOfNode shallowCopy() { return IndexOfStringNodeGen.create(maxCodeRange, str); } } abstract static class IndexOfBitSetNode extends ScalarIndexOfNode { @CompilationFinal(dimensions = 1) final long[] bitSet; IndexOfBitSetNode(int maxCodeRange, long[] bitSet) { super(maxCodeRange); this.bitSet = bitSet; } @Override boolean match(int codepoint) { int wordIndex = codepoint >> 6; return wordIndex < bitSet.length && (bitSet[wordIndex] & 1L << (codepoint & 63)) != 0; } @Override boolean codeEquals(IndexOfNode other) { return other instanceof IndexOfBitSetNode && Arrays.equals(bitSet, ((IndexOfBitSetNode) other).bitSet); } @Override IndexOfNode shallowCopy() { return IndexOfBitSetNodeGen.create(maxCodeRange, bitSet); } static IndexOfBitSetNode fromRanges(int maxCodeRange, int[] ranges) { assert getMax(ranges) <= 0xff; long[] bitSet = new long[4]; for (int i = 0; i < ranges.length; i += 2) { setRange(bitSet, ranges[i], ranges[i + 1]); } return IndexOfBitSetNodeGen.create(maxCodeRange, bitSet); } /** * Sets all values contained in range {@code [lo-hi]} (inclusive) to {@code 1} in the given * long-array based bit set. 
*/ private static void setRange(long[] bitSet, int lo, int hi) { int wordIndexLo = lo >> 6; int wordIndexHi = hi >> 6; long rangeLo = (~0L) << lo; long rangeHi = (~0L) >>> (63 - (hi & 63)); if (wordIndexLo == wordIndexHi) { bitSet[wordIndexLo] |= rangeLo & rangeHi; return; } bitSet[wordIndexLo] |= rangeLo; for (int i = wordIndexLo + 1; i < wordIndexHi; i++) { bitSet[i] = ~0L; } bitSet[wordIndexHi] |= rangeHi; } } abstract static class IndexOfRangesNode extends ScalarIndexOfNode { @CompilationFinal(dimensions = 1) final int[] ranges; IndexOfRangesNode(int maxCodeRange, int[] ranges) { super(maxCodeRange); this.ranges = ranges; } @Override boolean match(int c) { return rangesContain(ranges, c); } static boolean rangesContain(int[] ranges, int c) { int fromIndex = 0; int toIndex = (ranges.length >>> 1) - 1; while (fromIndex <= toIndex) { final int mid = (fromIndex + toIndex) >>> 1; if (c < ranges[mid << 1]) { toIndex = mid - 1; } else if (c > ranges[(mid << 1) + 1]) { fromIndex = mid + 1; } else { return true; } } return false; } @Override boolean codeEquals(IndexOfNode other) { return other instanceof IndexOfRangesNode && Arrays.equals(ranges, ((IndexOfRangesNode) other).ranges); } @Override IndexOfNode shallowCopy() { return IndexOfRangesNodeGen.create(maxCodeRange, ranges); } } /** * Converts a given list of ranges to a lookup table suitable for {@link IndexOfTableNode}. * * @return the lookup table, or {@code null} if no suitable lookup table could be generated. */ private static byte[] generateTable(int[] ranges) { assert getMax(ranges) <= 0xff; /* * Convert ranges to a 16x16 bit set. 
Matching a byte with this bit set would work like * this: * * byte v = readByte(...); * * boolean match = (bitSet[v >>> 4] & (1 << (v & 0xf)) != 0; * * Now we have to transform this bit set to a 32-byte lookup table that can be matched like * this: * * boolean match = (table[v >>> 4] & table[16 + (v & 0xf)]) != 0; * * In the following (v >>> 4) is referred to as the "upper nibble" and (v & 0xf) is referred * to as the "lower nibble". */ char[] bitSet = new char[16]; for (int i = 0; i < ranges.length; i += 2) { setRange(bitSet, ranges[i], ranges[i + 1]); } // find equal 16-bit values in the 16x16 bit set char[] uniqueValues = new char[16]; int nUniqueValues = 0; for (char c : bitSet) { if (c != 0 && ArrayUtils.indexOf(uniqueValues, 0, uniqueValues.length, c) < 0) { uniqueValues[nUniqueValues++] = c; } } if (nUniqueValues <= 8) { return generateTableDirectMapping(ranges, bitSet, uniqueValues, nUniqueValues); } else { return generateTableTryDecomposition(ranges, bitSet, uniqueValues, nUniqueValues); } } private static byte[] generateTableDirectMapping(int[] ranges, char[] bitSet, char[] uniqueValues, int nUniqueValues) { byte[] tables = new byte[TABLE_SIZE * 2]; // If there are no more than 8 unique values, we can assign one unique bit per upper // nibble values: // iterate all possible upper nibble values for (int upperNibble = 0; upperNibble < TABLE_SIZE; upperNibble++) { if (bitSet[upperNibble] != 0) { // get the unique bit corresponding to the current upper nibble value byte uniqueBit = (byte) (1 << ArrayUtils.indexOf(uniqueValues, 0, nUniqueValues, bitSet[upperNibble])); // set upper nibble entry tables[upperNibble] = uniqueBit; // add the unique bit to all lower nibble entries that should match in // conjunction with the current upper nibble for (int lowerNibble = 0; lowerNibble < TABLE_SIZE; lowerNibble++) { if ((bitSet[upperNibble] & (1 << lowerNibble)) != 0) { tables[TABLE_SIZE + lowerNibble] |= uniqueBit; } } } } verifyTable(ranges, tables); return tables; } 
private static byte[] generateTableTryDecomposition(int[] ranges, char[] bitSet, char[] uniqueValues, int nUniqueValues) { assert nUniqueValues > 8; byte[] tables = new byte[TABLE_SIZE * 2]; // if we have more than 8 unique bit set values, try to reduce them by decomposition, i.e. // try to find values that can be expressed as a union of other values in the bit set CompositeBitSet[] bitSets = new CompositeBitSet[nUniqueValues]; for (int i = 0; i < nUniqueValues; i++) { bitSets[i] = new CompositeBitSet(); } int nComponents = nUniqueValues; ArrayList<CompositeBitSet> components = new ArrayList<>(); for (int i = 0; i < bitSets.length; i++) { char cur = uniqueValues[i]; char compositeValue = 0; components.clear(); for (int j = 0; j < bitSets.length; j++) { if (j == i) { continue; } if ((cur | uniqueValues[j]) == cur) { // uniqueValues[j] is a subset of cur, add it to the list of components compositeValue |= uniqueValues[j]; components.add(bitSets[j]); } } if (compositeValue == cur) { // we found a list of components whose union is exactly _cur_, save it bitSets[i].components = components.toArray(CompositeBitSet[]::new); nComponents--; } } if (nComponents > 8) { // if there are still more than 8 unique bit set values after decomposition, give up. return null; } byte uniqueBit = 1; for (int i = 0; i < bitSets.length; i++) { CompositeBitSet cbs = bitSets[i]; if (cbs.components == null) { assert uniqueBit != 0; // assign one unique bit per component that could _not_ be decomposed. 
cbs.uniqueBit = uniqueBit; // add the unique bit to all lower nibble entries that should match in // conjunction with the current upper nibble for (int lowerNibble = 0; lowerNibble < TABLE_SIZE; lowerNibble++) { if ((uniqueValues[i] & (1 << lowerNibble)) != 0) { tables[TABLE_SIZE + lowerNibble] |= uniqueBit; } } uniqueBit <<= 1; } } for (CompositeBitSet cbs : bitSets) { if (cbs.components != null) { // assign union of subcomponent's unique bits to decomposed values for (CompositeBitSet component : cbs.components) { cbs.uniqueBit |= component.uniqueBit; } } } // write upper nibble mapping to table for (int upperNibble = 0; upperNibble < TABLE_SIZE; upperNibble++) { if (bitSet[upperNibble] != 0) { tables[upperNibble] = bitSets[ArrayUtils.indexOf(uniqueValues, 0, nUniqueValues, bitSet[upperNibble])].uniqueBit; } } verifyTable(ranges, tables); return tables; } private static final class CompositeBitSet { private byte uniqueBit; private CompositeBitSet[] components; } /** * Sets all values contained in range {@code [lo-hi]} (inclusive) to {@code 1} in the given * char-array based bit set. */ private static void setRange(char[] bitSet, int lo, int hi) { int wordIndexLo = lo >> 4; int wordIndexHi = hi >> 4; char rangeLo = (char) (0xffff << (lo & 0xf)); char rangeHi = (char) (0xffff >>> (15 - (hi & 0xf))); if (wordIndexLo == wordIndexHi) { bitSet[wordIndexLo] |= (char) (rangeLo & rangeHi); return; } bitSet[wordIndexLo] |= rangeLo; for (int i = wordIndexLo + 1; i < wordIndexHi; i++) { bitSet[i] = (char) ~0; } bitSet[wordIndexHi] |= rangeHi; } private static void verifyTable(int[] expectedRanges, byte[] tables) { assert verifyTableInner(expectedRanges, tables); } private static boolean verifyTableInner(int[] expectedRanges, byte[] tables) { for (int i = 0; i <= 0xff; i++) { assert contains(expectedRanges, i) == ((tables[(i >>> 4) & 0xf] & tables[TABLE_SIZE + (i & 0xf)]) != 0); } return true; } }
googleapis/google-cloud-java
36,143
java-cloudsupport/proto-google-cloud-cloudsupport-v2/src/main/java/com/google/cloud/support/v2/ListCasesResponse.java
/* * Copyright 2025 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ // Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/cloud/support/v2/case_service.proto // Protobuf Java Version: 3.25.8 package com.google.cloud.support.v2; /** * * * <pre> * The response message for the ListCases endpoint. * </pre> * * Protobuf type {@code google.cloud.support.v2.ListCasesResponse} */ public final class ListCasesResponse extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.cloud.support.v2.ListCasesResponse) ListCasesResponseOrBuilder { private static final long serialVersionUID = 0L; // Use ListCasesResponse.newBuilder() to construct. 
private ListCasesResponse(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private ListCasesResponse() { cases_ = java.util.Collections.emptyList(); nextPageToken_ = ""; } @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance(UnusedPrivateParameter unused) { return new ListCasesResponse(); } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.support.v2.CaseServiceProto .internal_static_google_cloud_support_v2_ListCasesResponse_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.support.v2.CaseServiceProto .internal_static_google_cloud_support_v2_ListCasesResponse_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.support.v2.ListCasesResponse.class, com.google.cloud.support.v2.ListCasesResponse.Builder.class); } public static final int CASES_FIELD_NUMBER = 1; @SuppressWarnings("serial") private java.util.List<com.google.cloud.support.v2.Case> cases_; /** * * * <pre> * The list of cases associated with the parent after any * filters have been applied. * </pre> * * <code>repeated .google.cloud.support.v2.Case cases = 1;</code> */ @java.lang.Override public java.util.List<com.google.cloud.support.v2.Case> getCasesList() { return cases_; } /** * * * <pre> * The list of cases associated with the parent after any * filters have been applied. * </pre> * * <code>repeated .google.cloud.support.v2.Case cases = 1;</code> */ @java.lang.Override public java.util.List<? extends com.google.cloud.support.v2.CaseOrBuilder> getCasesOrBuilderList() { return cases_; } /** * * * <pre> * The list of cases associated with the parent after any * filters have been applied. 
* </pre> * * <code>repeated .google.cloud.support.v2.Case cases = 1;</code> */ @java.lang.Override public int getCasesCount() { return cases_.size(); } /** * * * <pre> * The list of cases associated with the parent after any * filters have been applied. * </pre> * * <code>repeated .google.cloud.support.v2.Case cases = 1;</code> */ @java.lang.Override public com.google.cloud.support.v2.Case getCases(int index) { return cases_.get(index); } /** * * * <pre> * The list of cases associated with the parent after any * filters have been applied. * </pre> * * <code>repeated .google.cloud.support.v2.Case cases = 1;</code> */ @java.lang.Override public com.google.cloud.support.v2.CaseOrBuilder getCasesOrBuilder(int index) { return cases_.get(index); } public static final int NEXT_PAGE_TOKEN_FIELD_NUMBER = 2; @SuppressWarnings("serial") private volatile java.lang.Object nextPageToken_ = ""; /** * * * <pre> * A token to retrieve the next page of results. Set this in the `page_token` * field of subsequent `cases.list` requests. If unspecified, there are no * more results to retrieve. * </pre> * * <code>string next_page_token = 2;</code> * * @return The nextPageToken. */ @java.lang.Override public java.lang.String getNextPageToken() { java.lang.Object ref = nextPageToken_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); nextPageToken_ = s; return s; } } /** * * * <pre> * A token to retrieve the next page of results. Set this in the `page_token` * field of subsequent `cases.list` requests. If unspecified, there are no * more results to retrieve. * </pre> * * <code>string next_page_token = 2;</code> * * @return The bytes for nextPageToken. 
*/ @java.lang.Override public com.google.protobuf.ByteString getNextPageTokenBytes() { java.lang.Object ref = nextPageToken_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); nextPageToken_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { for (int i = 0; i < cases_.size(); i++) { output.writeMessage(1, cases_.get(i)); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 2, nextPageToken_); } getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; for (int i = 0; i < cases_.size(); i++) { size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, cases_.get(i)); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, nextPageToken_); } size += getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof com.google.cloud.support.v2.ListCasesResponse)) { return super.equals(obj); } com.google.cloud.support.v2.ListCasesResponse other = (com.google.cloud.support.v2.ListCasesResponse) obj; if (!getCasesList().equals(other.getCasesList())) return false; if (!getNextPageToken().equals(other.getNextPageToken())) return false; if 
(!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); if (getCasesCount() > 0) { hash = (37 * hash) + CASES_FIELD_NUMBER; hash = (53 * hash) + getCasesList().hashCode(); } hash = (37 * hash) + NEXT_PAGE_TOKEN_FIELD_NUMBER; hash = (53 * hash) + getNextPageToken().hashCode(); hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static com.google.cloud.support.v2.ListCasesResponse parseFrom(java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.support.v2.ListCasesResponse parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.support.v2.ListCasesResponse parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.support.v2.ListCasesResponse parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.support.v2.ListCasesResponse parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.support.v2.ListCasesResponse parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.support.v2.ListCasesResponse 
parseFrom(java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.support.v2.ListCasesResponse parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.support.v2.ListCasesResponse parseDelimitedFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); } public static com.google.cloud.support.v2.ListCasesResponse parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.support.v2.ListCasesResponse parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.support.v2.ListCasesResponse parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder(com.google.cloud.support.v2.ListCasesResponse prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? 
new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * * * <pre> * The response message for the ListCases endpoint. * </pre> * * Protobuf type {@code google.cloud.support.v2.ListCasesResponse} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:google.cloud.support.v2.ListCasesResponse) com.google.cloud.support.v2.ListCasesResponseOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.support.v2.CaseServiceProto .internal_static_google_cloud_support_v2_ListCasesResponse_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.support.v2.CaseServiceProto .internal_static_google_cloud_support_v2_ListCasesResponse_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.support.v2.ListCasesResponse.class, com.google.cloud.support.v2.ListCasesResponse.Builder.class); } // Construct using com.google.cloud.support.v2.ListCasesResponse.newBuilder() private Builder() {} private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); } @java.lang.Override public Builder clear() { super.clear(); bitField0_ = 0; if (casesBuilder_ == null) { cases_ = java.util.Collections.emptyList(); } else { cases_ = null; casesBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000001); nextPageToken_ = ""; return this; } @java.lang.Override public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return com.google.cloud.support.v2.CaseServiceProto .internal_static_google_cloud_support_v2_ListCasesResponse_descriptor; } @java.lang.Override public com.google.cloud.support.v2.ListCasesResponse 
getDefaultInstanceForType() { return com.google.cloud.support.v2.ListCasesResponse.getDefaultInstance(); } @java.lang.Override public com.google.cloud.support.v2.ListCasesResponse build() { com.google.cloud.support.v2.ListCasesResponse result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public com.google.cloud.support.v2.ListCasesResponse buildPartial() { com.google.cloud.support.v2.ListCasesResponse result = new com.google.cloud.support.v2.ListCasesResponse(this); buildPartialRepeatedFields(result); if (bitField0_ != 0) { buildPartial0(result); } onBuilt(); return result; } private void buildPartialRepeatedFields(com.google.cloud.support.v2.ListCasesResponse result) { if (casesBuilder_ == null) { if (((bitField0_ & 0x00000001) != 0)) { cases_ = java.util.Collections.unmodifiableList(cases_); bitField0_ = (bitField0_ & ~0x00000001); } result.cases_ = cases_; } else { result.cases_ = casesBuilder_.build(); } } private void buildPartial0(com.google.cloud.support.v2.ListCasesResponse result) { int from_bitField0_ = bitField0_; if (((from_bitField0_ & 0x00000002) != 0)) { result.nextPageToken_ = nextPageToken_; } } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( 
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof com.google.cloud.support.v2.ListCasesResponse) { return mergeFrom((com.google.cloud.support.v2.ListCasesResponse) other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(com.google.cloud.support.v2.ListCasesResponse other) { if (other == com.google.cloud.support.v2.ListCasesResponse.getDefaultInstance()) return this; if (casesBuilder_ == null) { if (!other.cases_.isEmpty()) { if (cases_.isEmpty()) { cases_ = other.cases_; bitField0_ = (bitField0_ & ~0x00000001); } else { ensureCasesIsMutable(); cases_.addAll(other.cases_); } onChanged(); } } else { if (!other.cases_.isEmpty()) { if (casesBuilder_.isEmpty()) { casesBuilder_.dispose(); casesBuilder_ = null; cases_ = other.cases_; bitField0_ = (bitField0_ & ~0x00000001); casesBuilder_ = com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ? 
getCasesFieldBuilder() : null; } else { casesBuilder_.addAllMessages(other.cases_); } } } if (!other.getNextPageToken().isEmpty()) { nextPageToken_ = other.nextPageToken_; bitField0_ |= 0x00000002; onChanged(); } this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { com.google.cloud.support.v2.Case m = input.readMessage(com.google.cloud.support.v2.Case.parser(), extensionRegistry); if (casesBuilder_ == null) { ensureCasesIsMutable(); cases_.add(m); } else { casesBuilder_.addMessage(m); } break; } // case 10 case 18: { nextPageToken_ = input.readStringRequireUtf8(); bitField0_ |= 0x00000002; break; } // case 18 default: { if (!super.parseUnknownField(input, extensionRegistry, tag)) { done = true; // was an endgroup tag } break; } // default: } // switch (tag) } // while (!done) } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.unwrapIOException(); } finally { onChanged(); } // finally return this; } private int bitField0_; private java.util.List<com.google.cloud.support.v2.Case> cases_ = java.util.Collections.emptyList(); private void ensureCasesIsMutable() { if (!((bitField0_ & 0x00000001) != 0)) { cases_ = new java.util.ArrayList<com.google.cloud.support.v2.Case>(cases_); bitField0_ |= 0x00000001; } } private com.google.protobuf.RepeatedFieldBuilderV3< com.google.cloud.support.v2.Case, com.google.cloud.support.v2.Case.Builder, com.google.cloud.support.v2.CaseOrBuilder> casesBuilder_; /** * * * <pre> * The list of cases associated with the parent after any * filters 
have been applied. * </pre> * * <code>repeated .google.cloud.support.v2.Case cases = 1;</code> */ public java.util.List<com.google.cloud.support.v2.Case> getCasesList() { if (casesBuilder_ == null) { return java.util.Collections.unmodifiableList(cases_); } else { return casesBuilder_.getMessageList(); } } /** * * * <pre> * The list of cases associated with the parent after any * filters have been applied. * </pre> * * <code>repeated .google.cloud.support.v2.Case cases = 1;</code> */ public int getCasesCount() { if (casesBuilder_ == null) { return cases_.size(); } else { return casesBuilder_.getCount(); } } /** * * * <pre> * The list of cases associated with the parent after any * filters have been applied. * </pre> * * <code>repeated .google.cloud.support.v2.Case cases = 1;</code> */ public com.google.cloud.support.v2.Case getCases(int index) { if (casesBuilder_ == null) { return cases_.get(index); } else { return casesBuilder_.getMessage(index); } } /** * * * <pre> * The list of cases associated with the parent after any * filters have been applied. * </pre> * * <code>repeated .google.cloud.support.v2.Case cases = 1;</code> */ public Builder setCases(int index, com.google.cloud.support.v2.Case value) { if (casesBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureCasesIsMutable(); cases_.set(index, value); onChanged(); } else { casesBuilder_.setMessage(index, value); } return this; } /** * * * <pre> * The list of cases associated with the parent after any * filters have been applied. 
* </pre> * * <code>repeated .google.cloud.support.v2.Case cases = 1;</code> */ public Builder setCases(int index, com.google.cloud.support.v2.Case.Builder builderForValue) { if (casesBuilder_ == null) { ensureCasesIsMutable(); cases_.set(index, builderForValue.build()); onChanged(); } else { casesBuilder_.setMessage(index, builderForValue.build()); } return this; } /** * * * <pre> * The list of cases associated with the parent after any * filters have been applied. * </pre> * * <code>repeated .google.cloud.support.v2.Case cases = 1;</code> */ public Builder addCases(com.google.cloud.support.v2.Case value) { if (casesBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureCasesIsMutable(); cases_.add(value); onChanged(); } else { casesBuilder_.addMessage(value); } return this; } /** * * * <pre> * The list of cases associated with the parent after any * filters have been applied. * </pre> * * <code>repeated .google.cloud.support.v2.Case cases = 1;</code> */ public Builder addCases(int index, com.google.cloud.support.v2.Case value) { if (casesBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureCasesIsMutable(); cases_.add(index, value); onChanged(); } else { casesBuilder_.addMessage(index, value); } return this; } /** * * * <pre> * The list of cases associated with the parent after any * filters have been applied. * </pre> * * <code>repeated .google.cloud.support.v2.Case cases = 1;</code> */ public Builder addCases(com.google.cloud.support.v2.Case.Builder builderForValue) { if (casesBuilder_ == null) { ensureCasesIsMutable(); cases_.add(builderForValue.build()); onChanged(); } else { casesBuilder_.addMessage(builderForValue.build()); } return this; } /** * * * <pre> * The list of cases associated with the parent after any * filters have been applied. 
* </pre> * * <code>repeated .google.cloud.support.v2.Case cases = 1;</code> */ public Builder addCases(int index, com.google.cloud.support.v2.Case.Builder builderForValue) { if (casesBuilder_ == null) { ensureCasesIsMutable(); cases_.add(index, builderForValue.build()); onChanged(); } else { casesBuilder_.addMessage(index, builderForValue.build()); } return this; } /** * * * <pre> * The list of cases associated with the parent after any * filters have been applied. * </pre> * * <code>repeated .google.cloud.support.v2.Case cases = 1;</code> */ public Builder addAllCases( java.lang.Iterable<? extends com.google.cloud.support.v2.Case> values) { if (casesBuilder_ == null) { ensureCasesIsMutable(); com.google.protobuf.AbstractMessageLite.Builder.addAll(values, cases_); onChanged(); } else { casesBuilder_.addAllMessages(values); } return this; } /** * * * <pre> * The list of cases associated with the parent after any * filters have been applied. * </pre> * * <code>repeated .google.cloud.support.v2.Case cases = 1;</code> */ public Builder clearCases() { if (casesBuilder_ == null) { cases_ = java.util.Collections.emptyList(); bitField0_ = (bitField0_ & ~0x00000001); onChanged(); } else { casesBuilder_.clear(); } return this; } /** * * * <pre> * The list of cases associated with the parent after any * filters have been applied. * </pre> * * <code>repeated .google.cloud.support.v2.Case cases = 1;</code> */ public Builder removeCases(int index) { if (casesBuilder_ == null) { ensureCasesIsMutable(); cases_.remove(index); onChanged(); } else { casesBuilder_.remove(index); } return this; } /** * * * <pre> * The list of cases associated with the parent after any * filters have been applied. 
* </pre> * * <code>repeated .google.cloud.support.v2.Case cases = 1;</code> */ public com.google.cloud.support.v2.Case.Builder getCasesBuilder(int index) { return getCasesFieldBuilder().getBuilder(index); } /** * * * <pre> * The list of cases associated with the parent after any * filters have been applied. * </pre> * * <code>repeated .google.cloud.support.v2.Case cases = 1;</code> */ public com.google.cloud.support.v2.CaseOrBuilder getCasesOrBuilder(int index) { if (casesBuilder_ == null) { return cases_.get(index); } else { return casesBuilder_.getMessageOrBuilder(index); } } /** * * * <pre> * The list of cases associated with the parent after any * filters have been applied. * </pre> * * <code>repeated .google.cloud.support.v2.Case cases = 1;</code> */ public java.util.List<? extends com.google.cloud.support.v2.CaseOrBuilder> getCasesOrBuilderList() { if (casesBuilder_ != null) { return casesBuilder_.getMessageOrBuilderList(); } else { return java.util.Collections.unmodifiableList(cases_); } } /** * * * <pre> * The list of cases associated with the parent after any * filters have been applied. * </pre> * * <code>repeated .google.cloud.support.v2.Case cases = 1;</code> */ public com.google.cloud.support.v2.Case.Builder addCasesBuilder() { return getCasesFieldBuilder() .addBuilder(com.google.cloud.support.v2.Case.getDefaultInstance()); } /** * * * <pre> * The list of cases associated with the parent after any * filters have been applied. * </pre> * * <code>repeated .google.cloud.support.v2.Case cases = 1;</code> */ public com.google.cloud.support.v2.Case.Builder addCasesBuilder(int index) { return getCasesFieldBuilder() .addBuilder(index, com.google.cloud.support.v2.Case.getDefaultInstance()); } /** * * * <pre> * The list of cases associated with the parent after any * filters have been applied. 
* </pre> * * <code>repeated .google.cloud.support.v2.Case cases = 1;</code> */ public java.util.List<com.google.cloud.support.v2.Case.Builder> getCasesBuilderList() { return getCasesFieldBuilder().getBuilderList(); } private com.google.protobuf.RepeatedFieldBuilderV3< com.google.cloud.support.v2.Case, com.google.cloud.support.v2.Case.Builder, com.google.cloud.support.v2.CaseOrBuilder> getCasesFieldBuilder() { if (casesBuilder_ == null) { casesBuilder_ = new com.google.protobuf.RepeatedFieldBuilderV3< com.google.cloud.support.v2.Case, com.google.cloud.support.v2.Case.Builder, com.google.cloud.support.v2.CaseOrBuilder>( cases_, ((bitField0_ & 0x00000001) != 0), getParentForChildren(), isClean()); cases_ = null; } return casesBuilder_; } private java.lang.Object nextPageToken_ = ""; /** * * * <pre> * A token to retrieve the next page of results. Set this in the `page_token` * field of subsequent `cases.list` requests. If unspecified, there are no * more results to retrieve. * </pre> * * <code>string next_page_token = 2;</code> * * @return The nextPageToken. */ public java.lang.String getNextPageToken() { java.lang.Object ref = nextPageToken_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); nextPageToken_ = s; return s; } else { return (java.lang.String) ref; } } /** * * * <pre> * A token to retrieve the next page of results. Set this in the `page_token` * field of subsequent `cases.list` requests. If unspecified, there are no * more results to retrieve. * </pre> * * <code>string next_page_token = 2;</code> * * @return The bytes for nextPageToken. 
*/ public com.google.protobuf.ByteString getNextPageTokenBytes() { java.lang.Object ref = nextPageToken_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); nextPageToken_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * * * <pre> * A token to retrieve the next page of results. Set this in the `page_token` * field of subsequent `cases.list` requests. If unspecified, there are no * more results to retrieve. * </pre> * * <code>string next_page_token = 2;</code> * * @param value The nextPageToken to set. * @return This builder for chaining. */ public Builder setNextPageToken(java.lang.String value) { if (value == null) { throw new NullPointerException(); } nextPageToken_ = value; bitField0_ |= 0x00000002; onChanged(); return this; } /** * * * <pre> * A token to retrieve the next page of results. Set this in the `page_token` * field of subsequent `cases.list` requests. If unspecified, there are no * more results to retrieve. * </pre> * * <code>string next_page_token = 2;</code> * * @return This builder for chaining. */ public Builder clearNextPageToken() { nextPageToken_ = getDefaultInstance().getNextPageToken(); bitField0_ = (bitField0_ & ~0x00000002); onChanged(); return this; } /** * * * <pre> * A token to retrieve the next page of results. Set this in the `page_token` * field of subsequent `cases.list` requests. If unspecified, there are no * more results to retrieve. * </pre> * * <code>string next_page_token = 2;</code> * * @param value The bytes for nextPageToken to set. * @return This builder for chaining. 
*/ public Builder setNextPageTokenBytes(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); nextPageToken_ = value; bitField0_ |= 0x00000002; onChanged(); return this; } @java.lang.Override public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:google.cloud.support.v2.ListCasesResponse) } // @@protoc_insertion_point(class_scope:google.cloud.support.v2.ListCasesResponse) private static final com.google.cloud.support.v2.ListCasesResponse DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new com.google.cloud.support.v2.ListCasesResponse(); } public static com.google.cloud.support.v2.ListCasesResponse getDefaultInstance() { return DEFAULT_INSTANCE; } private static final com.google.protobuf.Parser<ListCasesResponse> PARSER = new com.google.protobuf.AbstractParser<ListCasesResponse>() { @java.lang.Override public ListCasesResponse parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { Builder builder = newBuilder(); try { builder.mergeFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(builder.buildPartial()); } catch (com.google.protobuf.UninitializedMessageException e) { throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException(e) .setUnfinishedMessage(builder.buildPartial()); } return builder.buildPartial(); } }; public static com.google.protobuf.Parser<ListCasesResponse> 
parser() { return PARSER; } @java.lang.Override public com.google.protobuf.Parser<ListCasesResponse> getParserForType() { return PARSER; } @java.lang.Override public com.google.cloud.support.v2.ListCasesResponse getDefaultInstanceForType() { return DEFAULT_INSTANCE; } }
googleapis/google-cloud-java
36,188
java-securesourcemanager/proto-google-cloud-securesourcemanager-v1/src/main/java/com/google/cloud/securesourcemanager/v1/UpdateIssueRequest.java
/* * Copyright 2025 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ // Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/cloud/securesourcemanager/v1/secure_source_manager.proto // Protobuf Java Version: 3.25.8 package com.google.cloud.securesourcemanager.v1; /** * * * <pre> * The request to update an issue. * </pre> * * Protobuf type {@code google.cloud.securesourcemanager.v1.UpdateIssueRequest} */ public final class UpdateIssueRequest extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.cloud.securesourcemanager.v1.UpdateIssueRequest) UpdateIssueRequestOrBuilder { private static final long serialVersionUID = 0L; // Use UpdateIssueRequest.newBuilder() to construct. 
private UpdateIssueRequest(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private UpdateIssueRequest() {} @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance(UnusedPrivateParameter unused) { return new UpdateIssueRequest(); } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.securesourcemanager.v1.SecureSourceManagerProto .internal_static_google_cloud_securesourcemanager_v1_UpdateIssueRequest_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.securesourcemanager.v1.SecureSourceManagerProto .internal_static_google_cloud_securesourcemanager_v1_UpdateIssueRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.securesourcemanager.v1.UpdateIssueRequest.class, com.google.cloud.securesourcemanager.v1.UpdateIssueRequest.Builder.class); } private int bitField0_; public static final int ISSUE_FIELD_NUMBER = 1; private com.google.cloud.securesourcemanager.v1.Issue issue_; /** * * * <pre> * Required. The issue to update. * </pre> * * <code> * .google.cloud.securesourcemanager.v1.Issue issue = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return Whether the issue field is set. */ @java.lang.Override public boolean hasIssue() { return ((bitField0_ & 0x00000001) != 0); } /** * * * <pre> * Required. The issue to update. * </pre> * * <code> * .google.cloud.securesourcemanager.v1.Issue issue = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return The issue. */ @java.lang.Override public com.google.cloud.securesourcemanager.v1.Issue getIssue() { return issue_ == null ? com.google.cloud.securesourcemanager.v1.Issue.getDefaultInstance() : issue_; } /** * * * <pre> * Required. The issue to update. 
* </pre> * * <code> * .google.cloud.securesourcemanager.v1.Issue issue = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> */ @java.lang.Override public com.google.cloud.securesourcemanager.v1.IssueOrBuilder getIssueOrBuilder() { return issue_ == null ? com.google.cloud.securesourcemanager.v1.Issue.getDefaultInstance() : issue_; } public static final int UPDATE_MASK_FIELD_NUMBER = 2; private com.google.protobuf.FieldMask updateMask_; /** * * * <pre> * Optional. Field mask is used to specify the fields to be overwritten in the * issue resource by the update. * The fields specified in the update_mask are relative to the resource, not * the full request. A field will be overwritten if it is in the mask. * The special value "*" means full replacement. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = OPTIONAL]; * </code> * * @return Whether the updateMask field is set. */ @java.lang.Override public boolean hasUpdateMask() { return ((bitField0_ & 0x00000002) != 0); } /** * * * <pre> * Optional. Field mask is used to specify the fields to be overwritten in the * issue resource by the update. * The fields specified in the update_mask are relative to the resource, not * the full request. A field will be overwritten if it is in the mask. * The special value "*" means full replacement. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = OPTIONAL]; * </code> * * @return The updateMask. */ @java.lang.Override public com.google.protobuf.FieldMask getUpdateMask() { return updateMask_ == null ? com.google.protobuf.FieldMask.getDefaultInstance() : updateMask_; } /** * * * <pre> * Optional. Field mask is used to specify the fields to be overwritten in the * issue resource by the update. * The fields specified in the update_mask are relative to the resource, not * the full request. A field will be overwritten if it is in the mask. * The special value "*" means full replacement. 
* </pre> * * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = OPTIONAL]; * </code> */ @java.lang.Override public com.google.protobuf.FieldMaskOrBuilder getUpdateMaskOrBuilder() { return updateMask_ == null ? com.google.protobuf.FieldMask.getDefaultInstance() : updateMask_; } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { if (((bitField0_ & 0x00000001) != 0)) { output.writeMessage(1, getIssue()); } if (((bitField0_ & 0x00000002) != 0)) { output.writeMessage(2, getUpdateMask()); } getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) != 0)) { size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, getIssue()); } if (((bitField0_ & 0x00000002) != 0)) { size += com.google.protobuf.CodedOutputStream.computeMessageSize(2, getUpdateMask()); } size += getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof com.google.cloud.securesourcemanager.v1.UpdateIssueRequest)) { return super.equals(obj); } com.google.cloud.securesourcemanager.v1.UpdateIssueRequest other = (com.google.cloud.securesourcemanager.v1.UpdateIssueRequest) obj; if (hasIssue() != other.hasIssue()) return false; if (hasIssue()) { if (!getIssue().equals(other.getIssue())) return false; } if (hasUpdateMask() != other.hasUpdateMask()) return false; if (hasUpdateMask()) { if (!getUpdateMask().equals(other.getUpdateMask())) return false; } if 
(!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); if (hasIssue()) { hash = (37 * hash) + ISSUE_FIELD_NUMBER; hash = (53 * hash) + getIssue().hashCode(); } if (hasUpdateMask()) { hash = (37 * hash) + UPDATE_MASK_FIELD_NUMBER; hash = (53 * hash) + getUpdateMask().hashCode(); } hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static com.google.cloud.securesourcemanager.v1.UpdateIssueRequest parseFrom( java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.securesourcemanager.v1.UpdateIssueRequest parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.securesourcemanager.v1.UpdateIssueRequest parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.securesourcemanager.v1.UpdateIssueRequest parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.securesourcemanager.v1.UpdateIssueRequest parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.securesourcemanager.v1.UpdateIssueRequest parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, 
extensionRegistry); } public static com.google.cloud.securesourcemanager.v1.UpdateIssueRequest parseFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.securesourcemanager.v1.UpdateIssueRequest parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.securesourcemanager.v1.UpdateIssueRequest parseDelimitedFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); } public static com.google.cloud.securesourcemanager.v1.UpdateIssueRequest parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.securesourcemanager.v1.UpdateIssueRequest parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.securesourcemanager.v1.UpdateIssueRequest parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder( com.google.cloud.securesourcemanager.v1.UpdateIssueRequest prototype) { return 
DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * * * <pre> * The request to update an issue. * </pre> * * Protobuf type {@code google.cloud.securesourcemanager.v1.UpdateIssueRequest} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:google.cloud.securesourcemanager.v1.UpdateIssueRequest) com.google.cloud.securesourcemanager.v1.UpdateIssueRequestOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.securesourcemanager.v1.SecureSourceManagerProto .internal_static_google_cloud_securesourcemanager_v1_UpdateIssueRequest_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.securesourcemanager.v1.SecureSourceManagerProto .internal_static_google_cloud_securesourcemanager_v1_UpdateIssueRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.securesourcemanager.v1.UpdateIssueRequest.class, com.google.cloud.securesourcemanager.v1.UpdateIssueRequest.Builder.class); } // Construct using com.google.cloud.securesourcemanager.v1.UpdateIssueRequest.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) { getIssueFieldBuilder(); getUpdateMaskFieldBuilder(); } } @java.lang.Override public Builder clear() { 
super.clear(); bitField0_ = 0; issue_ = null; if (issueBuilder_ != null) { issueBuilder_.dispose(); issueBuilder_ = null; } updateMask_ = null; if (updateMaskBuilder_ != null) { updateMaskBuilder_.dispose(); updateMaskBuilder_ = null; } return this; } @java.lang.Override public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return com.google.cloud.securesourcemanager.v1.SecureSourceManagerProto .internal_static_google_cloud_securesourcemanager_v1_UpdateIssueRequest_descriptor; } @java.lang.Override public com.google.cloud.securesourcemanager.v1.UpdateIssueRequest getDefaultInstanceForType() { return com.google.cloud.securesourcemanager.v1.UpdateIssueRequest.getDefaultInstance(); } @java.lang.Override public com.google.cloud.securesourcemanager.v1.UpdateIssueRequest build() { com.google.cloud.securesourcemanager.v1.UpdateIssueRequest result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public com.google.cloud.securesourcemanager.v1.UpdateIssueRequest buildPartial() { com.google.cloud.securesourcemanager.v1.UpdateIssueRequest result = new com.google.cloud.securesourcemanager.v1.UpdateIssueRequest(this); if (bitField0_ != 0) { buildPartial0(result); } onBuilt(); return result; } private void buildPartial0(com.google.cloud.securesourcemanager.v1.UpdateIssueRequest result) { int from_bitField0_ = bitField0_; int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000001) != 0)) { result.issue_ = issueBuilder_ == null ? issue_ : issueBuilder_.build(); to_bitField0_ |= 0x00000001; } if (((from_bitField0_ & 0x00000002) != 0)) { result.updateMask_ = updateMaskBuilder_ == null ? 
updateMask_ : updateMaskBuilder_.build(); to_bitField0_ |= 0x00000002; } result.bitField0_ |= to_bitField0_; } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof com.google.cloud.securesourcemanager.v1.UpdateIssueRequest) { return mergeFrom((com.google.cloud.securesourcemanager.v1.UpdateIssueRequest) other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(com.google.cloud.securesourcemanager.v1.UpdateIssueRequest other) { if (other == com.google.cloud.securesourcemanager.v1.UpdateIssueRequest.getDefaultInstance()) return this; if (other.hasIssue()) { mergeIssue(other.getIssue()); } if (other.hasUpdateMask()) { mergeUpdateMask(other.getUpdateMask()); } this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { if (extensionRegistry == null) { throw 
new java.lang.NullPointerException(); } try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { input.readMessage(getIssueFieldBuilder().getBuilder(), extensionRegistry); bitField0_ |= 0x00000001; break; } // case 10 case 18: { input.readMessage(getUpdateMaskFieldBuilder().getBuilder(), extensionRegistry); bitField0_ |= 0x00000002; break; } // case 18 default: { if (!super.parseUnknownField(input, extensionRegistry, tag)) { done = true; // was an endgroup tag } break; } // default: } // switch (tag) } // while (!done) } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.unwrapIOException(); } finally { onChanged(); } // finally return this; } private int bitField0_; private com.google.cloud.securesourcemanager.v1.Issue issue_; private com.google.protobuf.SingleFieldBuilderV3< com.google.cloud.securesourcemanager.v1.Issue, com.google.cloud.securesourcemanager.v1.Issue.Builder, com.google.cloud.securesourcemanager.v1.IssueOrBuilder> issueBuilder_; /** * * * <pre> * Required. The issue to update. * </pre> * * <code> * .google.cloud.securesourcemanager.v1.Issue issue = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return Whether the issue field is set. */ public boolean hasIssue() { return ((bitField0_ & 0x00000001) != 0); } /** * * * <pre> * Required. The issue to update. * </pre> * * <code> * .google.cloud.securesourcemanager.v1.Issue issue = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return The issue. */ public com.google.cloud.securesourcemanager.v1.Issue getIssue() { if (issueBuilder_ == null) { return issue_ == null ? com.google.cloud.securesourcemanager.v1.Issue.getDefaultInstance() : issue_; } else { return issueBuilder_.getMessage(); } } /** * * * <pre> * Required. The issue to update. 
* </pre> * * <code> * .google.cloud.securesourcemanager.v1.Issue issue = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder setIssue(com.google.cloud.securesourcemanager.v1.Issue value) { if (issueBuilder_ == null) { if (value == null) { throw new NullPointerException(); } issue_ = value; } else { issueBuilder_.setMessage(value); } bitField0_ |= 0x00000001; onChanged(); return this; } /** * * * <pre> * Required. The issue to update. * </pre> * * <code> * .google.cloud.securesourcemanager.v1.Issue issue = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder setIssue(com.google.cloud.securesourcemanager.v1.Issue.Builder builderForValue) { if (issueBuilder_ == null) { issue_ = builderForValue.build(); } else { issueBuilder_.setMessage(builderForValue.build()); } bitField0_ |= 0x00000001; onChanged(); return this; } /** * * * <pre> * Required. The issue to update. * </pre> * * <code> * .google.cloud.securesourcemanager.v1.Issue issue = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder mergeIssue(com.google.cloud.securesourcemanager.v1.Issue value) { if (issueBuilder_ == null) { if (((bitField0_ & 0x00000001) != 0) && issue_ != null && issue_ != com.google.cloud.securesourcemanager.v1.Issue.getDefaultInstance()) { getIssueBuilder().mergeFrom(value); } else { issue_ = value; } } else { issueBuilder_.mergeFrom(value); } if (issue_ != null) { bitField0_ |= 0x00000001; onChanged(); } return this; } /** * * * <pre> * Required. The issue to update. * </pre> * * <code> * .google.cloud.securesourcemanager.v1.Issue issue = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder clearIssue() { bitField0_ = (bitField0_ & ~0x00000001); issue_ = null; if (issueBuilder_ != null) { issueBuilder_.dispose(); issueBuilder_ = null; } onChanged(); return this; } /** * * * <pre> * Required. The issue to update. 
* </pre> * * <code> * .google.cloud.securesourcemanager.v1.Issue issue = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public com.google.cloud.securesourcemanager.v1.Issue.Builder getIssueBuilder() { bitField0_ |= 0x00000001; onChanged(); return getIssueFieldBuilder().getBuilder(); } /** * * * <pre> * Required. The issue to update. * </pre> * * <code> * .google.cloud.securesourcemanager.v1.Issue issue = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public com.google.cloud.securesourcemanager.v1.IssueOrBuilder getIssueOrBuilder() { if (issueBuilder_ != null) { return issueBuilder_.getMessageOrBuilder(); } else { return issue_ == null ? com.google.cloud.securesourcemanager.v1.Issue.getDefaultInstance() : issue_; } } /** * * * <pre> * Required. The issue to update. * </pre> * * <code> * .google.cloud.securesourcemanager.v1.Issue issue = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> */ private com.google.protobuf.SingleFieldBuilderV3< com.google.cloud.securesourcemanager.v1.Issue, com.google.cloud.securesourcemanager.v1.Issue.Builder, com.google.cloud.securesourcemanager.v1.IssueOrBuilder> getIssueFieldBuilder() { if (issueBuilder_ == null) { issueBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< com.google.cloud.securesourcemanager.v1.Issue, com.google.cloud.securesourcemanager.v1.Issue.Builder, com.google.cloud.securesourcemanager.v1.IssueOrBuilder>( getIssue(), getParentForChildren(), isClean()); issue_ = null; } return issueBuilder_; } private com.google.protobuf.FieldMask updateMask_; private com.google.protobuf.SingleFieldBuilderV3< com.google.protobuf.FieldMask, com.google.protobuf.FieldMask.Builder, com.google.protobuf.FieldMaskOrBuilder> updateMaskBuilder_; /** * * * <pre> * Optional. Field mask is used to specify the fields to be overwritten in the * issue resource by the update. * The fields specified in the update_mask are relative to the resource, not * the full request. 
A field will be overwritten if it is in the mask. * The special value "*" means full replacement. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = OPTIONAL]; * </code> * * @return Whether the updateMask field is set. */ public boolean hasUpdateMask() { return ((bitField0_ & 0x00000002) != 0); } /** * * * <pre> * Optional. Field mask is used to specify the fields to be overwritten in the * issue resource by the update. * The fields specified in the update_mask are relative to the resource, not * the full request. A field will be overwritten if it is in the mask. * The special value "*" means full replacement. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = OPTIONAL]; * </code> * * @return The updateMask. */ public com.google.protobuf.FieldMask getUpdateMask() { if (updateMaskBuilder_ == null) { return updateMask_ == null ? com.google.protobuf.FieldMask.getDefaultInstance() : updateMask_; } else { return updateMaskBuilder_.getMessage(); } } /** * * * <pre> * Optional. Field mask is used to specify the fields to be overwritten in the * issue resource by the update. * The fields specified in the update_mask are relative to the resource, not * the full request. A field will be overwritten if it is in the mask. * The special value "*" means full replacement. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = OPTIONAL]; * </code> */ public Builder setUpdateMask(com.google.protobuf.FieldMask value) { if (updateMaskBuilder_ == null) { if (value == null) { throw new NullPointerException(); } updateMask_ = value; } else { updateMaskBuilder_.setMessage(value); } bitField0_ |= 0x00000002; onChanged(); return this; } /** * * * <pre> * Optional. Field mask is used to specify the fields to be overwritten in the * issue resource by the update. * The fields specified in the update_mask are relative to the resource, not * the full request. 
A field will be overwritten if it is in the mask. * The special value "*" means full replacement. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = OPTIONAL]; * </code> */ public Builder setUpdateMask(com.google.protobuf.FieldMask.Builder builderForValue) { if (updateMaskBuilder_ == null) { updateMask_ = builderForValue.build(); } else { updateMaskBuilder_.setMessage(builderForValue.build()); } bitField0_ |= 0x00000002; onChanged(); return this; } /** * * * <pre> * Optional. Field mask is used to specify the fields to be overwritten in the * issue resource by the update. * The fields specified in the update_mask are relative to the resource, not * the full request. A field will be overwritten if it is in the mask. * The special value "*" means full replacement. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = OPTIONAL]; * </code> */ public Builder mergeUpdateMask(com.google.protobuf.FieldMask value) { if (updateMaskBuilder_ == null) { if (((bitField0_ & 0x00000002) != 0) && updateMask_ != null && updateMask_ != com.google.protobuf.FieldMask.getDefaultInstance()) { getUpdateMaskBuilder().mergeFrom(value); } else { updateMask_ = value; } } else { updateMaskBuilder_.mergeFrom(value); } if (updateMask_ != null) { bitField0_ |= 0x00000002; onChanged(); } return this; } /** * * * <pre> * Optional. Field mask is used to specify the fields to be overwritten in the * issue resource by the update. * The fields specified in the update_mask are relative to the resource, not * the full request. A field will be overwritten if it is in the mask. * The special value "*" means full replacement. 
* </pre> * * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = OPTIONAL]; * </code> */ public Builder clearUpdateMask() { bitField0_ = (bitField0_ & ~0x00000002); updateMask_ = null; if (updateMaskBuilder_ != null) { updateMaskBuilder_.dispose(); updateMaskBuilder_ = null; } onChanged(); return this; } /** * * * <pre> * Optional. Field mask is used to specify the fields to be overwritten in the * issue resource by the update. * The fields specified in the update_mask are relative to the resource, not * the full request. A field will be overwritten if it is in the mask. * The special value "*" means full replacement. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = OPTIONAL]; * </code> */ public com.google.protobuf.FieldMask.Builder getUpdateMaskBuilder() { bitField0_ |= 0x00000002; onChanged(); return getUpdateMaskFieldBuilder().getBuilder(); } /** * * * <pre> * Optional. Field mask is used to specify the fields to be overwritten in the * issue resource by the update. * The fields specified in the update_mask are relative to the resource, not * the full request. A field will be overwritten if it is in the mask. * The special value "*" means full replacement. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = OPTIONAL]; * </code> */ public com.google.protobuf.FieldMaskOrBuilder getUpdateMaskOrBuilder() { if (updateMaskBuilder_ != null) { return updateMaskBuilder_.getMessageOrBuilder(); } else { return updateMask_ == null ? com.google.protobuf.FieldMask.getDefaultInstance() : updateMask_; } } /** * * * <pre> * Optional. Field mask is used to specify the fields to be overwritten in the * issue resource by the update. * The fields specified in the update_mask are relative to the resource, not * the full request. A field will be overwritten if it is in the mask. * The special value "*" means full replacement. 
* </pre> * * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = OPTIONAL]; * </code> */ private com.google.protobuf.SingleFieldBuilderV3< com.google.protobuf.FieldMask, com.google.protobuf.FieldMask.Builder, com.google.protobuf.FieldMaskOrBuilder> getUpdateMaskFieldBuilder() { if (updateMaskBuilder_ == null) { updateMaskBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< com.google.protobuf.FieldMask, com.google.protobuf.FieldMask.Builder, com.google.protobuf.FieldMaskOrBuilder>( getUpdateMask(), getParentForChildren(), isClean()); updateMask_ = null; } return updateMaskBuilder_; } @java.lang.Override public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:google.cloud.securesourcemanager.v1.UpdateIssueRequest) } // @@protoc_insertion_point(class_scope:google.cloud.securesourcemanager.v1.UpdateIssueRequest) private static final com.google.cloud.securesourcemanager.v1.UpdateIssueRequest DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new com.google.cloud.securesourcemanager.v1.UpdateIssueRequest(); } public static com.google.cloud.securesourcemanager.v1.UpdateIssueRequest getDefaultInstance() { return DEFAULT_INSTANCE; } private static final com.google.protobuf.Parser<UpdateIssueRequest> PARSER = new com.google.protobuf.AbstractParser<UpdateIssueRequest>() { @java.lang.Override public UpdateIssueRequest parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { Builder builder = newBuilder(); try { builder.mergeFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw 
e.setUnfinishedMessage(builder.buildPartial()); } catch (com.google.protobuf.UninitializedMessageException e) { throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException(e) .setUnfinishedMessage(builder.buildPartial()); } return builder.buildPartial(); } }; public static com.google.protobuf.Parser<UpdateIssueRequest> parser() { return PARSER; } @java.lang.Override public com.google.protobuf.Parser<UpdateIssueRequest> getParserForType() { return PARSER; } @java.lang.Override public com.google.cloud.securesourcemanager.v1.UpdateIssueRequest getDefaultInstanceForType() { return DEFAULT_INSTANCE; } }
apache/flink
36,535
flink-runtime/src/test/java/org/apache/flink/runtime/io/network/partition/consumer/LocalInputChannelTest.java
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.flink.runtime.io.network.partition.consumer; import org.apache.flink.runtime.checkpoint.CheckpointOptions; import org.apache.flink.runtime.checkpoint.CheckpointType; import org.apache.flink.runtime.checkpoint.channel.RecordingChannelStateWriter; import org.apache.flink.runtime.execution.CancelTaskException; import org.apache.flink.runtime.io.disk.NoOpFileChannelManager; import org.apache.flink.runtime.io.network.TaskEventDispatcher; import org.apache.flink.runtime.io.network.api.CheckpointBarrier; import org.apache.flink.runtime.io.network.api.serialization.EventSerializer; import org.apache.flink.runtime.io.network.buffer.Buffer; import org.apache.flink.runtime.io.network.buffer.BufferConsumer; import org.apache.flink.runtime.io.network.buffer.BufferPool; import org.apache.flink.runtime.io.network.buffer.BufferProvider; import org.apache.flink.runtime.io.network.buffer.NetworkBufferPool; import org.apache.flink.runtime.io.network.partition.BufferAvailabilityListener; import org.apache.flink.runtime.io.network.partition.BufferWritingResultPartition; import org.apache.flink.runtime.io.network.partition.InputChannelTestUtils; import 
org.apache.flink.runtime.io.network.partition.PartitionNotFoundException; import org.apache.flink.runtime.io.network.partition.PartitionTestUtils; import org.apache.flink.runtime.io.network.partition.PipelinedResultPartition; import org.apache.flink.runtime.io.network.partition.ResultPartition; import org.apache.flink.runtime.io.network.partition.ResultPartitionBuilder; import org.apache.flink.runtime.io.network.partition.ResultPartitionID; import org.apache.flink.runtime.io.network.partition.ResultPartitionManager; import org.apache.flink.runtime.io.network.partition.ResultPartitionType; import org.apache.flink.runtime.io.network.partition.ResultSubpartition; import org.apache.flink.runtime.io.network.partition.ResultSubpartitionIndexSet; import org.apache.flink.runtime.io.network.partition.ResultSubpartitionView; import org.apache.flink.runtime.io.network.util.TestBufferFactory; import org.apache.flink.runtime.io.network.util.TestPartitionProducer; import org.apache.flink.runtime.io.network.util.TestProducerSource; import org.apache.flink.runtime.jobgraph.IntermediateResultPartitionID; import org.apache.flink.runtime.state.CheckpointStorageLocationReference; import org.apache.flink.util.concurrent.FutureUtils; import org.apache.flink.util.function.CheckedSupplier; import org.apache.flink.shaded.guava33.com.google.common.collect.Lists; import org.junit.jupiter.api.Test; import org.mockito.stubbing.Answer; import java.io.IOException; import java.util.Collections; import java.util.List; import java.util.Optional; import java.util.Timer; import java.util.TimerTask; import java.util.concurrent.Callable; import java.util.concurrent.CompletableFuture; import java.util.concurrent.ExecutorService; import java.util.concurrent.Executors; import java.util.concurrent.atomic.AtomicInteger; import static org.apache.flink.runtime.io.network.buffer.BufferBuilderTestUtils.createFilledFinishedBufferConsumer; import static 
org.apache.flink.runtime.io.network.partition.InputChannelTestUtils.createLocalInputChannel; import static org.apache.flink.runtime.io.network.partition.InputChannelTestUtils.createSingleInputGate; import static org.apache.flink.runtime.io.network.partition.InputGateFairnessTest.setupInputGate; import static org.apache.flink.runtime.io.network.partition.consumer.SingleInputGateTest.TestingResultPartitionManager; import static org.apache.flink.runtime.state.CheckpointStorageLocationReference.getDefault; import static org.apache.flink.util.Preconditions.checkArgument; import static org.assertj.core.api.Assertions.assertThat; import static org.assertj.core.api.Assertions.assertThatThrownBy; import static org.mockito.ArgumentMatchers.any; import static org.mockito.ArgumentMatchers.anyLong; import static org.mockito.ArgumentMatchers.eq; import static org.mockito.Mockito.doAnswer; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.verify; import static org.mockito.Mockito.when; /** Tests for the {@link LocalInputChannel}. 
 * Unit tests for {@link LocalInputChannel}: partition request/backoff behavior, checkpoint
 * barrier handling, buffer-size announcements, and concurrent release scenarios.
 */
class LocalInputChannelTest {

    /**
     * Verifies that once an aligned-with-timeout barrier has been pulled from the channel,
     * subsequent data buffers are NOT persisted to the channel state writer.
     */
    @Test
    void testNoDataPersistedAfterReceivingAlignedBarrier() throws Exception {
        CheckpointBarrier barrier =
                new CheckpointBarrier(
                        1L,
                        0L,
                        CheckpointOptions.alignedWithTimeout(
                                CheckpointType.CHECKPOINT, getDefault(), 123L));
        BufferConsumer barrierHolder = EventSerializer.toBufferConsumer(barrier, false);
        BufferConsumer data = createFilledFinishedBufferConsumer(1);

        RecordingChannelStateWriter stateWriter = new RecordingChannelStateWriter();
        LocalInputChannel channel =
                InputChannelBuilder.newBuilder()
                        .setPartitionManager(
                                new TestingResultPartitionManager(
                                        InputChannelTestUtils.createResultSubpartitionView(
                                                barrierHolder, data)))
                        .setStateWriter(stateWriter)
                        .buildLocalChannel(new SingleInputGateBuilder().build());
        channel.requestSubpartitions();

        // pull AC (aligned checkpoint) barrier
        channel.getNextBuffer();
        // pretend that alignment timed out
        stateWriter.start(barrier.getId(), barrier.getCheckpointOptions());
        channel.checkpointStarted(barrier);
        // pull data
        channel.getNextBuffer();

        assertThat(stateWriter.getAddedInput().isEmpty())
                .withFailMessage("no data should be persisted after receiving a barrier")
                .isTrue();
    }

    /**
     * Tests the consumption of multiple subpartitions via local input channels.
     *
     * <p>Multiple producer tasks produce pipelined partitions, which are consumed by multiple
     * tasks via local input channels.
     */
    @Test
    void testConcurrentConsumeMultiplePartitions() throws Exception {
        // Config
        final int parallelism = 32;
        final int producerBufferPoolSize = parallelism + 1;
        final int numberOfBuffersPerChannel = 1024;

        // Setup
        // One thread per produced partition and one per consumer
        final ExecutorService executor = Executors.newFixedThreadPool(2 * parallelism);

        final NetworkBufferPool networkBuffers =
                new NetworkBufferPool(
                        (parallelism * producerBufferPoolSize) + (parallelism * parallelism),
                        TestBufferFactory.BUFFER_SIZE);

        final ResultPartitionManager partitionManager = new ResultPartitionManager();

        final ResultPartitionID[] partitionIds = new ResultPartitionID[parallelism];
        final TestPartitionProducer[] partitionProducers = new TestPartitionProducer[parallelism];

        // Create all partitions
        for (int i = 0; i < parallelism; i++) {
            partitionIds[i] = new ResultPartitionID();

            final ResultPartition partition =
                    new ResultPartitionBuilder()
                            .setResultPartitionId(partitionIds[i])
                            .setNumberOfSubpartitions(parallelism)
                            .setNumTargetKeyGroups(parallelism)
                            .setResultPartitionManager(partitionManager)
                            .setBufferPoolFactory(
                                    () ->
                                            networkBuffers.createBufferPool(
                                                    producerBufferPoolSize,
                                                    producerBufferPoolSize,
                                                    parallelism,
                                                    Integer.MAX_VALUE,
                                                    0))
                            .build();

            // Create a buffer pool for this partition
            partition.setup();

            // Create the producer
            partitionProducers[i] =
                    new TestPartitionProducer(
                            (BufferWritingResultPartition) partition,
                            false,
                            new TestPartitionProducerBufferSource(
                                    parallelism,
                                    TestBufferFactory.BUFFER_SIZE,
                                    numberOfBuffersPerChannel));
        }

        // Test
        try {
            // Submit producer tasks
            List<CompletableFuture<?>> results = Lists.newArrayListWithCapacity(parallelism + 1);

            for (int i = 0; i < parallelism; i++) {
                results.add(
                        CompletableFuture.supplyAsync(
                                CheckedSupplier.unchecked(partitionProducers[i]::call),
                                executor));
            }

            // Submit consumer
            for (int i = 0; i < parallelism; i++) {
                final TestLocalInputChannelConsumer consumer =
                        new TestLocalInputChannelConsumer(
                                i,
                                parallelism,
                                numberOfBuffersPerChannel,
                                networkBuffers.createBufferPool(parallelism, parallelism),
                                partitionManager,
                                new TaskEventDispatcher(),
                                partitionIds);

                results.add(
                        CompletableFuture.supplyAsync(
                                CheckedSupplier.unchecked(consumer::call), executor));
            }

            // Wait for all producers and consumers to finish (propagates failures).
            FutureUtils.waitForAll(results).get();
        } finally {
            networkBuffers.destroyAllBufferPools();
            networkBuffers.destroy();
            executor.shutdown();
        }
    }

    /**
     * Verifies that repeated subpartition-request retries are scheduled with exponentially
     * increasing delays, capped at the configured maximum, and that exceeding the maximum
     * surfaces as an error on the next read.
     */
    @Test
    void testPartitionRequestExponentialBackoff() throws Exception {
        // Config
        int initialBackoff = 500;
        int maxBackoff = 3000;

        // Start with initial backoff, then keep doubling, and cap at max.
        int[] expectedDelays = {initialBackoff, 1000, 2000, maxBackoff};

        // Setup
        SingleInputGate inputGate = mock(SingleInputGate.class);
        BufferProvider bufferProvider = mock(BufferProvider.class);
        when(inputGate.getBufferProvider()).thenReturn(bufferProvider);

        ResultPartitionManager partitionManager = mock(ResultPartitionManager.class);

        LocalInputChannel ch =
                createLocalInputChannel(inputGate, partitionManager, initialBackoff, maxBackoff);

        // Every view creation fails so that each request must be retriggered.
        when(partitionManager.createSubpartitionView(
                        eq(ch.partitionId),
                        any(ResultSubpartitionIndexSet.class),
                        any(BufferAvailabilityListener.class)))
                .thenThrow(new PartitionNotFoundException(ch.partitionId));

        // The mocked timer runs scheduled tasks synchronously, so the test can verify the
        // requested delay of each retry without waiting.
        Timer timer = mock(Timer.class);
        doAnswer(
                        (Answer<Void>)
                                invocation -> {
                                    ((TimerTask) invocation.getArguments()[0]).run();
                                    return null;
                                })
                .when(timer)
                .schedule(any(TimerTask.class), anyLong());

        // Initial request
        ch.requestSubpartitions();
        verify(partitionManager)
                .createSubpartitionView(
                        eq(ch.partitionId),
                        any(ResultSubpartitionIndexSet.class),
                        any(BufferAvailabilityListener.class));

        // Request subpartition and verify that the actual requests are delayed.
        for (long expected : expectedDelays) {
            ch.retriggerSubpartitionRequest(timer);

            verify(timer).schedule(any(TimerTask.class), eq(expected));
        }

        // Exception after backoff is greater than the maximum backoff.
        ch.retriggerSubpartitionRequest(timer);
        assertThatThrownBy(ch::getNextBuffer);
    }

    /**
     * Verifies that reading from a channel whose subpartition view is released with a failure
     * cause throws a {@link CancelTaskException}.
     */
    @Test
    void testProducerFailedException() throws Exception {
        ResultSubpartitionView view = mock(ResultSubpartitionView.class);
        when(view.isReleased()).thenReturn(true);
        when(view.getFailureCause()).thenReturn(new Exception("Expected test exception"));

        ResultPartitionManager partitionManager = mock(ResultPartitionManager.class);
        when(partitionManager.createSubpartitionView(
                        any(ResultPartitionID.class),
                        any(ResultSubpartitionIndexSet.class),
                        any(BufferAvailabilityListener.class)))
                .thenReturn(view);

        SingleInputGate inputGate = mock(SingleInputGate.class);
        BufferProvider bufferProvider = mock(BufferProvider.class);
        when(inputGate.getBufferProvider()).thenReturn(bufferProvider);

        LocalInputChannel ch = createLocalInputChannel(inputGate, partitionManager);

        ch.requestSubpartitions();

        // Should throw an instance of CancelTaskException.
        assertThatThrownBy(ch::getNextBuffer).isInstanceOf(CancelTaskException.class);
    }

    /**
     * Tests that {@link LocalInputChannel#requestSubpartitions()} throws {@link
     * PartitionNotFoundException} if the result partition was not registered in {@link
     * ResultPartitionManager} and no backoff.
     */
    @Test
    void testPartitionNotFoundExceptionWhileRequestingPartition() throws Exception {
        final SingleInputGate inputGate = createSingleInputGate(1);
        final LocalInputChannel localChannel =
                createLocalInputChannel(inputGate, new ResultPartitionManager());

        assertThatThrownBy(localChannel::requestSubpartitions)
                .isInstanceOfSatisfying(
                        PartitionNotFoundException.class,
                        notFound ->
                                assertThat(localChannel.getPartitionId())
                                        .isEqualTo(notFound.getPartitionId()));
    }

    /**
     * Tests that {@link SingleInputGate#retriggerPartitionRequest(IntermediateResultPartitionID)}
     * is triggered after {@link LocalInputChannel#requestSubpartitions()} throws {@link
     * PartitionNotFoundException} within backoff.
     */
    @Test
    void testRetriggerPartitionRequestWhilePartitionNotFound() throws Exception {
        final SingleInputGate inputGate = createSingleInputGate(1);
        final LocalInputChannel localChannel =
                createLocalInputChannel(inputGate, new ResultPartitionManager(), 1, 1);

        inputGate.setInputChannels(localChannel);
        localChannel.requestSubpartitions();

        // The timer should be initialized at the first time of retriggering partition request.
        assertThat(inputGate.getRetriggerLocalRequestTimer()).isNotNull();
    }

    /**
     * Tests that {@link LocalInputChannel#retriggerSubpartitionRequest(Timer)} would throw {@link
     * PartitionNotFoundException} which is set onto the input channel then.
     */
    @Test
    void testChannelErrorWhileRetriggeringRequest() {
        final SingleInputGate inputGate = createSingleInputGate(1);
        final LocalInputChannel localChannel =
                createLocalInputChannel(inputGate, new ResultPartitionManager());

        // Run the scheduled retry inline and check that the resulting error is recorded on the
        // channel.
        final Timer timer =
                new Timer(true) {
                    @Override
                    public void schedule(TimerTask task, long delay) {
                        task.run();

                        assertThatThrownBy(localChannel::checkError)
                                .isInstanceOfSatisfying(
                                        PartitionNotFoundException.class,
                                        notFound ->
                                                assertThat(localChannel.partitionId)
                                                        .isEqualTo(notFound.getPartitionId()));
                    }
                };

        try {
            localChannel.retriggerSubpartitionRequest(timer);
        } finally {
            timer.cancel();
        }
    }

    /**
     * Verifies that concurrent release via the SingleInputGate and re-triggering of a partition
     * request works smoothly.
     *
     * <ul>
     *   <li>SingleInputGate acquires its request lock and tries to release all registered
     *       channels. When releasing a channel, it needs to acquire the channel's shared
     *       request-release lock.
     *   <li>If a LocalInputChannel concurrently retriggers a partition request via a Timer Thread
     *       it acquires the channel's request-release lock and calls the retrigger callback on
     *       the SingleInputGate, which again tries to acquire the gate's request lock.
     * </ul>
     *
     * <p>For certain timings this obviously leads to a deadlock. This test reliably reproduced
     * such a timing (reported in FLINK-5228). This test is pretty much testing the buggy
     * implementation and has not much more general value. If it becomes obsolete at some point
     * (future greatness ;)), feel free to remove it.
     *
     * <p>The fix in the end was to not acquire the channels lock when releasing it and/or not
     * doing any input gate callbacks while holding the channel's lock. I decided to do both.
     */
    @Test
    void testConcurrentReleaseAndRetriggerPartitionRequest() throws Exception {
        final SingleInputGate gate = createSingleInputGate(1);

        ResultPartitionManager partitionManager = mock(ResultPartitionManager.class);
        when(partitionManager.createSubpartitionView(
                        any(ResultPartitionID.class),
                        any(ResultSubpartitionIndexSet.class),
                        any(BufferAvailabilityListener.class)))
                .thenAnswer(
                        (Answer<ResultSubpartitionView>)
                                invocationOnMock -> {
                                    // Sleep here a little to give the releaser Thread
                                    // time to acquire the input gate lock. We throw
                                    // the Exception to retrigger the request.
                                    Thread.sleep(100);
                                    throw new PartitionNotFoundException(new ResultPartitionID());
                                });

        final LocalInputChannel channel = createLocalInputChannel(gate, partitionManager, 1, 1);

        Thread releaser =
                new Thread(
                        () -> {
                            try {
                                gate.close();
                            } catch (IOException ignored) {
                            }
                        });

        Thread requester =
                new Thread(
                        () -> {
                            try {
                                channel.requestSubpartitions();
                            } catch (IOException ignored) {
                            }
                        });

        requester.start();
        releaser.start();

        // If the deadlock of FLINK-5228 re-appears, these joins would hang the test.
        releaser.join();
        requester.join();
    }

    /**
     * Tests that reads from a channel after the partition has been released are handled and
     * don't lead to NPEs.
     */
    @Test
    void testGetNextAfterPartitionReleased() throws Exception {
        ResultSubpartitionView subpartitionView =
                InputChannelTestUtils.createResultSubpartitionView(false);
        TestingResultPartitionManager partitionManager =
                new TestingResultPartitionManager(subpartitionView);
        LocalInputChannel channel =
                createLocalInputChannel(new SingleInputGateBuilder().build(), partitionManager);

        channel.requestSubpartitions();
        assertThat(channel.getNextBuffer()).isNotPresent();

        // release the subpartition view
        subpartitionView.releaseAllResources();

        // Reading from a released (but not yet released channel) view fails the task ...
        assertThatThrownBy(channel::getNextBuffer).isInstanceOf(CancelTaskException.class);

        // ... while reading from a fully released channel simply yields nothing.
        channel.releaseAllResources();
        assertThat(channel.getNextBuffer()).isNotPresent();
    }

    /** Verifies that buffer is not compressed when getting from a {@link LocalInputChannel}. */
    @Test
    void testGetBufferFromLocalChannelWhenCompressionEnabled() throws Exception {
        ResultSubpartitionView subpartitionView =
                InputChannelTestUtils.createResultSubpartitionView(true);
        TestingResultPartitionManager partitionManager =
                new TestingResultPartitionManager(subpartitionView);
        LocalInputChannel channel =
                createLocalInputChannel(new SingleInputGateBuilder().build(), partitionManager);

        // request partition and get next buffer
        channel.requestSubpartitions();
        Optional<InputChannel.BufferAndAvailability> bufferAndAvailability =
                channel.getNextBuffer();
        assertThat(bufferAndAvailability)
                .hasValueSatisfying(
                        value -> assertThat(value.buffer().isCompressed()).isFalse());
    }

    /** Verifies that resuming consumption on an already-released channel is rejected. */
    @Test
    void testUnblockReleasedChannel() throws Exception {
        SingleInputGate inputGate = createSingleInputGate(1);
        LocalInputChannel localChannel =
                createLocalInputChannel(inputGate, new ResultPartitionManager());

        localChannel.releaseAllResources();
        assertThatThrownBy(localChannel::resumeConsumption)
                .isInstanceOf(IllegalStateException.class);
    }

    /** Verifies that announcing a buffer size on a released channel is rejected. */
    @Test
    void testAnnounceBufferSize() throws Exception {
        // given: Initialized local input channel.
        // NOTE(review): lastBufferSize is never used in this test — looks like leftover setup;
        // confirm and consider removing.
        AtomicInteger lastBufferSize = new AtomicInteger(0);
        TestingResultPartitionManager partitionManager =
                new TestingResultPartitionManager(
                        InputChannelTestUtils.createResultSubpartitionView(true));
        SingleInputGate inputGate = createSingleInputGate(1);
        LocalInputChannel localChannel = createLocalInputChannel(inputGate, partitionManager);
        localChannel.requestSubpartitions();

        localChannel.announceBufferSize(10);

        // when: Release all resources.
        localChannel.releaseAllResources();

        // then: Announcement buffer size should lead to exception.
        assertThatThrownBy(() -> localChannel.announceBufferSize(12))
                .isInstanceOf(IllegalStateException.class);
    }

    /**
     * Verifies that a subpartition blocked by a checkpoint barrier yields no data until the
     * channel's consumption is resumed, after which the buffered data becomes available.
     */
    @Test
    void testEnqueueAvailableChannelWhenResuming() throws IOException, InterruptedException {
        PipelinedResultPartition parent =
                (PipelinedResultPartition)
                        PartitionTestUtils.createPartition(
                                ResultPartitionType.PIPELINED, NoOpFileChannelManager.INSTANCE);
        ResultSubpartition subpartition = parent.getAllPartitions()[0];
        ResultSubpartitionView subpartitionView =
                subpartition.createReadView((ResultSubpartitionView view) -> {});
        TestingResultPartitionManager partitionManager =
                new TestingResultPartitionManager(subpartitionView);
        LocalInputChannel channel =
                createLocalInputChannel(new SingleInputGateBuilder().build(), partitionManager);
        channel.requestSubpartitions();

        // Block the subpartition
        subpartition.add(
                EventSerializer.toBufferConsumer(
                        new CheckpointBarrier(
                                1, 1, CheckpointOptions.forCheckpointWithDefaultLocation()),
                        false));
        assertThat(channel.getNextBuffer()).isPresent();

        // Add more data
        subpartition.add(createFilledFinishedBufferConsumer(4096));
        subpartition.flush();

        // No buffer since the subpartition is blocked.
        assertThat(channel.inputGate.pollNext()).isNotPresent();

        // Resumption makes the subpartition available.
        channel.resumeConsumption();
        Optional<BufferOrEvent> nextBuffer = channel.inputGate.pollNext();
        assertThat(nextBuffer)
                .hasValueSatisfying(value -> assertThat(value.isBuffer()).isTrue());
    }

    /**
     * Verifies that during an unaligned checkpoint only the buffers received BEFORE the barrier
     * are captured as in-flight data by the channel state writer.
     */
    @Test
    void testCheckpointingInflightData() throws Exception {
        SingleInputGate inputGate = new SingleInputGateBuilder().build();

        PipelinedResultPartition parent =
                (PipelinedResultPartition)
                        PartitionTestUtils.createPartition(
                                ResultPartitionType.PIPELINED, NoOpFileChannelManager.INSTANCE);
        ResultSubpartition subpartition = parent.getAllPartitions()[0];
        ResultSubpartitionView subpartitionView =
                subpartition.createReadView((ResultSubpartitionView view) -> {});
        TestingResultPartitionManager partitionManager =
                new TestingResultPartitionManager(subpartitionView);
        final RecordingChannelStateWriter stateWriter = new RecordingChannelStateWriter();
        LocalInputChannel channel =
                createLocalInputChannel(
                        inputGate, partitionManager, 0, 0, b -> b.setStateWriter(stateWriter));
        inputGate.setInputChannels(channel);
        channel.requestSubpartitions();

        final CheckpointStorageLocationReference location = getDefault();
        CheckpointOptions options =
                CheckpointOptions.unaligned(CheckpointType.CHECKPOINT, location);
        stateWriter.start(0, options);

        final CheckpointBarrier barrier = new CheckpointBarrier(0, 123L, options);
        channel.checkpointStarted(barrier);

        // add 1 buffer before barrier and 1 buffer afterwards. Only the first buffer should be
        // written.
        subpartition.add(createFilledFinishedBufferConsumer(1));
        assertThat(channel.getNextBuffer()).isPresent();

        subpartition.add(EventSerializer.toBufferConsumer(barrier, true));
        assertThat(channel.getNextBuffer()).isPresent();

        subpartition.add(createFilledFinishedBufferConsumer(2));
        assertThat(channel.getNextBuffer()).isPresent();

        // Only the pre-barrier buffer (size 1) must have been recorded as in-flight state.
        assertThat(
                        stateWriter.getAddedInput().get(channel.getChannelInfo()).stream()
                                .mapToInt(Buffer::getSize)
                                .toArray())
                .containsExactly(1);
    }

    /**
     * Verifies that buffer-size announcements made through different channels reach their
     * corresponding subpartitions independently.
     */
    @Test
    void testAnnounceNewBufferSize() throws IOException, InterruptedException {
        // given: Configured LocalInputChannel and pipelined subpartition.
        PipelinedResultPartition parent =
                (PipelinedResultPartition)
                        new ResultPartitionBuilder()
                                .setResultPartitionType(ResultPartitionType.PIPELINED)
                                .setFileChannelManager(NoOpFileChannelManager.INSTANCE)
                                .setNumberOfSubpartitions(2)
                                .build();
        ResultSubpartition subpartition0 = parent.getAllPartitions()[0];
        ResultSubpartition subpartition1 = parent.getAllPartitions()[1];

        LocalInputChannel channel0 =
                createLocalInputChannel(
                        new SingleInputGateBuilder().build(),
                        new TestingResultPartitionManager(
                                subpartition0.createReadView(
                                        (ResultSubpartitionView view) -> {})));
        LocalInputChannel channel1 =
                createLocalInputChannel(
                        new SingleInputGateBuilder().build(),
                        new TestingResultPartitionManager(
                                subpartition1.createReadView(
                                        (ResultSubpartitionView view) -> {})));

        channel0.requestSubpartitions();
        channel1.requestSubpartitions();

        // and: Preferable buffer size is default value.
        assertThat(subpartition0.add(createFilledFinishedBufferConsumer(16)))
                .isEqualTo(Integer.MAX_VALUE);
        assertThat(subpartition1.add(createFilledFinishedBufferConsumer(16)))
                .isEqualTo(Integer.MAX_VALUE);

        // when: Announce the different buffer size for different channels via LocalInputChannel.
        channel0.announceBufferSize(9);
        channel1.announceBufferSize(20);

        // then: The corresponding subpartitions have the new size.
        assertThat(subpartition0.add(createFilledFinishedBufferConsumer(16))).isEqualTo(9);
        assertThat(subpartition1.add(createFilledFinishedBufferConsumer(16))).isEqualTo(20);
    }

    /**
     * Verifies that {@link LocalInputChannel#getBuffersInUseCount()} reports 0 before the
     * subpartition view is requested and the real backlog afterwards.
     */
    @Test
    void testReceivingBuffersInUseBeforeSubpartitionViewInitialization() throws Exception {
        // given: Local input channel without initialized subpartition view.
        ResultSubpartitionView subpartitionView =
                InputChannelTestUtils.createResultSubpartitionView(
                        createFilledFinishedBufferConsumer(4096),
                        createFilledFinishedBufferConsumer(4096),
                        createFilledFinishedBufferConsumer(4096));
        TestingResultPartitionManager partitionManager =
                new TestingResultPartitionManager(subpartitionView);
        final SingleInputGate inputGate = createSingleInputGate(1);
        final LocalInputChannel localChannel =
                createLocalInputChannel(inputGate, partitionManager);
        inputGate.setInputChannels(localChannel);

        // then: Buffers in use should be equal to 0 until subpartition view initialization.
        assertThat(localChannel.getBuffersInUseCount()).isZero();

        // when: The subpartition view is initialized.
        localChannel.requestSubpartitions();

        // then: Buffers in use should show correct value.
        assertThat(localChannel.getBuffersInUseCount()).isEqualTo(3);
    }

    // ---------------------------------------------------------------------------------------------

    /** Returns the configured number of buffers for each channel in a random order. */
    private static class TestPartitionProducerBufferSource implements TestProducerSource {

        // Size of each produced buffer payload, in bytes.
        private final int bufferSize;

        // Remaining channel indexes to produce for; consumed from the front, pre-shuffled.
        private final List<Byte> channelIndexes;

        TestPartitionProducerBufferSource(
                int parallelism, int bufferSize, int numberOfBuffersToProduce) {
            this.bufferSize = bufferSize;
            this.channelIndexes =
                    Lists.newArrayListWithCapacity(parallelism * numberOfBuffersToProduce);

            // Array of channel indexes to produce buffers for
            for (byte i = 0; i < parallelism; i++) {
                for (int j = 0; j < numberOfBuffersToProduce; j++) {
                    channelIndexes.add(i);
                }
            }

            // Random buffer to channel ordering
            Collections.shuffle(channelIndexes);
        }

        /** Returns the next buffer-and-channel pair, or {@code null} when exhausted. */
        @Override
        public BufferAndChannel getNextBuffer() throws Exception {
            if (channelIndexes.size() > 0) {
                final int channelIndex = channelIndexes.remove(0);
                return new BufferAndChannel(new byte[bufferSize], channelIndex);
            }
            return null;
        }
    }

    /**
     * Consumes the configured result partitions and verifies that each channel receives the
     * expected number of buffers.
     */
    private static class TestLocalInputChannelConsumer implements Callable<Void> {

        private final SingleInputGate inputGate;

        private final int numberOfInputChannels;

        private final int numberOfExpectedBuffersPerChannel;

        TestLocalInputChannelConsumer(
                int subpartitionIndex,
                int numberOfInputChannels,
                int numberOfExpectedBuffersPerChannel,
                BufferPool bufferPool,
                ResultPartitionManager partitionManager,
                TaskEventDispatcher taskEventDispatcher,
                ResultPartitionID[] consumedPartitionIds)
                throws IOException {

            checkArgument(numberOfInputChannels >= 1);
            checkArgument(numberOfExpectedBuffersPerChannel >= 1);

            this.inputGate =
                    new SingleInputGateBuilder()
                            .setNumberOfChannels(numberOfInputChannels)
                            .setBufferPoolFactory(bufferPool)
                            .build();
            InputChannel[] inputChannels = new InputChannel[numberOfInputChannels];

            // Setup input channels
            for (int i = 0; i < numberOfInputChannels; i++) {
                inputChannels[i] =
                        InputChannelBuilder.newBuilder()
                                .setChannelIndex(i)
                                .setSubpartitionIndexSet(
                                        new ResultSubpartitionIndexSet(subpartitionIndex))
                                .setPartitionManager(partitionManager)
                                .setPartitionId(consumedPartitionIds[i])
                                .setTaskEventPublisher(taskEventDispatcher)
                                .buildLocalChannel(inputGate);
            }

            setupInputGate(inputGate, inputChannels);

            this.numberOfInputChannels = numberOfInputChannels;
            this.numberOfExpectedBuffersPerChannel = numberOfExpectedBuffersPerChannel;
        }

        /**
         * Drains the gate until end-of-input, counting buffers per channel, and fails if any
         * channel delivered more or fewer buffers than expected.
         */
        @Override
        public Void call() throws Exception {
            // One counter per input channel. Expect the same number of buffers from each channel.
            final int[] numberOfBuffersPerChannel = new int[numberOfInputChannels];

            try {
                Optional<BufferOrEvent> boe;
                while ((boe = inputGate.getNext()).isPresent()) {
                    if (boe.get().isBuffer()) {
                        boe.get().getBuffer().recycleBuffer();

                        // Check that we don't receive too many buffers
                        if (++numberOfBuffersPerChannel[
                                        boe.get().getChannelInfo().getInputChannelIdx()]
                                > numberOfExpectedBuffersPerChannel) {

                            throw new IllegalStateException(
                                    "Received more buffers than expected "
                                            + "on channel "
                                            + boe.get().getChannelInfo()
                                            + ".");
                        }
                    }
                }

                // Verify that we received the expected number of buffers on each channel
                for (int i = 0; i < numberOfBuffersPerChannel.length; i++) {
                    final int actualNumberOfReceivedBuffers = numberOfBuffersPerChannel[i];

                    if (actualNumberOfReceivedBuffers != numberOfExpectedBuffersPerChannel) {
                        throw new IllegalStateException(
                                "Received unexpected number of buffers "
                                        + "on channel "
                                        + i
                                        + " ("
                                        + actualNumberOfReceivedBuffers
                                        + " instead "
                                        + "of "
                                        + numberOfExpectedBuffersPerChannel
                                        + ").");
                    }
                }
            } finally {
                inputGate.close();
            }

            return null;
        }
    }
}
apache/fory
36,368
java/fory-core/src/main/java/org/apache/fory/serializer/collection/MapLikeSerializer.java
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.apache.fory.serializer.collection; import static org.apache.fory.serializer.collection.MapFlags.KEY_DECL_TYPE; import static org.apache.fory.serializer.collection.MapFlags.KEY_HAS_NULL; import static org.apache.fory.serializer.collection.MapFlags.KV_NULL; import static org.apache.fory.serializer.collection.MapFlags.NULL_KEY_VALUE_DECL_TYPE; import static org.apache.fory.serializer.collection.MapFlags.NULL_KEY_VALUE_DECL_TYPE_TRACKING_REF; import static org.apache.fory.serializer.collection.MapFlags.NULL_VALUE_KEY_DECL_TYPE; import static org.apache.fory.serializer.collection.MapFlags.NULL_VALUE_KEY_DECL_TYPE_TRACKING_REF; import static org.apache.fory.serializer.collection.MapFlags.TRACKING_KEY_REF; import static org.apache.fory.serializer.collection.MapFlags.TRACKING_VALUE_REF; import static org.apache.fory.serializer.collection.MapFlags.VALUE_DECL_TYPE; import static org.apache.fory.serializer.collection.MapFlags.VALUE_HAS_NULL; import static org.apache.fory.type.TypeUtils.MAP_TYPE; import com.google.common.collect.ImmutableMap.Builder; import java.lang.invoke.MethodHandle; import java.util.Iterator; import java.util.Map; import java.util.Map.Entry; import org.apache.fory.Fory; 
import org.apache.fory.annotation.CodegenInvoke;
import org.apache.fory.collection.IdentityMap;
import org.apache.fory.collection.Tuple2;
import org.apache.fory.memory.MemoryBuffer;
import org.apache.fory.reflect.ReflectionUtils;
import org.apache.fory.reflect.TypeRef;
import org.apache.fory.resolver.ClassInfo;
import org.apache.fory.resolver.ClassInfoHolder;
import org.apache.fory.resolver.RefResolver;
import org.apache.fory.resolver.TypeResolver;
import org.apache.fory.serializer.Serializer;
import org.apache.fory.type.GenericType;
import org.apache.fory.type.Generics;
import org.apache.fory.type.TypeUtils;

/**
 * Serializer for all map-like objects. Entries are serialized with a chunk-based protocol:
 * leading/interleaved entries with null keys or values are written as dedicated "null chunks",
 * and runs of entries whose key and value classes stay the same are written as typed chunks of
 * up to {@link #MAX_CHUNK_SIZE} entries sharing one header.
 */
@SuppressWarnings({"unchecked", "rawtypes"})
public abstract class MapLikeSerializer<T> extends Serializer<T> {
  // Chunk size is written as one unsigned byte, so a chunk holds at most 255 entries.
  public static final int MAX_CHUNK_SIZE = 255;

  // Lazily-resolved no-arg constructor handle of the concrete map type, see newMap().
  protected MethodHandle constructor;
  // Whether codegen may read/write entries directly via entrySet, see supportCodegenHook().
  protected final boolean supportCodegenHook;
  // One-shot serializers set by setKeySerializer/setValueSerializer; cleared after each use.
  private Serializer keySerializer;
  private Serializer valueSerializer;
  // Per-direction class-info caches for key/value class lookups.
  protected final ClassInfoHolder keyClassInfoWriteCache;
  protected final ClassInfoHolder keyClassInfoReadCache;
  protected final ClassInfoHolder valueClassInfoWriteCache;
  protected final ClassInfoHolder valueClassInfoReadCache;
  // support map subclass whose key or value generics only are available,
  // or one of types is already instantiated in subclass, ex: `Subclass<T> implements Map<String,
  // T>`
  private final IdentityMap<GenericType, GenericType> partialGenericKVTypeMap;
  private final GenericType objType;
  // For subclass whose kv type are instantiated already, such as
  // `Subclass implements Map<String, Long>`. If declared `Map` doesn't specify
  // instantiated kv type, then the serialization will need to write those kv
  // types. Although we can extract this generics when creating the serializer,
  // we can't do it when jit `Serializer` for some class which contains one of such map
  // field. So we will write those extra kv classes to keep protocol consistency between
  // interpreter and jit mode although it seems unnecessary.
  // With kv header in future, we can write this kv classes only once, the cost won't be too much.
  // Element count captured by newMap() and consumed by getAndClearNumElements().
  private int numElements;
  private final TypeResolver typeResolver;
  protected final SerializationBinding binding;

  public MapLikeSerializer(Fory fory, Class<T> cls) {
    // Dynamically generated classes can't use the codegen entrySet hook.
    this(fory, cls, !ReflectionUtils.isDynamicGeneratedCLass(cls));
  }

  public MapLikeSerializer(Fory fory, Class<T> cls, boolean supportCodegenHook) {
    this(fory, cls, supportCodegenHook, false);
  }

  public MapLikeSerializer(Fory fory, Class<T> cls, boolean supportCodegenHook, boolean immutable) {
    super(fory, cls, immutable);
    // Cross-language mode resolves types through the xtype resolver instead of the class resolver.
    this.typeResolver = fory.isCrossLanguage() ? fory.getXtypeResolver() : fory.getClassResolver();
    this.supportCodegenHook = supportCodegenHook;
    keyClassInfoWriteCache = typeResolver.nilClassInfoHolder();
    keyClassInfoReadCache = typeResolver.nilClassInfoHolder();
    valueClassInfoWriteCache = typeResolver.nilClassInfoHolder();
    valueClassInfoReadCache = typeResolver.nilClassInfoHolder();
    partialGenericKVTypeMap = new IdentityMap<>();
    objType = typeResolver.buildGenericType(Object.class);
    binding = SerializationBinding.createBinding(fory);
  }

  /**
   * Set key serializer for next serialization, the <code>serializer</code> will be cleared when
   * next serialization finished.
   */
  public void setKeySerializer(Serializer keySerializer) {
    this.keySerializer = keySerializer;
  }

  /**
   * Set value serializer for next serialization, the <code>serializer</code> will be cleared when
   * next serialization finished.
*/
  public void setValueSerializer(Serializer valueSerializer) {
    this.valueSerializer = valueSerializer;
  }

  /**
   * Serializes {@code value} entry-by-entry using the chunked map protocol: first any null-chunk
   * run, then typed chunks. Uses the one-shot key/value serializers if set (clearing them), else
   * falls back to the current generics context or dynamic per-entry class info.
   */
  @Override
  public void write(MemoryBuffer buffer, T value) {
    Map map = onMapWrite(buffer, value);
    Serializer keySerializer = this.keySerializer;
    Serializer valueSerializer = this.valueSerializer;
    // clear the elemSerializer to avoid conflict if the nested
    // serialization has collection field.
    // TODO use generics for compatible serializer.
    this.keySerializer = null;
    this.valueSerializer = null;
    if (map.isEmpty()) {
      return;
    }
    TypeResolver classResolver = typeResolver;
    Iterator<Entry<Object, Object>> iterator = map.entrySet().iterator();
    Entry<Object, Object> entry = iterator.next();
    // Each loop iteration writes one null chunk run plus one typed chunk; `entry` carries the
    // first not-yet-written entry across calls, null means the iterator is exhausted.
    while (entry != null) {
      if (keySerializer != null || valueSerializer != null) {
        entry = writeJavaNullChunk(buffer, entry, iterator, keySerializer, valueSerializer);
        if (entry != null) {
          entry =
              writeJavaChunk(
                  classResolver, buffer, entry, iterator, keySerializer, valueSerializer);
        }
      } else {
        Generics generics = fory.getGenerics();
        GenericType genericType = generics.nextGenericType();
        if (genericType == null) {
          entry = writeJavaNullChunk(buffer, entry, iterator, null, null);
          if (entry != null) {
            entry = writeJavaChunk(classResolver, buffer, entry, iterator, null, null);
          }
        } else {
          if (genericType.getTypeParametersCount() < 2) {
            // Map subclasses may declare fewer than 2 type parameters; normalize to a KV type.
            genericType = getKVGenericType(genericType);
          }
          GenericType keyGenericType = genericType.getTypeParameter0();
          GenericType valueGenericType = genericType.getTypeParameter1();
          entry =
              writeJavaNullChunkGeneric(buffer, entry, iterator, keyGenericType, valueGenericType);
          if (entry != null) {
            entry =
                writeJavaChunkGeneric(classResolver, generics, genericType, buffer, entry, iterator);
          }
        }
      }
    }
    onMapWriteFinish(map);
  }

  @Override
  public void xwrite(MemoryBuffer buffer, T value) {
    // Cross-language write shares the same chunked protocol.
    write(buffer, value);
  }

  /**
   * Writes a run of entries containing a null key and/or null value, one single-entry chunk each.
   * Returns the first entry whose key and value are both non-null (to be written as a typed
   * chunk), or null when the iterator is exhausted.
   */
  public final Entry writeJavaNullChunk(
      MemoryBuffer buffer,
      Entry entry,
      Iterator<Entry<Object, Object>> iterator,
      Serializer keySerializer,
      Serializer valueSerializer) {
    while (true) {
      Object key = entry.getKey();
      Object value = entry.getValue();
      if (key != null) {
        if (value != null) {
          return entry;
        }
        writeNullValueChunk(buffer, keySerializer, key);
      } else {
        writeNullKeyChunk(buffer, valueSerializer, value);
      }
      if (iterator.hasNext()) {
        entry = iterator.next();
      } else {
        return null;
      }
    }
  }

  /** Write chunk of size 1 whose value is null; only the key needs serializing. */
  private void writeNullValueChunk(MemoryBuffer buffer, Serializer keySerializer, Object key) {
    // noinspection Duplicates
    if (keySerializer != null) {
      if (keySerializer.needToWriteRef()) {
        buffer.writeByte(NULL_VALUE_KEY_DECL_TYPE_TRACKING_REF);
        binding.writeRef(buffer, key, keySerializer);
      } else {
        buffer.writeByte(NULL_VALUE_KEY_DECL_TYPE);
        binding.write(buffer, keySerializer, key);
      }
    } else {
      // No declared key serializer: write key with dynamic class info and ref tracking.
      buffer.writeByte(VALUE_HAS_NULL | TRACKING_KEY_REF);
      binding.writeRef(buffer, key, keyClassInfoWriteCache);
    }
  }

  /**
   * Write chunk of size 1, the key is null. Since we can have at most one key whose value is null,
   * this method is not in critical path, make it as a separate method to let caller eligible for
   * jit inline.
*/
  private void writeNullKeyChunk(MemoryBuffer buffer, Serializer valueSerializer, Object value) {
    if (value != null) {
      // noinspection Duplicates
      if (valueSerializer != null) {
        if (valueSerializer.needToWriteRef()) {
          buffer.writeByte(NULL_KEY_VALUE_DECL_TYPE_TRACKING_REF);
          binding.writeRef(buffer, value, valueSerializer);
        } else {
          buffer.writeByte(NULL_KEY_VALUE_DECL_TYPE);
          binding.write(buffer, valueSerializer, value);
        }
      } else {
        // No declared value serializer: write value with dynamic class info and ref tracking.
        buffer.writeByte(KEY_HAS_NULL | TRACKING_VALUE_REF);
        binding.writeRef(buffer, value, valueClassInfoWriteCache);
      }
    } else {
      // Both key and value are null: a single flag byte encodes the whole entry.
      buffer.writeByte(KV_NULL);
    }
  }

  /**
   * Null-chunk writer specialized for codegen when both key and value types are declared final
   * and need no ref tracking; same contract as {@link #writeJavaNullChunk}.
   */
  @CodegenInvoke
  public final Entry writeNullChunkKVFinalNoRef(
      MemoryBuffer buffer,
      Entry entry,
      Iterator<Entry<Object, Object>> iterator,
      Serializer keySerializer,
      Serializer valueSerializer) {
    while (true) {
      Object key = entry.getKey();
      Object value = entry.getValue();
      if (key != null) {
        if (value != null) {
          return entry;
        }
        buffer.writeByte(NULL_VALUE_KEY_DECL_TYPE);
        binding.write(buffer, keySerializer, key);
      } else {
        writeNullKeyChunk(buffer, valueSerializer, value);
      }
      if (iterator.hasNext()) {
        entry = iterator.next();
      } else {
        return null;
      }
    }
  }

  /**
   * Generics-aware variant of {@link #writeJavaNullChunk}: key/value serializers are derived from
   * the declared generic types when those types are monomorphic.
   */
  public final Entry writeJavaNullChunkGeneric(
      MemoryBuffer buffer,
      Entry entry,
      Iterator<Entry<Object, Object>> iterator,
      GenericType keyType,
      GenericType valueType) {
    while (true) {
      Object key = entry.getKey();
      Object value = entry.getValue();
      if (key != null) {
        if (value != null) {
          return entry;
        }
        writeKeyForNullValueChunkGeneric(buffer, key, keyType);
      } else {
        writeValueForNullKeyChunkGeneric(buffer, value, valueType);
      }
      if (iterator.hasNext()) {
        entry = iterator.next();
      } else {
        return null;
      }
    }
  }

  /** Write the key of a (key, null) entry using its declared generic type when monomorphic. */
  private void writeKeyForNullValueChunkGeneric(
      MemoryBuffer buffer, Object key, GenericType keyType) {
    if (!keyType.isMonomorphic()) {
      // Polymorphic key: fall back to dynamic class info + ref tracking.
      buffer.writeByte(VALUE_HAS_NULL | TRACKING_KEY_REF);
      binding.writeRef(buffer, key, keyClassInfoWriteCache);
      return;
    }
    Serializer serializer = keyType.getSerializer(typeResolver);
    if (keyType.hasGenericParameters()) {
      // Push nested generics (and bump depth) so the key's own parameters resolve correctly.
      fory.getGenerics().pushGenericType(keyType);
      fory.incDepth(1);
    }
    if (serializer.needToWriteRef()) {
      buffer.writeByte(NULL_VALUE_KEY_DECL_TYPE_TRACKING_REF);
      binding.writeRef(buffer, key, serializer);
    } else {
      buffer.writeByte(NULL_VALUE_KEY_DECL_TYPE);
      binding.write(buffer, serializer, key);
    }
    if (keyType.hasGenericParameters()) {
      fory.incDepth(-1);
      fory.getGenerics().popGenericType();
    }
  }

  /** Write the value of a (null, value) entry using its declared generic type when monomorphic. */
  private void writeValueForNullKeyChunkGeneric(
      MemoryBuffer buffer, Object value, GenericType valueType) {
    if (!valueType.isMonomorphic()) {
      // Polymorphic value: fall back to dynamic class info + ref tracking.
      buffer.writeByte(KEY_HAS_NULL | TRACKING_VALUE_REF);
      binding.writeRef(buffer, value, valueClassInfoWriteCache);
      return;
    }
    Serializer serializer = valueType.getSerializer(typeResolver);
    if (valueType.hasGenericParameters()) {
      fory.getGenerics().pushGenericType(valueType);
      fory.incDepth(1);
    }
    if (serializer.needToWriteRef()) {
      buffer.writeByte(NULL_KEY_VALUE_DECL_TYPE_TRACKING_REF);
      binding.writeRef(buffer, value, serializer);
    } else {
      buffer.writeByte(NULL_KEY_VALUE_DECL_TYPE);
      binding.write(buffer, serializer, value);
    }
    if (valueType.hasGenericParameters()) {
      fory.incDepth(-1);
      fory.getGenerics().popGenericType();
    }
  }

  // Make byte code of this method smaller than 325 for better jit inline
  private Entry writeJavaChunk(
      TypeResolver classResolver,
      MemoryBuffer buffer,
      Entry<Object, Object> entry,
      Iterator<Entry<Object, Object>> iterator,
      Serializer keySerializer,
      Serializer valueSerializer) {
    Object key = entry.getKey();
    Object value = entry.getValue();
    Class keyType = key.getClass();
    Class valueType = value.getClass();
    // place holder for chunk header and size.
buffer.writeInt16((short) -1);
    // writerIndex - 1 points at the size byte; header byte sits one position before it and both
    // are back-patched once the chunk is finished.
    int chunkSizeOffset = buffer.writerIndex() - 1;
    int chunkHeader = 0;
    if (keySerializer != null) {
      chunkHeader |= KEY_DECL_TYPE;
    } else {
      keySerializer = writeKeyClassInfo(classResolver, keyType, buffer);
    }
    if (valueSerializer != null) {
      chunkHeader |= VALUE_DECL_TYPE;
    } else {
      valueSerializer = writeValueClassInfo(classResolver, valueType, buffer);
    }
    // noinspection Duplicates
    boolean keyWriteRef = keySerializer.needToWriteRef();
    boolean valueWriteRef = valueSerializer.needToWriteRef();
    if (keyWriteRef) {
      chunkHeader |= TRACKING_KEY_REF;
    }
    if (valueWriteRef) {
      chunkHeader |= TRACKING_VALUE_REF;
    }
    buffer.putByte(chunkSizeOffset - 1, (byte) chunkHeader);
    RefResolver refResolver = fory.getRefResolver();
    // Use int to make chunk size representable for 0~255 instead of 0~127.
    int chunkSize = 0;
    while (true) {
      // End the chunk as soon as a null appears or the concrete key/value class changes.
      if (key == null
          || value == null
          || (key.getClass() != keyType)
          || (value.getClass() != valueType)) {
        break;
      }
      if (!keyWriteRef || !refResolver.writeRefOrNull(buffer, key)) {
        binding.write(buffer, keySerializer, key);
      }
      if (!valueWriteRef || !refResolver.writeRefOrNull(buffer, value)) {
        binding.write(buffer, valueSerializer, value);
      }
      // noinspection Duplicates
      ++chunkSize;
      if (iterator.hasNext()) {
        entry = iterator.next();
        key = entry.getKey();
        value = entry.getValue();
      } else {
        entry = null;
        break;
      }
      if (chunkSize == MAX_CHUNK_SIZE) {
        break;
      }
    }
    // Back-patch the actual chunk size into the placeholder written above.
    buffer.putByte(chunkSizeOffset, (byte) chunkSize);
    return entry;
  }

  /** Write the key's class info and return the serializer resolved for that class. */
  private Serializer writeKeyClassInfo(
      TypeResolver classResolver, Class keyType, MemoryBuffer buffer) {
    ClassInfo classInfo = classResolver.getClassInfo(keyType, keyClassInfoWriteCache);
    classResolver.writeClassInfo(buffer, classInfo);
    return classInfo.getSerializer();
  }

  /** Write the value's class info and return the serializer resolved for that class. */
  private Serializer writeValueClassInfo(
      TypeResolver classResolver, Class valueType, MemoryBuffer buffer) {
    ClassInfo classInfo = classResolver.getClassInfo(valueType, valueClassInfoWriteCache);
    classResolver.writeClassInfo(buffer, classInfo);
    return classInfo.getSerializer();
  }

  /**
   * Generics-aware typed-chunk writer; like {@link #writeJavaChunk} but key/value serializers can
   * come from declared monomorphic generic types instead of being written per chunk.
   */
  @CodegenInvoke
  public Entry writeJavaChunkGeneric(
      TypeResolver classResolver,
      Generics generics,
      GenericType genericType,
      MemoryBuffer buffer,
      Entry<Object, Object> entry,
      Iterator<Entry<Object, Object>> iterator) {
    // type parameters count for `Map field` will be 0;
    // type parameters count for `SubMap<V> field` which SubMap is
    // `SubMap<V> implements Map<String, V>` will be 1;
    if (genericType.getTypeParametersCount() < 2) {
      genericType = getKVGenericType(genericType);
    }
    GenericType keyGenericType = genericType.getTypeParameter0();
    GenericType valueGenericType = genericType.getTypeParameter1();
    if (keyGenericType == objType && valueGenericType == objType) {
      // Raw Map<Object, Object>: no useful generics, use the plain chunk writer.
      return writeJavaChunk(classResolver, buffer, entry, iterator, null, null);
    }
    // Can't avoid push generics repeatedly in loop by stack depth, because push two
    // generic type changed generics stack top, which is depth index, update stack top
    // and depth will have some cost too.
    // Stack depth to avoid push generics repeatedly in loop.
    // Note push two generic type changed generics stack top, which is depth index,
    // stack top should be updated when using for serialization k/v.
    // int depth = fory.getDepth();
    // // depth + 1 to leave a slot for value generics, otherwise value generics will
    // // be overwritten by nested key generics.
    // fory.setDepth(depth + 1);
    // generics.pushGenericType(keyGenericType);
    // fory.setDepth(depth);
    // generics.pushGenericType(valueGenericType);
    boolean keyGenericTypeFinal = keyGenericType.isMonomorphic();
    boolean valueGenericTypeFinal = valueGenericType.isMonomorphic();
    Object key = entry.getKey();
    Object value = entry.getValue();
    Class keyType = key.getClass();
    Class valueType = value.getClass();
    Serializer keySerializer, valueSerializer;
    // place holder for chunk header and size.
    buffer.writeInt16((short) -1);
    int chunkSizeOffset = buffer.writerIndex() - 1;
    int chunkHeader = 0;
    // noinspection Duplicates
    if (keyGenericTypeFinal) {
      chunkHeader |= KEY_DECL_TYPE;
      keySerializer = keyGenericType.getSerializer(classResolver);
    } else {
      keySerializer = writeKeyClassInfo(classResolver, keyType, buffer);
    }
    if (valueGenericTypeFinal) {
      chunkHeader |= VALUE_DECL_TYPE;
      valueSerializer = valueGenericType.getSerializer(classResolver);
    } else {
      valueSerializer = writeValueClassInfo(classResolver, valueType, buffer);
    }
    boolean keyWriteRef = keySerializer.needToWriteRef();
    if (keyWriteRef) {
      chunkHeader |= TRACKING_KEY_REF;
    }
    boolean valueWriteRef = valueSerializer.needToWriteRef();
    if (valueWriteRef) {
      chunkHeader |= TRACKING_VALUE_REF;
    }
    buffer.putByte(chunkSizeOffset - 1, (byte) chunkHeader);
    RefResolver refResolver = fory.getRefResolver();
    // Use int to make chunk size representable for 0~255 instead of 0~127.
    int chunkSize = 0;
    while (true) {
      if (key == null
          || value == null
          || (key.getClass() != keyType)
          || (value.getClass() != valueType)) {
        break;
      }
      // Push/pop the matching generic type around each key/value write so nested generic
      // serialization resolves against the right parameter.
      generics.pushGenericType(keyGenericType);
      if (!keyWriteRef || !refResolver.writeRefOrNull(buffer, key)) {
        fory.incDepth(1);
        binding.write(buffer, keySerializer, key);
        fory.incDepth(-1);
      }
      generics.popGenericType();
      generics.pushGenericType(valueGenericType);
      if (!valueWriteRef || !refResolver.writeRefOrNull(buffer, value)) {
        fory.incDepth(1);
        binding.write(buffer, valueSerializer, value);
        fory.incDepth(-1);
      }
      generics.popGenericType();
      ++chunkSize;
      // noinspection Duplicates
      if (iterator.hasNext()) {
        entry = iterator.next();
        key = entry.getKey();
        value = entry.getValue();
      } else {
        entry = null;
        break;
      }
      if (chunkSize == MAX_CHUNK_SIZE) {
        break;
      }
    }
    buffer.putByte(chunkSizeOffset, (byte) chunkSize);
    return entry;
  }

  /**
   * Normalize a generic type with fewer than two parameters into a full Map key/value generic
   * type, caching results per (identity of) the input type.
   */
  private GenericType getKVGenericType(GenericType genericType) {
    GenericType mapGenericType = partialGenericKVTypeMap.get(genericType);
    if (mapGenericType == null) {
      TypeRef<?> typeRef =
genericType.getTypeRef();
      if (!MAP_TYPE.isSupertypeOf(typeRef)) {
        // Not a Map at all: fall back to Map<Object, Object>.
        mapGenericType = GenericType.build(TypeUtils.mapOf(Object.class, Object.class));
        partialGenericKVTypeMap.put(genericType, mapGenericType);
        return mapGenericType;
      }
      Tuple2<TypeRef<?>, TypeRef<?>> mapKeyValueType = TypeUtils.getMapKeyValueType(typeRef);
      mapGenericType = GenericType.build(TypeUtils.mapOf(mapKeyValueType.f0, mapKeyValueType.f1));
      partialGenericKVTypeMap.put(genericType, mapGenericType);
    }
    return mapGenericType;
  }

  @Override
  public T xread(MemoryBuffer buffer) {
    // Cross-language read shares the same chunked protocol.
    return read(buffer);
  }

  /** Deep-copy entries of {@code originMap} into {@code newMap}; immutable k/v are reused as-is. */
  protected <K, V> void copyEntry(Map<K, V> originMap, Map<K, V> newMap) {
    TypeResolver classResolver = typeResolver;
    for (Map.Entry<K, V> entry : originMap.entrySet()) {
      K key = entry.getKey();
      if (key != null) {
        ClassInfo classInfo = classResolver.getClassInfo(key.getClass(), keyClassInfoWriteCache);
        if (!classInfo.getSerializer().isImmutable()) {
          key = fory.copyObject(key, classInfo.getClassId());
        }
      }
      V value = entry.getValue();
      if (value != null) {
        ClassInfo classInfo =
            classResolver.getClassInfo(value.getClass(), valueClassInfoWriteCache);
        if (!classInfo.getSerializer().isImmutable()) {
          value = fory.copyObject(value, classInfo.getClassId());
        }
      }
      newMap.put(key, value);
    }
  }

  /** Deep-copy entries of {@code originMap} into a Guava immutable-map {@code builder}. */
  protected <K, V> void copyEntry(Map<K, V> originMap, Builder<K, V> builder) {
    TypeResolver classResolver = typeResolver;
    for (Entry<K, V> entry : originMap.entrySet()) {
      K key = entry.getKey();
      if (key != null) {
        ClassInfo classInfo = classResolver.getClassInfo(key.getClass(), keyClassInfoWriteCache);
        if (!classInfo.getSerializer().isImmutable()) {
          key = fory.copyObject(key, classInfo.getClassId());
        }
      }
      V value = entry.getValue();
      if (value != null) {
        ClassInfo classInfo =
            classResolver.getClassInfo(value.getClass(), valueClassInfoWriteCache);
        if (!classInfo.getSerializer().isImmutable()) {
          value = fory.copyObject(value, classInfo.getClassId());
        }
      }
      builder.put(key, value);
    }
  }

  /**
   * Deep-copy entries of {@code originMap} into {@code elements} as a flat [k0, v0, k1, v1, ...]
   * array; the array must hold at least 2 * originMap.size() slots.
   */
  protected <K, V> void copyEntry(Map<K, V> originMap, Object[] elements) {
    TypeResolver classResolver = typeResolver;
    int index = 0;
    for (Entry<K, V> entry : originMap.entrySet()) {
      K key = entry.getKey();
      if (key != null) {
        ClassInfo classInfo = classResolver.getClassInfo(key.getClass(), keyClassInfoWriteCache);
        if (!classInfo.getSerializer().isImmutable()) {
          key = fory.copyObject(key, classInfo.getClassId());
        }
      }
      V value = entry.getValue();
      if (value != null) {
        ClassInfo classInfo =
            classResolver.getClassInfo(value.getClass(), valueClassInfoWriteCache);
        if (!classInfo.getSerializer().isImmutable()) {
          value = fory.copyObject(value, classInfo.getClassId());
        }
      }
      elements[index++] = key;
      elements[index++] = value;
    }
  }

  @Override
  public T read(MemoryBuffer buffer) {
    Map map = newMap(buffer);
    int size = getAndClearNumElements();
    readElements(buffer, size, map);
    return onMapRead(map);
  }

  /**
   * Read {@code size} entries into {@code map}, alternating null chunks and typed chunks; mirrors
   * {@link #write}.
   */
  public void readElements(MemoryBuffer buffer, int size, Map map) {
    Serializer keySerializer = this.keySerializer;
    Serializer valueSerializer = this.valueSerializer;
    // clear the elemSerializer to avoid conflict if the nested
    // serialization has collection field.
    // TODO use generics for compatible serializer.
this.keySerializer = null;
    this.valueSerializer = null;
    int chunkHeader = 0;
    if (size != 0) {
      chunkHeader = buffer.readUnsignedByte();
    }
    // Chunk readers return (remainingSize << 8) | nextChunkHeader, or 0 when done; unpack both
    // after each call.
    while (size > 0) {
      long sizeAndHeader =
          readJavaNullChunk(buffer, map, chunkHeader, size, keySerializer, valueSerializer);
      chunkHeader = (int) (sizeAndHeader & 0xff);
      size = (int) (sizeAndHeader >>> 8);
      if (size == 0) {
        break;
      }
      if (keySerializer != null || valueSerializer != null) {
        sizeAndHeader =
            readJavaChunk(fory, buffer, map, size, chunkHeader, keySerializer, valueSerializer);
      } else {
        Generics generics = fory.getGenerics();
        GenericType genericType = generics.nextGenericType();
        if (genericType == null) {
          sizeAndHeader = readJavaChunk(fory, buffer, map, size, chunkHeader, null, null);
        } else {
          sizeAndHeader =
              readJavaChunkGeneric(fory, generics, genericType, buffer, map, size, chunkHeader);
        }
      }
      chunkHeader = (int) (sizeAndHeader & 0xff);
      size = (int) (sizeAndHeader >>> 8);
    }
  }

  /**
   * Read a run of null chunks (entries with null key and/or value). Returns
   * {@code (remainingSize << 8) | chunkHeader} when a typed chunk header is reached, or 0 when all
   * entries have been consumed.
   */
  public long readJavaNullChunk(
      MemoryBuffer buffer,
      Map map,
      int chunkHeader,
      long size,
      Serializer keySerializer,
      Serializer valueSerializer) {
    while (true) {
      boolean keyHasNull = (chunkHeader & KEY_HAS_NULL) != 0;
      boolean valueHasNull = (chunkHeader & VALUE_HAS_NULL) != 0;
      if (!keyHasNull) {
        if (!valueHasNull) {
          // Not a null chunk: hand the header back to the caller for typed-chunk reading.
          return (size << 8) | chunkHeader;
        } else {
          // (key, null) entry.
          boolean trackKeyRef = (chunkHeader & TRACKING_KEY_REF) != 0;
          Object key;
          if ((chunkHeader & KEY_DECL_TYPE) != 0) {
            if (keySerializer == null) {
              key = readNonEmptyValueFromNullChunk(buffer, trackKeyRef, true);
            } else {
              fory.incReadDepth();
              if (trackKeyRef) {
                key = binding.readRef(buffer, keySerializer);
              } else {
                key = binding.read(buffer, keySerializer);
              }
              fory.decDepth();
            }
          } else {
            key = binding.readRef(buffer, keyClassInfoReadCache);
          }
          map.put(key, null);
        }
      } else {
        readNullKeyChunk(buffer, map, chunkHeader, valueSerializer, valueHasNull);
      }
      if (--size == 0) {
        return 0;
      } else {
        chunkHeader = buffer.readUnsignedByte();
      }
    }
  }

  /**
   * Read chunk of size 1, the key is null. Since we can have at most one key whose value is null,
   * this method is not in critical path, make it as a separate method to let caller eligible for
   * jit inline.
   */
  private void readNullKeyChunk(
      MemoryBuffer buffer,
      Map map,
      int chunkHeader,
      Serializer valueSerializer,
      boolean valueHasNull) {
    if (!valueHasNull) {
      // (null, value) entry.
      Object value;
      boolean trackValueRef = (chunkHeader & TRACKING_VALUE_REF) != 0;
      if ((chunkHeader & VALUE_DECL_TYPE) != 0) {
        if (valueSerializer == null) {
          value = readNonEmptyValueFromNullChunk(buffer, trackValueRef, false);
        } else {
          fory.incReadDepth();
          if (trackValueRef) {
            value = binding.readRef(buffer, valueSerializer);
          } else {
            value = binding.read(buffer, valueSerializer);
          }
          fory.decDepth();
        }
      } else {
        value = binding.readRef(buffer, valueClassInfoReadCache);
      }
      map.put(null, value);
    } else {
      // (null, null) entry.
      map.put(null, null);
    }
  }

  /**
   * Read the non-null side of a null chunk when its serializer must be resolved from the current
   * generics context ({@code isKey} selects key vs value type parameter).
   */
  private Object readNonEmptyValueFromNullChunk(
      MemoryBuffer buffer, boolean trackRef, boolean isKey) {
    Generics generics = fory.getGenerics();
    GenericType genericType = generics.nextGenericType();
    if (genericType.getTypeParametersCount() < 2) {
      genericType = getKVGenericType(genericType);
    }
    GenericType type = isKey ? genericType.getTypeParameter0() : genericType.getTypeParameter1();
    generics.pushGenericType(type);
    Serializer<?> serializer = type.getSerializer(typeResolver);
    Object v;
    fory.incReadDepth();
    if (trackRef) {
      v = binding.readRef(buffer, serializer);
    } else {
      v = binding.read(buffer, serializer);
    }
    fory.decDepth();
    generics.popGenericType();
    return v;
  }

  /**
   * Null-chunk reader specialized for codegen when both key and value types are declared final
   * and need no ref tracking; same return convention as {@link #readJavaNullChunk}.
   */
  @CodegenInvoke
  public long readNullChunkKVFinalNoRef(
      MemoryBuffer buffer,
      Map map,
      int chunkHeader,
      long size,
      Serializer keySerializer,
      Serializer valueSerializer) {
    while (true) {
      boolean keyHasNull = (chunkHeader & KEY_HAS_NULL) != 0;
      boolean valueHasNull = (chunkHeader & VALUE_HAS_NULL) != 0;
      if (!keyHasNull) {
        if (!valueHasNull) {
          return (size << 8) | chunkHeader;
        } else {
          fory.incReadDepth();
          Object key = binding.read(buffer, keySerializer);
          map.put(key, null);
          fory.decDepth();
        }
      } else {
        readNullKeyChunk(buffer, map, chunkHeader, valueSerializer, valueHasNull);
      }
      if (--size == 0) {
        return 0;
      } else {
        chunkHeader = buffer.readUnsignedByte();
      }
    }
  }

  /**
   * Read one typed chunk; mirrors {@link #writeJavaChunk}. Returns
   * {@code (remainingSize << 8) | nextChunkHeader}, or 0 when all entries are consumed.
   */
  private long readJavaChunk(
      Fory fory,
      MemoryBuffer buffer,
      Map map,
      long size,
      int chunkHeader,
      Serializer keySerializer,
      Serializer valueSerializer) {
    // noinspection Duplicates
    boolean trackKeyRef = (chunkHeader & TRACKING_KEY_REF) != 0;
    boolean trackValueRef = (chunkHeader & TRACKING_VALUE_REF) != 0;
    boolean keyIsDeclaredType = (chunkHeader & KEY_DECL_TYPE) != 0;
    boolean valueIsDeclaredType = (chunkHeader & VALUE_DECL_TYPE) != 0;
    int chunkSize = buffer.readUnsignedByte();
    if (!keyIsDeclaredType) {
      keySerializer = typeResolver.readClassInfo(buffer, keyClassInfoReadCache).getSerializer();
    }
    if (!valueIsDeclaredType) {
      valueSerializer = typeResolver.readClassInfo(buffer, valueClassInfoReadCache).getSerializer();
    }
    fory.incReadDepth();
    for (int i = 0; i < chunkSize; i++) {
      Object key =
          trackKeyRef ? binding.readRef(buffer, keySerializer) : binding.read(buffer, keySerializer);
      Object value = trackValueRef ?
binding.readRef(buffer, valueSerializer)
              : binding.read(buffer, valueSerializer);
      map.put(key, value);
      size--;
    }
    fory.decDepth();
    // More entries left: next byte is the following chunk's header.
    return size > 0 ? (size << 8) | buffer.readUnsignedByte() : 0;
  }

  /**
   * Generics-aware typed-chunk reader; mirrors {@link #writeJavaChunkGeneric}. Returns
   * {@code (remainingSize << 8) | nextChunkHeader}, or 0 when all entries are consumed.
   */
  private long readJavaChunkGeneric(
      Fory fory,
      Generics generics,
      GenericType genericType,
      MemoryBuffer buffer,
      Map map,
      long size,
      int chunkHeader) {
    // type parameters count for `Map field` will be 0;
    // type parameters count for `SubMap<V> field` which SubMap is
    // `SubMap<V> implements Map<String, V>` will be 1;
    if (genericType.getTypeParametersCount() < 2) {
      genericType = getKVGenericType(genericType);
    }
    GenericType keyGenericType = genericType.getTypeParameter0();
    GenericType valueGenericType = genericType.getTypeParameter1();
    // noinspection Duplicates
    boolean trackKeyRef = (chunkHeader & TRACKING_KEY_REF) != 0;
    boolean trackValueRef = (chunkHeader & TRACKING_VALUE_REF) != 0;
    boolean keyIsDeclaredType = (chunkHeader & KEY_DECL_TYPE) != 0;
    boolean valueIsDeclaredType = (chunkHeader & VALUE_DECL_TYPE) != 0;
    int chunkSize = buffer.readUnsignedByte();
    Serializer keySerializer, valueSerializer;
    if (!keyIsDeclaredType) {
      keySerializer = typeResolver.readClassInfo(buffer, keyClassInfoReadCache).getSerializer();
    } else {
      keySerializer = keyGenericType.getSerializer(typeResolver);
    }
    if (!valueIsDeclaredType) {
      valueSerializer = typeResolver.readClassInfo(buffer, valueClassInfoReadCache).getSerializer();
    } else {
      valueSerializer = valueGenericType.getSerializer(typeResolver);
    }
    if (keyGenericType.hasGenericParameters() || valueGenericType.hasGenericParameters()) {
      // Nested generics: push/pop the matching generic type around each key/value read.
      for (int i = 0; i < chunkSize; i++) {
        generics.pushGenericType(keyGenericType);
        fory.incReadDepth();
        Object key =
            trackKeyRef
                ? binding.readRef(buffer, keySerializer)
                : binding.read(buffer, keySerializer);
        fory.decDepth();
        generics.popGenericType();
        generics.pushGenericType(valueGenericType);
        fory.incReadDepth();
        Object value =
            trackValueRef
                ? binding.readRef(buffer, valueSerializer)
                : binding.read(buffer, valueSerializer);
        fory.decDepth();
        generics.popGenericType();
        map.put(key, value);
        size--;
      }
    } else {
      for (int i = 0; i < chunkSize; i++) {
        // increase depth to avoid read wrong outer generic type
        fory.incReadDepth();
        Object key =
            trackKeyRef
                ? binding.readRef(buffer, keySerializer)
                : binding.read(buffer, keySerializer);
        Object value =
            trackValueRef
                ? binding.readRef(buffer, valueSerializer)
                : binding.read(buffer, valueSerializer);
        fory.decDepth();
        map.put(key, value);
        size--;
      }
    }
    return size > 0 ? (size << 8) | buffer.readUnsignedByte() : 0;
  }

  /**
   * Hook for java serialization codegen, read/write key/value by entrySet.
   *
   * <p>For key/value type which is final, using codegen may get a big performance gain
   *
   * @return true if read/write key/value support calling entrySet method
   */
  public final boolean supportCodegenHook() {
    return supportCodegenHook;
  }

  /**
   * Write data except size and elements.
   *
   * <ol>
   *   In codegen, follows is call order:
   *   <li>write map class if not final
   *   <li>write map size
   *   <li>onCollectionWrite
   *   <li>write keys/values
   * </ol>
   */
  public abstract Map onMapWrite(MemoryBuffer buffer, T value);

  /** Hook invoked after all entries of {@code map} have been written; default is a no-op. */
  public void onMapWriteFinish(Map map) {}

  /**
   * Read data except size and elements, return empty map to be filled.
   *
   * <ol>
   *   In codegen, follows is call order:
   *   <li>read map class if not final
   *   <li>newMap: read and set map size, read map header and create map.
   *   <li>read keys/values
   * </ol>
   *
   * <p>Map must have default constructor to be invoked by fory, otherwise created object can't be
   * used to adding elements. For example:
   *
   * <pre>{@code new ArrayList<Integer> {add(1);}}</pre>
   *
   * <p>without default constructor, created list will have elementData as null, adding elements
   * will raise NPE.
   */
  public Map newMap(MemoryBuffer buffer) {
    numElements = buffer.readVarUint32Small7();
    if (constructor == null) {
      constructor = ReflectionUtils.getCtrHandle(type, true);
    }
    try {
      Map instance = (Map) constructor.invoke();
      // Register the instance for ref tracking before entries are read, so self-references work.
      fory.getRefResolver().reference(instance);
      return instance;
    } catch (Throwable e) {
      throw new IllegalArgumentException(
          "Please provide public no arguments constructor for class " + type, e);
    }
  }

  /** Create a new empty map for copy. */
  public Map newMap(Map map) {
    numElements = map.size();
    if (constructor == null) {
      constructor = ReflectionUtils.getCtrHandle(type, true);
    }
    try {
      return (Map) constructor.invoke();
    } catch (Throwable e) {
      throw new IllegalArgumentException(
          "Please provide public no arguments constructor for class " + type, e);
    }
  }

  /**
   * Get and reset numElements of deserializing collection. Should be called after {@link
   * #newMap(MemoryBuffer buffer)}. Nested read may overwrite this element, reset is necessary to
   * avoid use wrong value by mistake.
   */
  public int getAndClearNumElements() {
    int size = numElements;
    numElements = -1; // nested read may overwrite this element.
    return size;
  }

  public void setNumElements(int numElements) {
    this.numElements = numElements;
  }

  public abstract T onMapCopy(Map map);

  public abstract T onMapRead(Map map);
}
apache/ignite-3
36,344
modules/client/src/main/java/org/apache/ignite/internal/client/TcpClientChannel.java
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.ignite.internal.client; import static java.util.concurrent.CompletableFuture.completedFuture; import static java.util.concurrent.CompletableFuture.failedFuture; import static org.apache.ignite.internal.util.ExceptionUtils.copyExceptionWithCause; import static org.apache.ignite.internal.util.ExceptionUtils.sneakyThrow; import static org.apache.ignite.internal.util.FastTimestamps.coarseCurrentTimeMillis; import static org.apache.ignite.lang.ErrorGroups.Client.CONNECTION_ERR; import static org.apache.ignite.lang.ErrorGroups.Client.PROTOCOL_ERR; import io.netty.buffer.ByteBuf; import io.netty.buffer.Unpooled; import io.netty.channel.ChannelFuture; import java.net.InetSocketAddress; import java.util.ArrayList; import java.util.BitSet; import java.util.EnumSet; import java.util.List; import java.util.Map; import java.util.Map.Entry; import java.util.Timer; import java.util.TimerTask; import java.util.UUID; import java.util.concurrent.CompletableFuture; import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.ConcurrentMap; import java.util.concurrent.Executor; import java.util.concurrent.ForkJoinPool; import java.util.concurrent.TimeUnit; import 
java.util.concurrent.TimeoutException; import java.util.concurrent.atomic.AtomicBoolean; import java.util.concurrent.atomic.AtomicLong; import java.util.function.Consumer; import org.apache.ignite.client.IgniteClientAuthenticator; import org.apache.ignite.client.IgniteClientConnectionException; import org.apache.ignite.internal.client.io.ClientConnection; import org.apache.ignite.internal.client.io.ClientConnectionMultiplexer; import org.apache.ignite.internal.client.io.ClientConnectionStateHandler; import org.apache.ignite.internal.client.io.ClientMessageHandler; import org.apache.ignite.internal.client.proto.ClientMessageCommon; import org.apache.ignite.internal.client.proto.ClientMessagePacker; import org.apache.ignite.internal.client.proto.ClientMessageUnpacker; import org.apache.ignite.internal.client.proto.ClientOp; import org.apache.ignite.internal.client.proto.ErrorExtensions; import org.apache.ignite.internal.client.proto.HandshakeExtension; import org.apache.ignite.internal.client.proto.HandshakeUtils; import org.apache.ignite.internal.client.proto.ProtocolBitmaskFeature; import org.apache.ignite.internal.client.proto.ProtocolVersion; import org.apache.ignite.internal.client.proto.ResponseFlags; import org.apache.ignite.internal.future.timeout.TimeoutObject; import org.apache.ignite.internal.logger.IgniteLogger; import org.apache.ignite.internal.thread.PublicApiThreading; import org.apache.ignite.internal.tostring.S; import org.apache.ignite.internal.util.ViewUtils; import org.apache.ignite.lang.ErrorGroups.Table; import org.apache.ignite.lang.IgniteException; import org.apache.ignite.network.NetworkAddress; import org.apache.ignite.sql.SqlBatchException; import org.jetbrains.annotations.Nullable; /** * Implements {@link ClientChannel} over TCP. */ class TcpClientChannel implements ClientChannel, ClientMessageHandler, ClientConnectionStateHandler { /** Protocol version used by default on first connection attempt. 
     */
    private static final ProtocolVersion DEFAULT_VERSION = ProtocolVersion.LATEST_VER;

    /** Features this client advertises during handshake; the server intersects them with its own set. */
    private static final BitSet SUPPORTED_FEATURES = ProtocolBitmaskFeature.featuresAsBitSet(EnumSet.of(
            ProtocolBitmaskFeature.USER_ATTRIBUTES,
            ProtocolBitmaskFeature.TABLE_GET_REQS_USE_QUALIFIED_NAME,
            ProtocolBitmaskFeature.TX_DIRECT_MAPPING,
            ProtocolBitmaskFeature.PLATFORM_COMPUTE_JOB,
            ProtocolBitmaskFeature.COMPUTE_TASK_ID,
            ProtocolBitmaskFeature.TX_DELAYED_ACKS,
            ProtocolBitmaskFeature.TX_PIGGYBACK,
            ProtocolBitmaskFeature.TX_ALLOW_NOOP_ENLIST,
            ProtocolBitmaskFeature.SQL_PARTITION_AWARENESS,
            ProtocolBitmaskFeature.SQL_DIRECT_TX_MAPPING
    ));

    /** Minimum supported heartbeat interval, in milliseconds (lower bound for the derived interval). */
    private static final long MIN_RECOMMENDED_HEARTBEAT_INTERVAL = 500;

    /** Channel configuration. */
    private final ClientChannelConfiguration cfg;

    /** Metrics. */
    private final ClientMetricSource metrics;

    /** Protocol context; non-null only after a successful handshake. */
    private volatile ProtocolContext protocolCtx;

    /** Underlying connection; assigned once the socket is opened. */
    private volatile ClientConnection sock;

    /** Request id generator. Starts at 1; id -1 is reserved for the handshake. */
    private final AtomicLong reqId = new AtomicLong(1);

    /** Pending requests, keyed by request id. */
    private final ConcurrentMap<Long, TimeoutObjectImpl> pendingReqs = new ConcurrentHashMap<>();

    /** Notification handlers, keyed by the originating request id. */
    private final Map<Long, CompletableFuture<PayloadInputChannel>> notificationHandlers = new ConcurrentHashMap<>();

    /** Topology change listener (receives max assignment start time). */
    private final Consumer<Long> assignmentChangeListener;

    /** Observable timestamp listener. */
    private final Consumer<Long> observableTimestampListener;

    /** Transaction inflights tracker. */
    private final ClientTransactionInflights inflights;

    /** Closed flag; guards close() against double execution. */
    private final AtomicBoolean closed = new AtomicBoolean();

    /** Executor for async operation listeners. */
    private final Executor asyncContinuationExecutor;

    /** Connect timeout in milliseconds. */
    private final long connectTimeout;

    /** Heartbeat timeout in milliseconds. */
    private final long heartbeatTimeout;

    /** Operation timeout in milliseconds.
     */
    private final long operationTimeout;

    /** Heartbeat timer; created after a successful handshake, cancelled on close. */
    private volatile Timer heartbeatTimer;

    /** Logger. */
    private final IgniteLogger log;

    /** Last send operation timestamp. */
    private volatile long lastSendMillis;

    /** Last receive operation timestamp. */
    private volatile long lastReceiveMillis;

    /**
     * Constructor.
     *
     * @param cfg Config.
     * @param metrics Metrics.
     */
    private TcpClientChannel(
            ClientChannelConfiguration cfg,
            ClientMetricSource metrics,
            Consumer<Long> assignmentChangeListener,
            Consumer<Long> observableTimestampListener,
            ClientTransactionInflights inflights) {
        validateConfiguration(cfg);

        this.cfg = cfg;
        this.metrics = metrics;
        this.assignmentChangeListener = assignmentChangeListener;
        this.observableTimestampListener = observableTimestampListener;
        this.inflights = inflights;

        log = ClientUtils.logger(cfg.clientConfiguration(), TcpClientChannel.class);

        // Fall back to the common pool when the user did not supply an executor.
        asyncContinuationExecutor = cfg.clientConfiguration().asyncContinuationExecutor() == null
                ? ForkJoinPool.commonPool()
                : cfg.clientConfiguration().asyncContinuationExecutor();

        connectTimeout = cfg.clientConfiguration().connectTimeout();
        heartbeatTimeout = cfg.clientConfiguration().heartbeatTimeout();
        operationTimeout = cfg.clientConfiguration().operationTimeout();
    }

    // Opens the connection, performs the handshake, then starts the heartbeat timer.
    private CompletableFuture<ClientChannel> initAsync(ClientConnectionMultiplexer connMgr) {
        return connMgr
                .openAsync(cfg.getAddress(), this, this)
                .thenCompose(s -> {
                    if (log.isDebugEnabled()) {
                        log.debug("Connection established [remoteAddress=" + s.remoteAddress() + ']');
                    }

                    ClientTimeoutWorker.INSTANCE.registerClientChannel(this);

                    sock = s;

                    return handshakeAsync(DEFAULT_VERSION);
                })
                .whenComplete((res, err) -> {
                    // Any failure during connect/handshake must release the socket.
                    if (err != null) {
                        close();
                    }
                })
                .thenApplyAsync(unused -> {
                    // Netty has a built-in IdleStateHandler to detect idle connections (used on the server side).
                    // However, to adjust the heartbeat interval dynamically, we have to use a timer here.
                    if (protocolCtx != null) {
                        heartbeatTimer = initHeartbeat(cfg.clientConfiguration().heartbeatInterval());
                    }

                    return this;
                }, asyncContinuationExecutor);
    }

    /**
     * Creates a new channel asynchronously.
     *
     * @param cfg Configuration.
     * @param connMgr Connection manager.
     * @param metrics Metrics.
     * @return Channel.
     */
    static CompletableFuture<ClientChannel> createAsync(
            ClientChannelConfiguration cfg,
            ClientConnectionMultiplexer connMgr,
            ClientMetricSource metrics,
            Consumer<Long> assignmentChangeListener,
            Consumer<Long> observableTimestampListener,
            ClientTransactionInflights inflights) {
        //noinspection resource - returned from method.
        return new TcpClientChannel(cfg, metrics, assignmentChangeListener, observableTimestampListener, inflights)
                .initAsync(connMgr);
    }

    /** {@inheritDoc} */
    @Override
    public void close() {
        close(null, true);
    }

    /**
     * Close the channel with cause.
     */
    private void close(@Nullable Throwable cause, boolean graceful) {
        // First close wins; subsequent calls are no-ops.
        if (!closed.compareAndSet(false, true)) {
            return;
        }

        if (cause != null && (cause instanceof TimeoutException || cause.getCause() instanceof TimeoutException)) {
            metrics.connectionsLostTimeoutIncrement();
        } else if (!graceful) {
            metrics.connectionsLostIncrement();
        }

        // Disconnect can happen before we initialize the timer.
        var timer = heartbeatTimer;

        if (timer != null) {
            timer.cancel();
        }

        // Fail every pending request and notification handler so callers do not hang.
        for (TimeoutObjectImpl pendingReq : pendingReqs.values()) {
            pendingReq.future().completeExceptionally(
                    new IgniteClientConnectionException(CONNECTION_ERR, "Channel is closed", endpoint(), cause));
        }

        for (CompletableFuture<PayloadInputChannel> handler : notificationHandlers.values()) {
            try {
                handler.completeExceptionally(
                        new IgniteClientConnectionException(CONNECTION_ERR, "Channel is closed", endpoint(), cause));
            } catch (Throwable ignored) {
                // Ignore.
            }
        }

        if (sock != null) {
            try {
                sock.close();
            } catch (Throwable t) {
                log.warn("Failed to close the channel [remoteAddress=" + cfg.getAddress() + "]: " + t.getMessage(), t);
            }
        }
    }

    /** {@inheritDoc} */
    @Override
    public void onMessage(ByteBuf buf) {
        lastReceiveMillis = System.currentTimeMillis();

        try (var unpacker = new ClientMessageUnpacker(buf)) {
            processNextMessage(unpacker);
        } catch (Throwable t) {
            // Any protocol-level failure is fatal for the connection.
            close(t, false);
        }
    }

    /** {@inheritDoc} */
    @Override
    public void onDisconnected(@Nullable Exception e) {
        if (log.isDebugEnabled()) {
            log.debug("Connection closed [remoteAddress=" + cfg.getAddress() + ']');
        }

        close(e, false);
    }

    /** {@inheritDoc} */
    @Override
    public <T> CompletableFuture<T> serviceAsync(
            int opCode,
            @Nullable PayloadWriter payloadWriter,
            @Nullable PayloadReader<T> payloadReader,
            boolean expectNotifications
    ) {
        try {
            if (log.isTraceEnabled()) {
                log.trace("Sending request [opCode=" + opCode + ", remoteAddress=" + cfg.getAddress() + ']');
            }

            long id = reqId.getAndIncrement();

            CompletableFuture<PayloadInputChannel> notificationFut = null;

            if (expectNotifications) {
                // Notification can arrive before the response to the current request.
                // This is fine, because we use the same id and register the handler before sending the request.
                notificationFut = new CompletableFuture<>();
                notificationHandlers.put(id, notificationFut);
            }

            return send(opCode, id, payloadWriter, payloadReader, notificationFut, operationTimeout);
        } catch (Throwable t) {
            return failedFuture(t);
        }
    }

    /**
     * Sends request.
     *
     * @param opCode Operation code.
     * @param id Request id.
     * @param payloadWriter Payload writer to stream or {@code null} if request has no payload.
     * @param payloadReader Payload reader or {@code null} if response payload is ignored.
     * @param notificationFut Optional notification future.
     * @param timeout Operation timeout in milliseconds; non-positive means no timeout.
     * @return Request future.
*/ private <T> CompletableFuture<T> send( int opCode, long id, @Nullable PayloadWriter payloadWriter, @Nullable PayloadReader<T> payloadReader, @Nullable CompletableFuture<PayloadInputChannel> notificationFut, long timeout ) { if (closed()) { throw new IgniteClientConnectionException(CONNECTION_ERR, "Channel is closed", endpoint()); } var fut = new CompletableFuture<ClientMessageUnpacker>(); pendingReqs.put(id, new TimeoutObjectImpl(timeout, fut)); metrics.requestsActiveIncrement(); PayloadOutputChannel payloadCh = new PayloadOutputChannel(this, new ClientMessagePacker(sock.getBuffer()), id); try { var req = payloadCh.out(); req.packInt(opCode); req.packLong(id); if (payloadWriter != null) { payloadWriter.accept(payloadCh); } write(req).addListener(f -> { if (!f.isSuccess()) { String msg = "Failed to send request [id=" + id + ", op=" + opCode + ", remoteAddress=" + cfg.getAddress() + "]"; IgniteClientConnectionException ex = new IgniteClientConnectionException(CONNECTION_ERR, msg, endpoint(), f.cause()); fut.completeExceptionally(ex); log.warn(msg + "]: " + f.cause().getMessage(), f.cause()); pendingReqs.remove(id); metrics.requestsActiveDecrement(); // Close immediately, do not wait for onDisconnected call from Netty. onDisconnected(ex); } else { metrics.requestsSentIncrement(); Runnable action = payloadCh.onSentAction(); if (action != null) { asyncContinuationExecutor.execute(action); } } }); // Allow parallelism for batch operations. if (PublicApiThreading.executingSyncPublicApi() && !ClientOp.isBatch(opCode)) { // We are in the public API (user) thread, deserialize the response here. try { ClientMessageUnpacker unpacker = fut.join(); return completedFuture(complete(payloadReader, notificationFut, unpacker)); } catch (Throwable t) { throw sneakyThrow(ViewUtils.ensurePublicException(t)); } } // Handle the response in the async continuation pool with completeAsync. 
return fut .thenCompose(unpacker -> completeAsync(payloadReader, notificationFut, unpacker)) .exceptionally(err -> { throw sneakyThrow(ViewUtils.ensurePublicException(err)); }); } catch (Throwable t) { log.warn("Failed to send request [id=" + id + ", op=" + opCode + ", remoteAddress=" + cfg.getAddress() + "]: " + t.getMessage(), t); // Close buffer manually on fail. Successful write closes the buffer automatically. payloadCh.close(); pendingReqs.remove(id); metrics.requestsActiveDecrement(); throw sneakyThrow(ViewUtils.ensurePublicException(t)); } } private <T> CompletableFuture<T> completeAsync( @Nullable PayloadReader<T> payloadReader, @Nullable CompletableFuture<PayloadInputChannel> notificationFut, ClientMessageUnpacker unpacker ) { try { CompletableFuture<T> resFut = new CompletableFuture<>(); // Use asyncContinuationExecutor explicitly to close unpacker if the executor throws. // With handleAsync et al we can't close the unpacker in that case. asyncContinuationExecutor.execute(() -> { try { resFut.complete(complete(payloadReader, notificationFut, unpacker)); } catch (Throwable t) { resFut.completeExceptionally(ViewUtils.ensurePublicException(t)); } }); return resFut; } catch (Throwable t) { unpacker.close(); return failedFuture(ViewUtils.ensurePublicException(t)); } } /** * Completes the request future. * * @param payloadReader Payload reader. * @param notificationFut Notify future. * @param unpacker Unpacked message. 
*/ private <T> @Nullable T complete( @Nullable PayloadReader<T> payloadReader, @Nullable CompletableFuture<PayloadInputChannel> notificationFut, ClientMessageUnpacker unpacker ) { try (unpacker) { if (payloadReader != null) { return payloadReader.apply(new PayloadInputChannel(this, unpacker, notificationFut)); } return null; } catch (Throwable e) { log.error("Failed to deserialize server response [remoteAddress=" + cfg.getAddress() + "]: " + e.getMessage(), e); throw new IgniteException(PROTOCOL_ERR, "Failed to deserialize server response: " + e.getMessage(), e); } } /** * Process next message from the input stream and complete corresponding future. */ private void processNextMessage(ClientMessageUnpacker unpacker) throws IgniteException { if (protocolCtx == null) { // Process handshake. completeRequestFuture(pendingReqs.remove(-1L).future(), unpacker); return; } Long resId = unpacker.unpackLong(); int flags = unpacker.unpackInt(); handlePartitionAssignmentChange(flags, unpacker); handleObservableTimestamp(unpacker); Throwable err = ResponseFlags.getErrorFlag(flags) ? 
readError(unpacker) : null; if (ResponseFlags.getNotificationFlag(flags)) { handleNotification(resId, unpacker, err); return; } TimeoutObjectImpl pendingReq = pendingReqs.remove(resId); if (pendingReq == null) { log.error("Unexpected response ID [remoteAddress=" + cfg.getAddress() + "]: " + resId); throw new IgniteClientConnectionException(PROTOCOL_ERR, String.format("Unexpected response ID [%s]", resId), endpoint()); } metrics.requestsActiveDecrement(); if (err == null) { metrics.requestsCompletedIncrement(); completeRequestFuture(pendingReq.future(), unpacker); } else { metrics.requestsFailedIncrement(); notificationHandlers.remove(resId); pendingReq.future().completeExceptionally(err); } } private void handleObservableTimestamp(ClientMessageUnpacker unpacker) { long observableTimestamp = unpacker.unpackLong(); observableTimestampListener.accept(observableTimestamp); } private void handlePartitionAssignmentChange(int flags, ClientMessageUnpacker unpacker) { if (ResponseFlags.getPartitionAssignmentChangedFlag(flags)) { if (log.isInfoEnabled()) { log.info("Partition assignment change notification received [remoteAddress=" + cfg.getAddress() + "]"); } long maxStartTime = unpacker.unpackLong(); assignmentChangeListener.accept(maxStartTime); } } private void handleNotification(long id, ClientMessageUnpacker unpacker, @Nullable Throwable err) { // One-shot notification handler - remove immediately. CompletableFuture<PayloadInputChannel> handler = notificationHandlers.remove(id); if (handler == null) { // Default notification handler. Used to deliver delayed replication acks. UUID txId = unpacker.unpackUuid(); inflights.removeInflight(txId, err); return; } completeNotificationFuture(handler, unpacker, err); } /** * Unpacks request error. * * @param unpacker Unpacker. * @return Exception. 
     */
    private static Throwable readError(ClientMessageUnpacker unpacker) {
        var traceId = unpacker.unpackUuid();
        var code = unpacker.unpackInt();
        var errClassName = unpacker.unpackString();
        var errMsg = unpacker.tryUnpackNil() ? null : unpacker.unpackString();

        // Optional server-side stack trace, carried as a nested cause exception.
        IgniteException causeWithStackTrace = unpacker.tryUnpackNil() ? null : new IgniteException(traceId, code, unpacker.unpackString());

        // Optional extension map with error-specific details.
        int extSize = unpacker.tryUnpackNil() ? 0 : unpacker.unpackInt();

        int expectedSchemaVersion = -1;
        long[] sqlUpdateCounters = null;

        for (int i = 0; i < extSize; i++) {
            String key = unpacker.unpackString();

            if (key.equals(ErrorExtensions.EXPECTED_SCHEMA_VERSION)) {
                expectedSchemaVersion = unpacker.unpackInt();
            } else if (key.equals(ErrorExtensions.SQL_UPDATE_COUNTERS)) {
                sqlUpdateCounters = unpacker.unpackLongArray();
            } else {
                // Unknown extension - ignore.
                unpacker.skipValues(1);
            }
        }

        if (code == Table.SCHEMA_VERSION_MISMATCH_ERR) {
            if (expectedSchemaVersion == -1) {
                return new IgniteException(
                        traceId, PROTOCOL_ERR, "Expected schema version is not specified in error extension map.", causeWithStackTrace);
            }

            return new ClientSchemaVersionMismatchException(traceId, code, errMsg, expectedSchemaVersion, causeWithStackTrace);
        }

        if (sqlUpdateCounters != null) {
            errMsg = errMsg != null ? errMsg : "SQL batch execution error";

            return new SqlBatchException(traceId, code, sqlUpdateCounters, errMsg, causeWithStackTrace);
        }

        try {
            // Try to reconstruct the exact exception class reported by the server.
            Class<? extends Throwable> errCls = (Class<? extends Throwable>) Class.forName(errClassName);

            return copyExceptionWithCause(errCls, traceId, code, errMsg, causeWithStackTrace);
        } catch (ClassNotFoundException ignored) {
            // Ignore: incompatible exception class. Fall back to generic exception.
        }

        return new IgniteException(traceId, code, errClassName + ": " + errMsg, causeWithStackTrace);
    }

    /** {@inheritDoc} */
    @Override
    public boolean closed() {
        return closed.get();
    }

    /** {@inheritDoc} */
    @Override
    public ProtocolContext protocolContext() {
        return protocolCtx;
    }

    @Override
    public ClientTransactionInflights inflights() {
        return inflights;
    }

    // Fails fast on an invalid configuration (currently: address must be present).
    private static void validateConfiguration(ClientChannelConfiguration cfg) {
        String error = null;

        InetSocketAddress addr = cfg.getAddress();

        if (addr == null) {
            error = "At least one Ignite server node must be specified in the Ignite client configuration";
        }

        if (error != null) {
            throw new IllegalArgumentException(error);
        }
    }

    /** Client handshake. */
    private CompletableFuture<Object> handshakeAsync(ProtocolVersion ver) throws IgniteClientConnectionException {
        var fut = new CompletableFuture<ClientMessageUnpacker>();

        // The handshake is registered under the reserved id -1 and bounded by the connect timeout.
        pendingReqs.put(-1L, new TimeoutObjectImpl(connectTimeout, fut));

        handshakeReqAsync(ver).addListener(f -> {
            if (!f.isSuccess()) {
                fut.completeExceptionally(
                        new IgniteClientConnectionException(CONNECTION_ERR, "Failed to send handshake request", endpoint(), f.cause()));
            }
        });

        return fut
                .thenCompose(unpacker -> completeAsync(r -> handshakeRes(r.in()), null, unpacker))
                .exceptionally(err -> {
                    if (err instanceof TimeoutException || err.getCause() instanceof TimeoutException) {
                        metrics.handshakesFailedTimeoutIncrement();

                        throw new IgniteClientConnectionException(CONNECTION_ERR, "Handshake timeout", endpoint(), err);
                    }

                    metrics.handshakesFailedIncrement();

                    throw new IgniteClientConnectionException(CONNECTION_ERR, "Handshake error", endpoint(), err);
                });
    }

    /**
     * Send handshake request.
     *
     * @return Channel future.
     */
    private ChannelFuture handshakeReqAsync(ProtocolVersion proposedVer) {
        // Magic bytes go first, outside the regular message framing.
        sock.send(Unpooled.wrappedBuffer(ClientMessageCommon.MAGIC_BYTES));

        var req = new ClientMessagePacker(sock.getBuffer());
        req.packInt(proposedVer.major());
        req.packInt(proposedVer.minor());
        req.packInt(proposedVer.patch());

        req.packInt(HandshakeUtils.CLIENT_TYPE_GENERAL);
        HandshakeUtils.packFeatures(req, SUPPORTED_FEATURES);

        IgniteClientAuthenticator authenticator = cfg.clientConfiguration().authenticator();

        if (authenticator != null) {
            Map<HandshakeExtension, Object> extensions = Map.of(
                    HandshakeExtension.AUTHENTICATION_TYPE, authenticator.type(),
                    HandshakeExtension.AUTHENTICATION_IDENTITY, authenticator.identity(),
                    HandshakeExtension.AUTHENTICATION_SECRET, authenticator.secret());

            HandshakeUtils.packExtensions(req, extensions);
        } else {
            HandshakeUtils.packExtensions(req, Map.of());
        }

        return write(req);
    }

    // Parses the handshake response and initializes the protocol context.
    // NOTE: the unpack calls below must stay in exact wire order.
    private @Nullable Object handshakeRes(ClientMessageUnpacker unpacker) {
        try {
            ProtocolVersion srvVer = new ProtocolVersion(unpacker.unpackShort(), unpacker.unpackShort(), unpacker.unpackShort());

            if (!unpacker.tryUnpackNil()) {
                // Non-nil here means the server reported a handshake error.
                throw sneakyThrow(readError(unpacker));
            }

            var serverIdleTimeout = unpacker.unpackLong();
            UUID clusterNodeId = unpacker.unpackUuid();
            var clusterNodeName = unpacker.unpackString();
            var addr = sock.remoteAddress();
            var clusterNode = new ClientClusterNode(clusterNodeId, clusterNodeName, new NetworkAddress(addr.getHostName(), addr.getPort()));

            int clusterIdsLen = unpacker.unpackInt();

            if (clusterIdsLen <= 0) {
                throw new IgniteClientConnectionException(PROTOCOL_ERR, "Unexpected cluster ids count: " + clusterIdsLen, endpoint());
            }

            List<UUID> clusterIds = new ArrayList<>(clusterIdsLen);

            for (int i = 0; i < clusterIdsLen; i++) {
                clusterIds.add(unpacker.unpackUuid());
            }

            var clusterName = unpacker.unpackString();

            long observableTimestamp = unpacker.unpackLong();
            observableTimestampListener.accept(observableTimestamp);

            unpacker.unpackByte(); // cluster version major
            unpacker.unpackByte(); // cluster version minor
            unpacker.unpackByte(); // cluster version maintenance
            unpacker.unpackByteNullable(); // cluster version patch
            unpacker.unpackStringNullable(); // cluster version pre release

            BitSet serverFeatures = HandshakeUtils.unpackFeatures(unpacker);
            HandshakeUtils.unpackExtensions(unpacker);

            // Only features supported by both sides are enabled.
            BitSet mutuallySupportedFeatures = HandshakeUtils.supportedFeatures(SUPPORTED_FEATURES, serverFeatures);
            EnumSet<ProtocolBitmaskFeature> features = ProtocolBitmaskFeature.enumSet(mutuallySupportedFeatures);

            protocolCtx = new ProtocolContext(srvVer, features, serverIdleTimeout, clusterNode, clusterIds, clusterName);

            return null;
        } catch (Throwable e) {
            log.warn("Failed to handle handshake response [remoteAddress=" + cfg.getAddress() + "]: " + e.getMessage(), e);

            throw e;
        }
    }

    /** Write bytes to the output stream. */
    private ChannelFuture write(ClientMessagePacker packer) throws IgniteClientConnectionException {
        // Ignore race condition here.
        lastSendMillis = System.currentTimeMillis();

        var buf = packer.getBuffer();

        return sock.send(buf);
    }

    /**
     * Initializes heartbeats.
     *
     * @param configuredInterval Configured heartbeat interval, in milliseconds.
     * @return Heartbeat timer.
     */
    private Timer initHeartbeat(long configuredInterval) {
        long heartbeatInterval = getHeartbeatInterval(configuredInterval);

        Timer timer = new Timer("tcp-client-channel-heartbeats-" + hashCode());

        timer.schedule(new HeartbeatTask(heartbeatInterval), heartbeatInterval, heartbeatInterval);

        return timer;
    }

    /**
     * Gets the heartbeat interval based on the configured value and server-side idle timeout.
     *
     * @param configuredInterval Configured interval.
     * @return Resolved interval.
     */
    private long getHeartbeatInterval(long configuredInterval) {
        long serverIdleTimeoutMs = protocolCtx.serverIdleTimeout();

        if (serverIdleTimeoutMs <= 0) {
            return configuredInterval;
        }

        // Stay well below the server idle timeout so the connection is not dropped between beats,
        // but never go below the minimum recommended interval.
        long recommendedHeartbeatInterval = serverIdleTimeoutMs / 3;

        if (recommendedHeartbeatInterval < MIN_RECOMMENDED_HEARTBEAT_INTERVAL) {
            recommendedHeartbeatInterval = MIN_RECOMMENDED_HEARTBEAT_INTERVAL;
        }

        return Math.min(configuredInterval, recommendedHeartbeatInterval);
    }

    @Override
    public String toString() {
        return S.toString(TcpClientChannel.class.getSimpleName(), "remoteAddress", sock.remoteAddress(), false);
    }

    @Override
    public String endpoint() {
        return cfg.getAddress().toString();
    }

    private static void completeRequestFuture(CompletableFuture<ClientMessageUnpacker> fut, ClientMessageUnpacker unpacker) {
        // Add reference count before jumping onto another thread (due to handleAsync() in send()).
        unpacker.retain();

        try {
            if (!fut.complete(unpacker)) {
                // Future was already completed (e.g. exceptionally by checkTimeouts) - release the buffer.
                unpacker.close();
            }
        } catch (Throwable t) {
            unpacker.close();

            throw t;
        }
    }

    private void completeNotificationFuture(
            CompletableFuture<PayloadInputChannel> fut, ClientMessageUnpacker unpacker, @Nullable Throwable err) {
        if (err != null) {
            asyncContinuationExecutor.execute(() -> fut.completeExceptionally(err));

            return;
        }

        // Add reference count before jumping onto another thread.
        unpacker.retain();

        try {
            asyncContinuationExecutor.execute(() -> {
                try {
                    if (!fut.complete(new PayloadInputChannel(this, unpacker, null))) {
                        unpacker.close();
                    }
                } catch (Throwable e) {
                    unpacker.close();

                    log.error("Failed to handle server notification [remoteAddress=" + cfg.getAddress() + "]: " + e.getMessage(), e);
                }
            });
        } catch (Throwable t) {
            unpacker.close();

            throw t;
        }
    }

    // Fails pending requests whose deadline has passed.
    // Presumably driven by ClientTimeoutWorker (this channel registers itself in initAsync) - confirm against caller.
    void checkTimeouts(long now) {
        for (Entry<Long, TimeoutObjectImpl> req : pendingReqs.entrySet()) {
            TimeoutObject<CompletableFuture<ClientMessageUnpacker>> timeoutObject = req.getValue();

            if (timeoutObject != null && timeoutObject.endTime() > 0 && now > timeoutObject.endTime()) {
                // Client-facing future will fail with a timeout, but internal ClientRequestFuture will stay in the map -
                // otherwise we'll fail with "protocol breakdown" error when a late response arrives from the server.
                CompletableFuture<?> fut = timeoutObject.future();

                fut.completeExceptionally(new TimeoutException());
            }
        }
    }

    /**
     * Timeout object wrapper for the completable future.
     */
    private static class TimeoutObjectImpl implements TimeoutObject<CompletableFuture<ClientMessageUnpacker>> {
        /** End time (milliseconds since Unix epoch); 0 means no timeout. */
        private final long endTime;

        /** Target future. */
        private final CompletableFuture<ClientMessageUnpacker> fut;

        /**
         * Constructor.
         *
         * @param timeout Timeout in milliseconds; non-positive disables the timeout.
         * @param fut Target future.
         */
        private TimeoutObjectImpl(long timeout, CompletableFuture<ClientMessageUnpacker> fut) {
            this.endTime = timeout > 0 ? coarseCurrentTimeMillis() + timeout : 0;
            this.fut = fut;
        }

        @Override
        public long endTime() {
            return endTime;
        }

        @Override
        public CompletableFuture<ClientMessageUnpacker> future() {
            return fut;
        }
    }

    /**
     * Sends heartbeat messages.
     */
    private class HeartbeatTask extends TimerTask {
        /** Heartbeat interval. */
        private final long interval;

        /** Constructor.
         */
        HeartbeatTask(long interval) {
            this.interval = interval;
        }

        /** {@inheritDoc} */
        @Override
        public void run() {
            try {
                // Only send a heartbeat when nothing else was sent during the last interval.
                if (System.currentTimeMillis() - lastSendMillis > interval) {
                    var fut = serviceAsync(ClientOp.HEARTBEAT, null, null, false);

                    if (heartbeatTimeout > 0) {
                        fut
                                .orTimeout(heartbeatTimeout, TimeUnit.MILLISECONDS)
                                .exceptionally(e -> {
                                    if (e instanceof TimeoutException) {
                                        long lastResponseAge = System.currentTimeMillis() - lastReceiveMillis;

                                        if (lastResponseAge < heartbeatTimeout) {
                                            // The last response was received within the timeout, so the connection is still alive.
                                            // Ignore the timeout from heartbeat message.
                                            return null;
                                        }

                                        log.warn("Heartbeat timeout, closing the channel [remoteAddress=" + cfg.getAddress() + ']');

                                        close(new IgniteClientConnectionException(
                                                CONNECTION_ERR, "Heartbeat timeout", endpoint(), e), false);
                                    }

                                    return null;
                                });
                    }
                }
            } catch (Throwable e) {
                // A failed heartbeat attempt must not kill the timer thread.
                log.warn("Failed to send heartbeat [remoteAddress=" + cfg.getAddress() + "]: " + e.getMessage(), e);
            }
        }
    }
}
googleapis/google-cloud-java
36,136
java-service-management/proto-google-cloud-service-management-v1/src/main/java/com/google/api/servicemanagement/v1/ListServiceRolloutsResponse.java
/* * Copyright 2025 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ // Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/api/servicemanagement/v1/servicemanager.proto // Protobuf Java Version: 3.25.8 package com.google.api.servicemanagement.v1; /** * * * <pre> * Response message for ListServiceRollouts method. * </pre> * * Protobuf type {@code google.api.servicemanagement.v1.ListServiceRolloutsResponse} */ public final class ListServiceRolloutsResponse extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.api.servicemanagement.v1.ListServiceRolloutsResponse) ListServiceRolloutsResponseOrBuilder { private static final long serialVersionUID = 0L; // Use ListServiceRolloutsResponse.newBuilder() to construct. 
private ListServiceRolloutsResponse(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private ListServiceRolloutsResponse() { rollouts_ = java.util.Collections.emptyList(); nextPageToken_ = ""; } @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance(UnusedPrivateParameter unused) { return new ListServiceRolloutsResponse(); } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.api.servicemanagement.v1.ServiceManagerProto .internal_static_google_api_servicemanagement_v1_ListServiceRolloutsResponse_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.api.servicemanagement.v1.ServiceManagerProto .internal_static_google_api_servicemanagement_v1_ListServiceRolloutsResponse_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.api.servicemanagement.v1.ListServiceRolloutsResponse.class, com.google.api.servicemanagement.v1.ListServiceRolloutsResponse.Builder.class); } public static final int ROLLOUTS_FIELD_NUMBER = 1; @SuppressWarnings("serial") private java.util.List<com.google.api.servicemanagement.v1.Rollout> rollouts_; /** * * * <pre> * The list of rollout resources. * </pre> * * <code>repeated .google.api.servicemanagement.v1.Rollout rollouts = 1;</code> */ @java.lang.Override public java.util.List<com.google.api.servicemanagement.v1.Rollout> getRolloutsList() { return rollouts_; } /** * * * <pre> * The list of rollout resources. * </pre> * * <code>repeated .google.api.servicemanagement.v1.Rollout rollouts = 1;</code> */ @java.lang.Override public java.util.List<? extends com.google.api.servicemanagement.v1.RolloutOrBuilder> getRolloutsOrBuilderList() { return rollouts_; } /** * * * <pre> * The list of rollout resources. 
* </pre> * * <code>repeated .google.api.servicemanagement.v1.Rollout rollouts = 1;</code> */ @java.lang.Override public int getRolloutsCount() { return rollouts_.size(); } /** * * * <pre> * The list of rollout resources. * </pre> * * <code>repeated .google.api.servicemanagement.v1.Rollout rollouts = 1;</code> */ @java.lang.Override public com.google.api.servicemanagement.v1.Rollout getRollouts(int index) { return rollouts_.get(index); } /** * * * <pre> * The list of rollout resources. * </pre> * * <code>repeated .google.api.servicemanagement.v1.Rollout rollouts = 1;</code> */ @java.lang.Override public com.google.api.servicemanagement.v1.RolloutOrBuilder getRolloutsOrBuilder(int index) { return rollouts_.get(index); } public static final int NEXT_PAGE_TOKEN_FIELD_NUMBER = 2; @SuppressWarnings("serial") private volatile java.lang.Object nextPageToken_ = ""; /** * * * <pre> * The token of the next page of results. * </pre> * * <code>string next_page_token = 2;</code> * * @return The nextPageToken. */ @java.lang.Override public java.lang.String getNextPageToken() { java.lang.Object ref = nextPageToken_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); nextPageToken_ = s; return s; } } /** * * * <pre> * The token of the next page of results. * </pre> * * <code>string next_page_token = 2;</code> * * @return The bytes for nextPageToken. 
*/ @java.lang.Override public com.google.protobuf.ByteString getNextPageTokenBytes() { java.lang.Object ref = nextPageToken_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); nextPageToken_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { for (int i = 0; i < rollouts_.size(); i++) { output.writeMessage(1, rollouts_.get(i)); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 2, nextPageToken_); } getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; for (int i = 0; i < rollouts_.size(); i++) { size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, rollouts_.get(i)); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, nextPageToken_); } size += getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof com.google.api.servicemanagement.v1.ListServiceRolloutsResponse)) { return super.equals(obj); } com.google.api.servicemanagement.v1.ListServiceRolloutsResponse other = (com.google.api.servicemanagement.v1.ListServiceRolloutsResponse) obj; if (!getRolloutsList().equals(other.getRolloutsList())) return false; if 
(!getNextPageToken().equals(other.getNextPageToken())) return false; if (!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); if (getRolloutsCount() > 0) { hash = (37 * hash) + ROLLOUTS_FIELD_NUMBER; hash = (53 * hash) + getRolloutsList().hashCode(); } hash = (37 * hash) + NEXT_PAGE_TOKEN_FIELD_NUMBER; hash = (53 * hash) + getNextPageToken().hashCode(); hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static com.google.api.servicemanagement.v1.ListServiceRolloutsResponse parseFrom( java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.api.servicemanagement.v1.ListServiceRolloutsResponse parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.api.servicemanagement.v1.ListServiceRolloutsResponse parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.api.servicemanagement.v1.ListServiceRolloutsResponse parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.api.servicemanagement.v1.ListServiceRolloutsResponse parseFrom( byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.api.servicemanagement.v1.ListServiceRolloutsResponse parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite 
extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.api.servicemanagement.v1.ListServiceRolloutsResponse parseFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.api.servicemanagement.v1.ListServiceRolloutsResponse parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } public static com.google.api.servicemanagement.v1.ListServiceRolloutsResponse parseDelimitedFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); } public static com.google.api.servicemanagement.v1.ListServiceRolloutsResponse parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException( PARSER, input, extensionRegistry); } public static com.google.api.servicemanagement.v1.ListServiceRolloutsResponse parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.api.servicemanagement.v1.ListServiceRolloutsResponse parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static 
Builder newBuilder( com.google.api.servicemanagement.v1.ListServiceRolloutsResponse prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * * * <pre> * Response message for ListServiceRollouts method. * </pre> * * Protobuf type {@code google.api.servicemanagement.v1.ListServiceRolloutsResponse} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:google.api.servicemanagement.v1.ListServiceRolloutsResponse) com.google.api.servicemanagement.v1.ListServiceRolloutsResponseOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.api.servicemanagement.v1.ServiceManagerProto .internal_static_google_api_servicemanagement_v1_ListServiceRolloutsResponse_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.api.servicemanagement.v1.ServiceManagerProto .internal_static_google_api_servicemanagement_v1_ListServiceRolloutsResponse_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.api.servicemanagement.v1.ListServiceRolloutsResponse.class, com.google.api.servicemanagement.v1.ListServiceRolloutsResponse.Builder.class); } // Construct using com.google.api.servicemanagement.v1.ListServiceRolloutsResponse.newBuilder() private Builder() {} private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); } @java.lang.Override public Builder clear() { super.clear(); bitField0_ = 0; if (rolloutsBuilder_ == null) { rollouts_ = java.util.Collections.emptyList(); } else 
{ rollouts_ = null; rolloutsBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000001); nextPageToken_ = ""; return this; } @java.lang.Override public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return com.google.api.servicemanagement.v1.ServiceManagerProto .internal_static_google_api_servicemanagement_v1_ListServiceRolloutsResponse_descriptor; } @java.lang.Override public com.google.api.servicemanagement.v1.ListServiceRolloutsResponse getDefaultInstanceForType() { return com.google.api.servicemanagement.v1.ListServiceRolloutsResponse.getDefaultInstance(); } @java.lang.Override public com.google.api.servicemanagement.v1.ListServiceRolloutsResponse build() { com.google.api.servicemanagement.v1.ListServiceRolloutsResponse result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public com.google.api.servicemanagement.v1.ListServiceRolloutsResponse buildPartial() { com.google.api.servicemanagement.v1.ListServiceRolloutsResponse result = new com.google.api.servicemanagement.v1.ListServiceRolloutsResponse(this); buildPartialRepeatedFields(result); if (bitField0_ != 0) { buildPartial0(result); } onBuilt(); return result; } private void buildPartialRepeatedFields( com.google.api.servicemanagement.v1.ListServiceRolloutsResponse result) { if (rolloutsBuilder_ == null) { if (((bitField0_ & 0x00000001) != 0)) { rollouts_ = java.util.Collections.unmodifiableList(rollouts_); bitField0_ = (bitField0_ & ~0x00000001); } result.rollouts_ = rollouts_; } else { result.rollouts_ = rolloutsBuilder_.build(); } } private void buildPartial0( com.google.api.servicemanagement.v1.ListServiceRolloutsResponse result) { int from_bitField0_ = bitField0_; if (((from_bitField0_ & 0x00000002) != 0)) { result.nextPageToken_ = nextPageToken_; } } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( 
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof com.google.api.servicemanagement.v1.ListServiceRolloutsResponse) { return mergeFrom((com.google.api.servicemanagement.v1.ListServiceRolloutsResponse) other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom( com.google.api.servicemanagement.v1.ListServiceRolloutsResponse other) { if (other == com.google.api.servicemanagement.v1.ListServiceRolloutsResponse.getDefaultInstance()) return this; if (rolloutsBuilder_ == null) { if (!other.rollouts_.isEmpty()) { if (rollouts_.isEmpty()) { rollouts_ = other.rollouts_; bitField0_ = (bitField0_ & ~0x00000001); } else { ensureRolloutsIsMutable(); rollouts_.addAll(other.rollouts_); } onChanged(); } } else { if (!other.rollouts_.isEmpty()) { if (rolloutsBuilder_.isEmpty()) { rolloutsBuilder_.dispose(); rolloutsBuilder_ = null; rollouts_ = other.rollouts_; bitField0_ = (bitField0_ & ~0x00000001); rolloutsBuilder_ = com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ? 
getRolloutsFieldBuilder() : null; } else { rolloutsBuilder_.addAllMessages(other.rollouts_); } } } if (!other.getNextPageToken().isEmpty()) { nextPageToken_ = other.nextPageToken_; bitField0_ |= 0x00000002; onChanged(); } this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { com.google.api.servicemanagement.v1.Rollout m = input.readMessage( com.google.api.servicemanagement.v1.Rollout.parser(), extensionRegistry); if (rolloutsBuilder_ == null) { ensureRolloutsIsMutable(); rollouts_.add(m); } else { rolloutsBuilder_.addMessage(m); } break; } // case 10 case 18: { nextPageToken_ = input.readStringRequireUtf8(); bitField0_ |= 0x00000002; break; } // case 18 default: { if (!super.parseUnknownField(input, extensionRegistry, tag)) { done = true; // was an endgroup tag } break; } // default: } // switch (tag) } // while (!done) } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.unwrapIOException(); } finally { onChanged(); } // finally return this; } private int bitField0_; private java.util.List<com.google.api.servicemanagement.v1.Rollout> rollouts_ = java.util.Collections.emptyList(); private void ensureRolloutsIsMutable() { if (!((bitField0_ & 0x00000001) != 0)) { rollouts_ = new java.util.ArrayList<com.google.api.servicemanagement.v1.Rollout>(rollouts_); bitField0_ |= 0x00000001; } } private com.google.protobuf.RepeatedFieldBuilderV3< com.google.api.servicemanagement.v1.Rollout, com.google.api.servicemanagement.v1.Rollout.Builder, 
com.google.api.servicemanagement.v1.RolloutOrBuilder> rolloutsBuilder_; /** * * * <pre> * The list of rollout resources. * </pre> * * <code>repeated .google.api.servicemanagement.v1.Rollout rollouts = 1;</code> */ public java.util.List<com.google.api.servicemanagement.v1.Rollout> getRolloutsList() { if (rolloutsBuilder_ == null) { return java.util.Collections.unmodifiableList(rollouts_); } else { return rolloutsBuilder_.getMessageList(); } } /** * * * <pre> * The list of rollout resources. * </pre> * * <code>repeated .google.api.servicemanagement.v1.Rollout rollouts = 1;</code> */ public int getRolloutsCount() { if (rolloutsBuilder_ == null) { return rollouts_.size(); } else { return rolloutsBuilder_.getCount(); } } /** * * * <pre> * The list of rollout resources. * </pre> * * <code>repeated .google.api.servicemanagement.v1.Rollout rollouts = 1;</code> */ public com.google.api.servicemanagement.v1.Rollout getRollouts(int index) { if (rolloutsBuilder_ == null) { return rollouts_.get(index); } else { return rolloutsBuilder_.getMessage(index); } } /** * * * <pre> * The list of rollout resources. * </pre> * * <code>repeated .google.api.servicemanagement.v1.Rollout rollouts = 1;</code> */ public Builder setRollouts(int index, com.google.api.servicemanagement.v1.Rollout value) { if (rolloutsBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureRolloutsIsMutable(); rollouts_.set(index, value); onChanged(); } else { rolloutsBuilder_.setMessage(index, value); } return this; } /** * * * <pre> * The list of rollout resources. 
* </pre> * * <code>repeated .google.api.servicemanagement.v1.Rollout rollouts = 1;</code> */ public Builder setRollouts( int index, com.google.api.servicemanagement.v1.Rollout.Builder builderForValue) { if (rolloutsBuilder_ == null) { ensureRolloutsIsMutable(); rollouts_.set(index, builderForValue.build()); onChanged(); } else { rolloutsBuilder_.setMessage(index, builderForValue.build()); } return this; } /** * * * <pre> * The list of rollout resources. * </pre> * * <code>repeated .google.api.servicemanagement.v1.Rollout rollouts = 1;</code> */ public Builder addRollouts(com.google.api.servicemanagement.v1.Rollout value) { if (rolloutsBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureRolloutsIsMutable(); rollouts_.add(value); onChanged(); } else { rolloutsBuilder_.addMessage(value); } return this; } /** * * * <pre> * The list of rollout resources. * </pre> * * <code>repeated .google.api.servicemanagement.v1.Rollout rollouts = 1;</code> */ public Builder addRollouts(int index, com.google.api.servicemanagement.v1.Rollout value) { if (rolloutsBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureRolloutsIsMutable(); rollouts_.add(index, value); onChanged(); } else { rolloutsBuilder_.addMessage(index, value); } return this; } /** * * * <pre> * The list of rollout resources. * </pre> * * <code>repeated .google.api.servicemanagement.v1.Rollout rollouts = 1;</code> */ public Builder addRollouts( com.google.api.servicemanagement.v1.Rollout.Builder builderForValue) { if (rolloutsBuilder_ == null) { ensureRolloutsIsMutable(); rollouts_.add(builderForValue.build()); onChanged(); } else { rolloutsBuilder_.addMessage(builderForValue.build()); } return this; } /** * * * <pre> * The list of rollout resources. 
* </pre> * * <code>repeated .google.api.servicemanagement.v1.Rollout rollouts = 1;</code> */ public Builder addRollouts( int index, com.google.api.servicemanagement.v1.Rollout.Builder builderForValue) { if (rolloutsBuilder_ == null) { ensureRolloutsIsMutable(); rollouts_.add(index, builderForValue.build()); onChanged(); } else { rolloutsBuilder_.addMessage(index, builderForValue.build()); } return this; } /** * * * <pre> * The list of rollout resources. * </pre> * * <code>repeated .google.api.servicemanagement.v1.Rollout rollouts = 1;</code> */ public Builder addAllRollouts( java.lang.Iterable<? extends com.google.api.servicemanagement.v1.Rollout> values) { if (rolloutsBuilder_ == null) { ensureRolloutsIsMutable(); com.google.protobuf.AbstractMessageLite.Builder.addAll(values, rollouts_); onChanged(); } else { rolloutsBuilder_.addAllMessages(values); } return this; } /** * * * <pre> * The list of rollout resources. * </pre> * * <code>repeated .google.api.servicemanagement.v1.Rollout rollouts = 1;</code> */ public Builder clearRollouts() { if (rolloutsBuilder_ == null) { rollouts_ = java.util.Collections.emptyList(); bitField0_ = (bitField0_ & ~0x00000001); onChanged(); } else { rolloutsBuilder_.clear(); } return this; } /** * * * <pre> * The list of rollout resources. * </pre> * * <code>repeated .google.api.servicemanagement.v1.Rollout rollouts = 1;</code> */ public Builder removeRollouts(int index) { if (rolloutsBuilder_ == null) { ensureRolloutsIsMutable(); rollouts_.remove(index); onChanged(); } else { rolloutsBuilder_.remove(index); } return this; } /** * * * <pre> * The list of rollout resources. * </pre> * * <code>repeated .google.api.servicemanagement.v1.Rollout rollouts = 1;</code> */ public com.google.api.servicemanagement.v1.Rollout.Builder getRolloutsBuilder(int index) { return getRolloutsFieldBuilder().getBuilder(index); } /** * * * <pre> * The list of rollout resources. 
* </pre> * * <code>repeated .google.api.servicemanagement.v1.Rollout rollouts = 1;</code> */ public com.google.api.servicemanagement.v1.RolloutOrBuilder getRolloutsOrBuilder(int index) { if (rolloutsBuilder_ == null) { return rollouts_.get(index); } else { return rolloutsBuilder_.getMessageOrBuilder(index); } } /** * * * <pre> * The list of rollout resources. * </pre> * * <code>repeated .google.api.servicemanagement.v1.Rollout rollouts = 1;</code> */ public java.util.List<? extends com.google.api.servicemanagement.v1.RolloutOrBuilder> getRolloutsOrBuilderList() { if (rolloutsBuilder_ != null) { return rolloutsBuilder_.getMessageOrBuilderList(); } else { return java.util.Collections.unmodifiableList(rollouts_); } } /** * * * <pre> * The list of rollout resources. * </pre> * * <code>repeated .google.api.servicemanagement.v1.Rollout rollouts = 1;</code> */ public com.google.api.servicemanagement.v1.Rollout.Builder addRolloutsBuilder() { return getRolloutsFieldBuilder() .addBuilder(com.google.api.servicemanagement.v1.Rollout.getDefaultInstance()); } /** * * * <pre> * The list of rollout resources. * </pre> * * <code>repeated .google.api.servicemanagement.v1.Rollout rollouts = 1;</code> */ public com.google.api.servicemanagement.v1.Rollout.Builder addRolloutsBuilder(int index) { return getRolloutsFieldBuilder() .addBuilder(index, com.google.api.servicemanagement.v1.Rollout.getDefaultInstance()); } /** * * * <pre> * The list of rollout resources. 
* </pre> * * <code>repeated .google.api.servicemanagement.v1.Rollout rollouts = 1;</code> */ public java.util.List<com.google.api.servicemanagement.v1.Rollout.Builder> getRolloutsBuilderList() { return getRolloutsFieldBuilder().getBuilderList(); } private com.google.protobuf.RepeatedFieldBuilderV3< com.google.api.servicemanagement.v1.Rollout, com.google.api.servicemanagement.v1.Rollout.Builder, com.google.api.servicemanagement.v1.RolloutOrBuilder> getRolloutsFieldBuilder() { if (rolloutsBuilder_ == null) { rolloutsBuilder_ = new com.google.protobuf.RepeatedFieldBuilderV3< com.google.api.servicemanagement.v1.Rollout, com.google.api.servicemanagement.v1.Rollout.Builder, com.google.api.servicemanagement.v1.RolloutOrBuilder>( rollouts_, ((bitField0_ & 0x00000001) != 0), getParentForChildren(), isClean()); rollouts_ = null; } return rolloutsBuilder_; } private java.lang.Object nextPageToken_ = ""; /** * * * <pre> * The token of the next page of results. * </pre> * * <code>string next_page_token = 2;</code> * * @return The nextPageToken. */ public java.lang.String getNextPageToken() { java.lang.Object ref = nextPageToken_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); nextPageToken_ = s; return s; } else { return (java.lang.String) ref; } } /** * * * <pre> * The token of the next page of results. * </pre> * * <code>string next_page_token = 2;</code> * * @return The bytes for nextPageToken. */ public com.google.protobuf.ByteString getNextPageTokenBytes() { java.lang.Object ref = nextPageToken_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); nextPageToken_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * * * <pre> * The token of the next page of results. * </pre> * * <code>string next_page_token = 2;</code> * * @param value The nextPageToken to set. 
* @return This builder for chaining. */ public Builder setNextPageToken(java.lang.String value) { if (value == null) { throw new NullPointerException(); } nextPageToken_ = value; bitField0_ |= 0x00000002; onChanged(); return this; } /** * * * <pre> * The token of the next page of results. * </pre> * * <code>string next_page_token = 2;</code> * * @return This builder for chaining. */ public Builder clearNextPageToken() { nextPageToken_ = getDefaultInstance().getNextPageToken(); bitField0_ = (bitField0_ & ~0x00000002); onChanged(); return this; } /** * * * <pre> * The token of the next page of results. * </pre> * * <code>string next_page_token = 2;</code> * * @param value The bytes for nextPageToken to set. * @return This builder for chaining. */ public Builder setNextPageTokenBytes(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); nextPageToken_ = value; bitField0_ |= 0x00000002; onChanged(); return this; } @java.lang.Override public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:google.api.servicemanagement.v1.ListServiceRolloutsResponse) } // @@protoc_insertion_point(class_scope:google.api.servicemanagement.v1.ListServiceRolloutsResponse) private static final com.google.api.servicemanagement.v1.ListServiceRolloutsResponse DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new com.google.api.servicemanagement.v1.ListServiceRolloutsResponse(); } public static com.google.api.servicemanagement.v1.ListServiceRolloutsResponse getDefaultInstance() { return DEFAULT_INSTANCE; } private static final com.google.protobuf.Parser<ListServiceRolloutsResponse> PARSER = new 
com.google.protobuf.AbstractParser<ListServiceRolloutsResponse>() { @java.lang.Override public ListServiceRolloutsResponse parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { Builder builder = newBuilder(); try { builder.mergeFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(builder.buildPartial()); } catch (com.google.protobuf.UninitializedMessageException e) { throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException(e) .setUnfinishedMessage(builder.buildPartial()); } return builder.buildPartial(); } }; public static com.google.protobuf.Parser<ListServiceRolloutsResponse> parser() { return PARSER; } @java.lang.Override public com.google.protobuf.Parser<ListServiceRolloutsResponse> getParserForType() { return PARSER; } @java.lang.Override public com.google.api.servicemanagement.v1.ListServiceRolloutsResponse getDefaultInstanceForType() { return DEFAULT_INSTANCE; } }
google/guava
36,333
guava-tests/test/com/google/common/collect/TableCollectionTest.java
/* * Copyright (C) 2008 The Guava Authors * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.google.common.collect; import static com.google.common.base.Preconditions.checkNotNull; import static com.google.common.collect.ReflectionFreeAssertThrows.assertThrows; import static com.google.common.collect.Tables.immutableCell; import static com.google.common.collect.Tables.transformValues; import static com.google.common.collect.Tables.transpose; import static com.google.common.collect.Tables.unmodifiableRowSortedTable; import static com.google.common.collect.Tables.unmodifiableTable; import static java.util.Collections.sort; import com.google.common.annotations.GwtCompatible; import com.google.common.annotations.GwtIncompatible; import com.google.common.annotations.J2ktIncompatible; import com.google.common.base.Function; import com.google.common.base.Functions; import com.google.common.collect.Table.Cell; import com.google.common.collect.testing.CollectionTestSuiteBuilder; import com.google.common.collect.testing.MapInterfaceTest; import com.google.common.collect.testing.SampleElements; import com.google.common.collect.testing.SetTestSuiteBuilder; import com.google.common.collect.testing.SortedSetTestSuiteBuilder; import com.google.common.collect.testing.TestSetGenerator; import com.google.common.collect.testing.TestStringCollectionGenerator; import com.google.common.collect.testing.TestStringSetGenerator; import 
com.google.common.collect.testing.TestStringSortedSetGenerator; import com.google.common.collect.testing.features.CollectionFeature; import com.google.common.collect.testing.features.CollectionSize; import com.google.common.collect.testing.features.Feature; import java.util.ArrayList; import java.util.Collection; import java.util.List; import java.util.Map; import java.util.Set; import java.util.SortedSet; import junit.framework.Test; import junit.framework.TestCase; import junit.framework.TestSuite; import org.jspecify.annotations.NullMarked; import org.jspecify.annotations.Nullable; /** * Collection tests for {@link Table} implementations. * * @author Jared Levy * @author Louis Wasserman */ @GwtCompatible @NullMarked public class TableCollectionTest extends TestCase { @J2ktIncompatible private static final Feature<?>[] COLLECTION_FEATURES = { CollectionSize.ANY, CollectionFeature.ALLOWS_NULL_QUERIES }; @J2ktIncompatible private static final Feature<?>[] COLLECTION_FEATURES_ORDER = { CollectionSize.ANY, CollectionFeature.KNOWN_ORDER, CollectionFeature.ALLOWS_NULL_QUERIES }; @J2ktIncompatible private static final Feature<?>[] COLLECTION_FEATURES_REMOVE = { CollectionSize.ANY, CollectionFeature.SUPPORTS_REMOVE, CollectionFeature.ALLOWS_NULL_QUERIES }; @J2ktIncompatible private static final Feature<?>[] COLLECTION_FEATURES_REMOVE_ORDER = { CollectionSize.ANY, CollectionFeature.KNOWN_ORDER, CollectionFeature.SUPPORTS_REMOVE, CollectionFeature.ALLOWS_NULL_QUERIES }; @J2ktIncompatible @GwtIncompatible // suite @AndroidIncompatible // test-suite builders public static Test suite() { TestSuite suite = new TestSuite(); // Not testing rowKeySet() or columnKeySet() of Table.transformValues() // since the transformation doesn't affect the row and column key sets. 
suite.addTest( SetTestSuiteBuilder.using( new TestStringSetGenerator() { @Override protected Set<String> create(String[] elements) { Table<String, Integer, Character> table = ArrayTable.create(ImmutableList.copyOf(elements), ImmutableList.of(1, 2)); populateForRowKeySet(table, elements); return table.rowKeySet(); } }) .named("ArrayTable.rowKeySet") .withFeatures( CollectionSize.ONE, CollectionSize.SEVERAL, CollectionFeature.KNOWN_ORDER, CollectionFeature.REJECTS_DUPLICATES_AT_CREATION, CollectionFeature.ALLOWS_NULL_QUERIES) .createTestSuite()); suite.addTest( SetTestSuiteBuilder.using( new TestStringSetGenerator() { @Override protected Set<String> create(String[] elements) { Table<String, Integer, Character> table = HashBasedTable.create(); populateForRowKeySet(table, elements); return table.rowKeySet(); } }) .named("HashBasedTable.rowKeySet") .withFeatures(COLLECTION_FEATURES_REMOVE) .withFeatures(CollectionFeature.SUPPORTS_ITERATOR_REMOVE) .createTestSuite()); suite.addTest( SortedSetTestSuiteBuilder.using( new TestStringSortedSetGenerator() { @Override protected SortedSet<String> create(String[] elements) { TreeBasedTable<String, Integer, Character> table = TreeBasedTable.create(); populateForRowKeySet(table, elements); return table.rowKeySet(); } @Override public List<String> order(List<String> insertionOrder) { sort(insertionOrder); return insertionOrder; } }) .named("TreeBasedTable.rowKeySet") .withFeatures(COLLECTION_FEATURES_REMOVE_ORDER) .withFeatures(CollectionFeature.SUPPORTS_ITERATOR_REMOVE) .createTestSuite()); suite.addTest( SetTestSuiteBuilder.using( new TestStringSetGenerator() { @Override protected Set<String> create(String[] elements) { Table<String, Integer, Character> table = HashBasedTable.create(); populateForRowKeySet(table, elements); return unmodifiableTable(table).rowKeySet(); } }) .named("unmodifiableTable[HashBasedTable].rowKeySet") .withFeatures(COLLECTION_FEATURES) .createTestSuite()); suite.addTest( SetTestSuiteBuilder.using( new 
TestStringSetGenerator() { @Override protected Set<String> create(String[] elements) { RowSortedTable<String, Integer, Character> table = TreeBasedTable.create(); populateForRowKeySet(table, elements); return unmodifiableRowSortedTable(table).rowKeySet(); } @Override public List<String> order(List<String> insertionOrder) { sort(insertionOrder); return insertionOrder; } }) .named("unmodifiableRowSortedTable[TreeBasedTable].rowKeySet") .withFeatures(COLLECTION_FEATURES_ORDER) .createTestSuite()); suite.addTest( SetTestSuiteBuilder.using( new TestStringSetGenerator() { @Override protected Set<String> create(String[] elements) { Table<Integer, String, Character> table = ArrayTable.create(ImmutableList.of(1, 2), ImmutableList.copyOf(elements)); populateForColumnKeySet(table, elements); return table.columnKeySet(); } }) .named("ArrayTable.columnKeySet") .withFeatures( CollectionSize.ONE, CollectionSize.SEVERAL, CollectionFeature.KNOWN_ORDER, CollectionFeature.REJECTS_DUPLICATES_AT_CREATION, CollectionFeature.ALLOWS_NULL_QUERIES) .createTestSuite()); suite.addTest( SetTestSuiteBuilder.using( new TestStringSetGenerator() { @Override protected Set<String> create(String[] elements) { Table<Integer, String, Character> table = HashBasedTable.create(); populateForColumnKeySet(table, elements); return table.columnKeySet(); } }) .named("HashBasedTable.columnKeySet") .withFeatures(COLLECTION_FEATURES_REMOVE) .createTestSuite()); suite.addTest( SetTestSuiteBuilder.using( new TestStringSetGenerator() { @Override protected Set<String> create(String[] elements) { Table<Integer, String, Character> table = TreeBasedTable.create(); populateForColumnKeySet(table, elements); return table.columnKeySet(); } @Override public List<String> order(List<String> insertionOrder) { sort(insertionOrder); return insertionOrder; } }) .named("TreeBasedTable.columnKeySet") .withFeatures(COLLECTION_FEATURES_REMOVE_ORDER) .createTestSuite()); suite.addTest( SetTestSuiteBuilder.using( new 
TestStringSetGenerator() { @Override protected Set<String> create(String[] elements) { Table<Integer, String, Character> table = HashBasedTable.create(); populateForColumnKeySet(table, elements); return unmodifiableTable(table).columnKeySet(); } }) .named("unmodifiableTable[HashBasedTable].columnKeySet") .withFeatures(COLLECTION_FEATURES) .createTestSuite()); suite.addTest( SetTestSuiteBuilder.using( new TestStringSetGenerator() { @Override protected Set<String> create(String[] elements) { RowSortedTable<Integer, String, Character> table = TreeBasedTable.create(); populateForColumnKeySet(table, elements); return unmodifiableRowSortedTable(table).columnKeySet(); } @Override public List<String> order(List<String> insertionOrder) { sort(insertionOrder); return insertionOrder; } }) .named("unmodifiableRowSortedTable[TreeBasedTable].columnKeySet") .withFeatures(COLLECTION_FEATURES_ORDER) .createTestSuite()); suite.addTest( CollectionTestSuiteBuilder.using( new TestStringCollectionGenerator() { @Override protected Collection<String> create(String[] elements) { List<Integer> rowKeys = new ArrayList<>(); for (int i = 0; i < elements.length; i++) { rowKeys.add(i); } Table<Integer, Character, String> table = ArrayTable.create(rowKeys, ImmutableList.of('a')); populateForValues(table, elements); return table.values(); } }) .named("ArrayTable.values") .withFeatures( CollectionSize.ONE, CollectionSize.SEVERAL, CollectionFeature.ALLOWS_NULL_VALUES, CollectionFeature.KNOWN_ORDER) .createTestSuite()); suite.addTest( CollectionTestSuiteBuilder.using( new TestStringCollectionGenerator() { @Override protected Collection<String> create(String[] elements) { Table<Integer, Character, String> table = HashBasedTable.create(); table.put(1, 'a', "foo"); table.clear(); populateForValues(table, elements); return table.values(); } }) .named("HashBasedTable.values") .withFeatures(COLLECTION_FEATURES_REMOVE) .withFeatures(CollectionFeature.SUPPORTS_ITERATOR_REMOVE) .createTestSuite()); 
suite.addTest( CollectionTestSuiteBuilder.using( new TestStringCollectionGenerator() { @Override protected Collection<String> create(String[] elements) { Table<Integer, Character, String> table = TreeBasedTable.create(); table.put(1, 'a', "foo"); table.clear(); populateForValues(table, elements); return table.values(); } }) .named("TreeBasedTable.values") .withFeatures(COLLECTION_FEATURES_REMOVE_ORDER) .withFeatures(CollectionFeature.SUPPORTS_ITERATOR_REMOVE) .createTestSuite()); Function<String, String> removeFirstCharacter = new Function<String, String>() { @Override public String apply(String input) { return input.substring(1); } }; suite.addTest( CollectionTestSuiteBuilder.using( new TestStringCollectionGenerator() { @Override protected Collection<String> create(String[] elements) { Table<Integer, Character, String> table = HashBasedTable.create(); for (int i = 0; i < elements.length; i++) { table.put(i, 'a', "x" + checkNotNull(elements[i])); } return transformValues(table, removeFirstCharacter).values(); } }) .named("TransformValues.values") .withFeatures(COLLECTION_FEATURES_REMOVE) .withFeatures(CollectionFeature.SUPPORTS_ITERATOR_REMOVE) .createTestSuite()); suite.addTest( CollectionTestSuiteBuilder.using( new TestStringCollectionGenerator() { @Override protected Collection<String> create(String[] elements) { Table<Integer, Character, String> table = HashBasedTable.create(); table.put(1, 'a', "foo"); table.clear(); populateForValues(table, elements); return unmodifiableTable(table).values(); } }) .named("unmodifiableTable[HashBasedTable].values") .withFeatures(COLLECTION_FEATURES) .createTestSuite()); suite.addTest( CollectionTestSuiteBuilder.using( new TestStringCollectionGenerator() { @Override protected Collection<String> create(String[] elements) { RowSortedTable<Integer, Character, String> table = TreeBasedTable.create(); table.put(1, 'a', "foo"); table.clear(); populateForValues(table, elements); return unmodifiableRowSortedTable(table).values(); } }) 
.named("unmodifiableTable[TreeBasedTable].values") .withFeatures(COLLECTION_FEATURES_ORDER) .createTestSuite()); suite.addTest( SetTestSuiteBuilder.using( new TestCellSetGenerator() { @Override public SampleElements<Cell<String, Integer, Character>> samples() { return new SampleElements<>( immutableCell("bar", 1, 'a'), immutableCell("bar", 2, 'b'), immutableCell("bar", 3, (Character) null), immutableCell("bar", 4, 'b'), immutableCell("bar", 5, 'b')); } @Override public Set<Cell<String, Integer, Character>> create(Object... elements) { List<Integer> columnKeys = new ArrayList<>(); for (Object element : elements) { @SuppressWarnings("unchecked") Cell<String, Integer, Character> cell = (Cell<String, Integer, Character>) element; columnKeys.add(cell.getColumnKey()); } Table<String, Integer, Character> table = ArrayTable.create(ImmutableList.of("bar"), columnKeys); for (Object element : elements) { @SuppressWarnings("unchecked") Cell<String, Integer, Character> cell = (Cell<String, Integer, Character>) element; table.put(cell.getRowKey(), cell.getColumnKey(), cell.getValue()); } return table.cellSet(); } @Override Table<String, Integer, Character> createTable() { throw new UnsupportedOperationException(); } }) .named("ArrayTable.cellSet") .withFeatures( CollectionSize.ONE, CollectionSize.SEVERAL, CollectionFeature.KNOWN_ORDER, CollectionFeature.REJECTS_DUPLICATES_AT_CREATION, CollectionFeature.ALLOWS_NULL_QUERIES) .createTestSuite()); suite.addTest( SetTestSuiteBuilder.using( new TestCellSetGenerator() { @Override Table<String, Integer, Character> createTable() { return HashBasedTable.create(); } }) .named("HashBasedTable.cellSet") .withFeatures( CollectionSize.ANY, CollectionFeature.REMOVE_OPERATIONS, CollectionFeature.ALLOWS_NULL_QUERIES) .createTestSuite()); suite.addTest( SetTestSuiteBuilder.using( new TestCellSetGenerator() { @Override Table<String, Integer, Character> createTable() { return TreeBasedTable.create(); } }) .named("TreeBasedTable.cellSet") 
.withFeatures( CollectionSize.ANY, CollectionFeature.REMOVE_OPERATIONS, CollectionFeature.ALLOWS_NULL_QUERIES) .createTestSuite()); suite.addTest( SetTestSuiteBuilder.using( new TestCellSetGenerator() { @Override Table<String, Integer, Character> createTable() { Table<Integer, String, Character> original = TreeBasedTable.create(); return transpose(original); } }) .named("TransposedTable.cellSet") .withFeatures( CollectionSize.ANY, CollectionFeature.REMOVE_OPERATIONS, CollectionFeature.ALLOWS_NULL_QUERIES) .createTestSuite()); suite.addTest( SetTestSuiteBuilder.using( new TestCellSetGenerator() { @Override Table<String, Integer, Character> createTable() { return HashBasedTable.create(); } @Override public Set<Cell<String, Integer, Character>> create(Object... elements) { Table<String, Integer, Character> table = createTable(); for (Object element : elements) { @SuppressWarnings("unchecked") Cell<String, Integer, Character> cell = (Cell<String, Integer, Character>) element; table.put(cell.getRowKey(), cell.getColumnKey(), cell.getValue()); } return transformValues(table, Functions.<Character>identity()).cellSet(); } }) .named("TransformValues.cellSet") .withFeatures( CollectionSize.ANY, CollectionFeature.ALLOWS_NULL_QUERIES, CollectionFeature.REMOVE_OPERATIONS) .createTestSuite()); suite.addTest( SetTestSuiteBuilder.using( new TestCellSetGenerator() { @Override Table<String, Integer, Character> createTable() { return unmodifiableTable(HashBasedTable.<String, Integer, Character>create()); } @Override public Set<Cell<String, Integer, Character>> create(Object... 
elements) { Table<String, Integer, Character> table = HashBasedTable.create(); for (Object element : elements) { @SuppressWarnings("unchecked") Cell<String, Integer, Character> cell = (Cell<String, Integer, Character>) element; table.put(cell.getRowKey(), cell.getColumnKey(), cell.getValue()); } return unmodifiableTable(table).cellSet(); } }) .named("unmodifiableTable[HashBasedTable].cellSet") .withFeatures(CollectionSize.ANY, CollectionFeature.ALLOWS_NULL_QUERIES) .createTestSuite()); suite.addTest( SetTestSuiteBuilder.using( new TestCellSetGenerator() { @Override RowSortedTable<String, Integer, Character> createTable() { return unmodifiableRowSortedTable( TreeBasedTable.<String, Integer, Character>create()); } @Override public Set<Cell<String, Integer, Character>> create(Object... elements) { RowSortedTable<String, Integer, Character> table = TreeBasedTable.create(); for (Object element : elements) { @SuppressWarnings("unchecked") Cell<String, Integer, Character> cell = (Cell<String, Integer, Character>) element; table.put(cell.getRowKey(), cell.getColumnKey(), cell.getValue()); } return unmodifiableRowSortedTable(table).cellSet(); } }) .named("unmodifiableRowSortedTable[TreeBasedTable].cellSet") .withFeatures(CollectionSize.ANY, CollectionFeature.ALLOWS_NULL_QUERIES) .createTestSuite()); suite.addTest( SetTestSuiteBuilder.using( new TestStringSetGenerator() { @Override protected Set<String> create(String[] elements) { Iterable<String> rowKeys = ImmutableSet.copyOf(elements); Iterable<Integer> columnKeys = ImmutableList.of(1, 2, 3); Table<String, Integer, Character> table = ArrayTable.create(rowKeys, columnKeys); populateForRowKeySet(table, elements); return table.column(1).keySet(); } }) .named("ArrayTable.column.keySet") .withFeatures( CollectionSize.ONE, CollectionSize.SEVERAL, CollectionFeature.KNOWN_ORDER, CollectionFeature.ALLOWS_NULL_QUERIES) .createTestSuite()); suite.addTest( SetTestSuiteBuilder.using( new TestStringSetGenerator() { @Override protected 
Set<String> create(String[] elements) { Table<String, Integer, Character> table = HashBasedTable.create(); populateForRowKeySet(table, elements); return table.column(1).keySet(); } }) .named("HashBasedTable.column.keySet") .withFeatures(COLLECTION_FEATURES_REMOVE) .createTestSuite()); suite.addTest( SetTestSuiteBuilder.using( new TestStringSetGenerator() { @Override protected Set<String> create(String[] elements) { Table<String, Integer, Character> table = TreeBasedTable.create(); populateForRowKeySet(table, elements); return table.column(1).keySet(); } @Override public List<String> order(List<String> insertionOrder) { sort(insertionOrder); return insertionOrder; } }) .named("TreeBasedTable.column.keySet") .withFeatures(COLLECTION_FEATURES_REMOVE_ORDER) .createTestSuite()); suite.addTest( SetTestSuiteBuilder.using( new TestStringSetGenerator() { @Override protected Set<String> create(String[] elements) { Table<String, Integer, Character> table = HashBasedTable.create(); populateForRowKeySet(table, elements); return transformValues(table, Functions.toStringFunction()).column(1).keySet(); } }) .named("TransformValues.column.keySet") .withFeatures(COLLECTION_FEATURES_REMOVE) .createTestSuite()); suite.addTest( SetTestSuiteBuilder.using( new TestStringSetGenerator() { @Override protected Set<String> create(String[] elements) { Table<String, Integer, Character> table = HashBasedTable.create(); populateForRowKeySet(table, elements); return unmodifiableTable(table).column(1).keySet(); } }) .named("unmodifiableTable[HashBasedTable].column.keySet") .withFeatures(COLLECTION_FEATURES) .createTestSuite()); suite.addTest( SetTestSuiteBuilder.using( new TestStringSetGenerator() { @Override protected Set<String> create(String[] elements) { RowSortedTable<String, Integer, Character> table = TreeBasedTable.create(); populateForRowKeySet(table, elements); return unmodifiableRowSortedTable(table).column(1).keySet(); } @Override public List<String> order(List<String> insertionOrder) { 
sort(insertionOrder); return insertionOrder; } }) .named("unmodifiableRowSortedTable[TreeBasedTable].column.keySet") .withFeatures(COLLECTION_FEATURES_ORDER) .createTestSuite()); return suite; } private static void populateForRowKeySet( Table<String, Integer, Character> table, String[] elements) { for (String row : elements) { table.put(row, 1, 'a'); table.put(row, 2, 'b'); } } private static void populateForColumnKeySet( Table<Integer, String, Character> table, String[] elements) { for (String column : elements) { table.put(1, column, 'a'); table.put(2, column, 'b'); } } private static void populateForValues( Table<Integer, Character, String> table, String[] elements) { for (int i = 0; i < elements.length; i++) { table.put(i, 'a', elements[i]); } } @J2ktIncompatible private abstract static class TestCellSetGenerator implements TestSetGenerator<Cell<String, Integer, Character>> { @Override public SampleElements<Cell<String, Integer, Character>> samples() { return new SampleElements<>( immutableCell("bar", 1, 'a'), immutableCell("bar", 2, 'b'), immutableCell("foo", 3, 'c'), immutableCell("bar", 1, 'b'), immutableCell("cat", 2, 'b')); } @Override public Set<Cell<String, Integer, Character>> create(Object... 
elements) { Table<String, Integer, Character> table = createTable(); for (Object element : elements) { @SuppressWarnings("unchecked") Cell<String, Integer, Character> cell = (Cell<String, Integer, Character>) element; table.put(cell.getRowKey(), cell.getColumnKey(), cell.getValue()); } return table.cellSet(); } abstract Table<String, Integer, Character> createTable(); @Override @SuppressWarnings("unchecked") public Cell<String, Integer, Character>[] createArray(int length) { return (Cell<String, Integer, Character>[]) new Cell<?, ?, ?>[length]; } @Override public List<Cell<String, Integer, Character>> order( List<Cell<String, Integer, Character>> insertionOrder) { return insertionOrder; } } private abstract static class MapTests extends MapInterfaceTest<String, Integer> { MapTests( boolean allowsNullValues, boolean supportsPut, boolean supportsRemove, boolean supportsClear, boolean supportsIteratorRemove) { super( false, allowsNullValues, supportsPut, supportsRemove, supportsClear, supportsIteratorRemove); } @Override protected String getKeyNotInPopulatedMap() { return "four"; } @Override protected Integer getValueNotInPopulatedMap() { return 4; } } abstract static class RowTests extends MapTests { RowTests( boolean allowsNullValues, boolean supportsPut, boolean supportsRemove, boolean supportsClear, boolean supportsIteratorRemove) { super(allowsNullValues, supportsPut, supportsRemove, supportsClear, supportsIteratorRemove); } abstract Table<Character, String, Integer> makeTable(); @Override protected Map<String, Integer> makeEmptyMap() { return makeTable().row('a'); } @Override protected Map<String, Integer> makePopulatedMap() { Table<Character, String, Integer> table = makeTable(); table.put('a', "one", 1); table.put('a', "two", 2); table.put('a', "three", 3); table.put('b', "four", 4); return table.row('a'); } } static final Function<@Nullable Integer, @Nullable Integer> DIVIDE_BY_2 = new Function<@Nullable Integer, @Nullable Integer>() { @Override public 
@Nullable Integer apply(@Nullable Integer input) { return (input == null) ? null : input / 2; } }; abstract static class ColumnTests extends MapTests { ColumnTests( boolean allowsNullValues, boolean supportsPut, boolean supportsRemove, boolean supportsClear, boolean supportsIteratorRemove) { super(allowsNullValues, supportsPut, supportsRemove, supportsClear, supportsIteratorRemove); } abstract Table<String, Character, Integer> makeTable(); @Override protected Map<String, Integer> makeEmptyMap() { return makeTable().column('a'); } @Override protected Map<String, Integer> makePopulatedMap() { Table<String, Character, Integer> table = makeTable(); table.put("one", 'a', 1); table.put("two", 'a', 2); table.put("three", 'a', 3); table.put("four", 'b', 4); return table.column('a'); } } private abstract static class MapMapTests extends MapInterfaceTest<String, Map<Integer, Character>> { MapMapTests( boolean allowsNullValues, boolean supportsRemove, boolean supportsClear, boolean supportsIteratorRemove) { super(false, allowsNullValues, false, supportsRemove, supportsClear, supportsIteratorRemove); } @Override protected String getKeyNotInPopulatedMap() { return "cat"; } @Override protected Map<Integer, Character> getValueNotInPopulatedMap() { return ImmutableMap.of(); } /** * The version of this test supplied by {@link MapInterfaceTest} fails for this particular map * implementation, because {@code map.get()} returns a view collection that changes in the * course of a call to {@code remove()}. Thus, the expectation doesn't hold that {@code * map.remove(x)} returns the same value which {@code map.get(x)} did immediately beforehand. 
*/ @Override public void testRemove() { Map<String, Map<Integer, Character>> map; try { map = makePopulatedMap(); } catch (UnsupportedOperationException e) { return; } String keyToRemove = map.keySet().iterator().next(); if (supportsRemove) { int initialSize = map.size(); // var oldValue = map.get(keyToRemove); map.remove(keyToRemove); // This line doesn't hold - see the Javadoc comments above. // assertEquals(expectedValue, oldValue); assertFalse(map.containsKey(keyToRemove)); assertEquals(initialSize - 1, map.size()); } else { assertThrows(UnsupportedOperationException.class, () -> map.remove(keyToRemove)); } assertInvariants(map); } } abstract static class RowMapTests extends MapMapTests { RowMapTests( boolean allowsNullValues, boolean supportsRemove, boolean supportsClear, boolean supportsIteratorRemove) { super(allowsNullValues, supportsRemove, supportsClear, supportsIteratorRemove); } abstract Table<String, Integer, Character> makeTable(); @Override protected Map<String, Map<Integer, Character>> makePopulatedMap() { Table<String, Integer, Character> table = makeTable(); populateTable(table); return table.rowMap(); } // `protected` to work around b/320650932 / KT-67447 runtime crash protected final void populateTable(Table<String, Integer, Character> table) { table.put("foo", 1, 'a'); table.put("bar", 1, 'b'); table.put("foo", 3, 'c'); } @Override protected Map<String, Map<Integer, Character>> makeEmptyMap() { return makeTable().rowMap(); } } static final Function<@Nullable String, @Nullable Character> FIRST_CHARACTER = new Function<@Nullable String, @Nullable Character>() { @Override public @Nullable Character apply(@Nullable String input) { return input == null ? 
null : input.charAt(0); } }; abstract static class ColumnMapTests extends MapMapTests { ColumnMapTests( boolean allowsNullValues, boolean supportsRemove, boolean supportsClear, boolean supportsIteratorRemove) { super(allowsNullValues, supportsRemove, supportsClear, supportsIteratorRemove); } abstract Table<Integer, String, Character> makeTable(); @Override protected Map<String, Map<Integer, Character>> makePopulatedMap() { Table<Integer, String, Character> table = makeTable(); table.put(1, "foo", 'a'); table.put(1, "bar", 'b'); table.put(3, "foo", 'c'); return table.columnMap(); } @Override protected Map<String, Map<Integer, Character>> makeEmptyMap() { return makeTable().columnMap(); } } }
apache/fineract
36,524
fineract-provider/src/main/java/org/apache/fineract/portfolio/savings/service/SavingsApplicationProcessWritePlatformServiceJpaRepositoryImpl.java
/** * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.apache.fineract.portfolio.savings.service; import static org.apache.fineract.portfolio.savings.SavingsApiConstants.SAVINGS_ACCOUNT_RESOURCE_NAME; import com.google.gson.JsonArray; import com.google.gson.JsonElement; import jakarta.persistence.PersistenceException; import java.math.BigDecimal; import java.util.ArrayList; import java.util.HashSet; import java.util.LinkedHashMap; import java.util.List; import java.util.Map; import java.util.Set; import lombok.RequiredArgsConstructor; import lombok.extern.slf4j.Slf4j; import org.apache.commons.lang3.StringUtils; import org.apache.commons.lang3.exception.ExceptionUtils; import org.apache.fineract.commands.domain.CommandWrapper; import org.apache.fineract.commands.service.CommandProcessingService; import org.apache.fineract.commands.service.CommandWrapperBuilder; import org.apache.fineract.infrastructure.accountnumberformat.domain.AccountNumberFormat; import org.apache.fineract.infrastructure.accountnumberformat.domain.AccountNumberFormatRepositoryWrapper; import org.apache.fineract.infrastructure.accountnumberformat.domain.EntityAccountType; import org.apache.fineract.infrastructure.core.api.JsonCommand; import 
org.apache.fineract.infrastructure.core.data.ApiParameterError; import org.apache.fineract.infrastructure.core.data.CommandProcessingResult; import org.apache.fineract.infrastructure.core.data.CommandProcessingResultBuilder; import org.apache.fineract.infrastructure.core.data.DataValidatorBuilder; import org.apache.fineract.infrastructure.core.exception.ErrorHandler; import org.apache.fineract.infrastructure.core.exception.PlatformApiDataValidationException; import org.apache.fineract.infrastructure.dataqueries.data.EntityTables; import org.apache.fineract.infrastructure.dataqueries.data.StatusEnum; import org.apache.fineract.infrastructure.dataqueries.service.EntityDatatableChecksWritePlatformService; import org.apache.fineract.infrastructure.event.business.domain.savings.SavingsApproveBusinessEvent; import org.apache.fineract.infrastructure.event.business.domain.savings.SavingsCreateBusinessEvent; import org.apache.fineract.infrastructure.event.business.domain.savings.SavingsRejectBusinessEvent; import org.apache.fineract.infrastructure.event.business.service.BusinessEventNotifierService; import org.apache.fineract.infrastructure.security.service.PlatformSecurityContext; import org.apache.fineract.organisation.monetary.domain.Money; import org.apache.fineract.organisation.staff.domain.Staff; import org.apache.fineract.organisation.staff.domain.StaffRepositoryWrapper; import org.apache.fineract.portfolio.account.service.AccountNumberGenerator; import org.apache.fineract.portfolio.client.domain.Client; import org.apache.fineract.portfolio.client.domain.ClientRepositoryWrapper; import org.apache.fineract.portfolio.client.exception.ClientNotActiveException; import org.apache.fineract.portfolio.group.domain.Group; import org.apache.fineract.portfolio.group.domain.GroupRepository; import org.apache.fineract.portfolio.group.domain.GroupRepositoryWrapper; import org.apache.fineract.portfolio.group.exception.CenterNotActiveException; import 
org.apache.fineract.portfolio.group.exception.GroupNotActiveException; import org.apache.fineract.portfolio.group.exception.GroupNotFoundException; import org.apache.fineract.portfolio.note.domain.Note; import org.apache.fineract.portfolio.note.domain.NoteRepository; import org.apache.fineract.portfolio.savings.SavingsApiConstants; import org.apache.fineract.portfolio.savings.data.SavingsAccountDataDTO; import org.apache.fineract.portfolio.savings.data.SavingsAccountDataValidator; import org.apache.fineract.portfolio.savings.domain.GSIMRepositoy; import org.apache.fineract.portfolio.savings.domain.GroupSavingsIndividualMonitoring; import org.apache.fineract.portfolio.savings.domain.SavingsAccount; import org.apache.fineract.portfolio.savings.domain.SavingsAccountAssembler; import org.apache.fineract.portfolio.savings.domain.SavingsAccountCharge; import org.apache.fineract.portfolio.savings.domain.SavingsAccountChargeAssembler; import org.apache.fineract.portfolio.savings.domain.SavingsAccountRepositoryWrapper; import org.apache.fineract.portfolio.savings.domain.SavingsAccountStatusType; import org.apache.fineract.portfolio.savings.domain.SavingsProduct; import org.apache.fineract.portfolio.savings.domain.SavingsProductRepository; import org.apache.fineract.portfolio.savings.exception.SavingsProductNotFoundException; import org.apache.fineract.useradministration.domain.AppUser; import org.springframework.dao.DataAccessException; import org.springframework.transaction.annotation.Transactional; @Slf4j @RequiredArgsConstructor public class SavingsApplicationProcessWritePlatformServiceJpaRepositoryImpl implements SavingsApplicationProcessWritePlatformService { private final PlatformSecurityContext context; private final SavingsAccountRepositoryWrapper savingAccountRepository; private final SavingsAccountAssembler savingAccountAssembler; private final SavingsAccountDataValidator savingsAccountDataValidator; private final AccountNumberGenerator accountNumberGenerator; 
private final ClientRepositoryWrapper clientRepository; private final GroupRepository groupRepository; private final SavingsProductRepository savingsProductRepository; private final NoteRepository noteRepository; private final StaffRepositoryWrapper staffRepository; private final SavingsAccountApplicationTransitionApiJsonValidator savingsAccountApplicationTransitionApiJsonValidator; private final SavingsAccountChargeAssembler savingsAccountChargeAssembler; private final CommandProcessingService commandProcessingService; private final SavingsAccountDomainService savingsAccountDomainService; private final SavingsAccountWritePlatformService savingsAccountWritePlatformService; private final AccountNumberFormatRepositoryWrapper accountNumberFormatRepository; private final BusinessEventNotifierService businessEventNotifierService; private final EntityDatatableChecksWritePlatformService entityDatatableChecksWritePlatformService; private final GSIMRepositoy gsimRepository; private final GroupRepositoryWrapper groupRepositoryWrapper; private final GroupSavingsIndividualMonitoringWritePlatformService gsimWritePlatformService; @Transactional @Override public CommandProcessingResult submitGSIMApplication(final JsonCommand command) { CommandProcessingResult result = null; JsonArray gsimApplications = command.arrayOfParameterNamed("clientArray"); final Object lock = new Object(); synchronized (lock) { for (JsonElement gsimApplication : gsimApplications) { // result=submitApplication(JsonCommand.fromExistingCommand(command, // gsimApplication)); result = submitApplication(JsonCommand.fromExistingCommand(command, gsimApplication, gsimApplication.getAsJsonObject().get("clientId").getAsLong())); } } return result; } @Transactional @Override public CommandProcessingResult submitApplication(final JsonCommand command) { try { this.savingsAccountDataValidator.validateForSubmit(command.json()); final AppUser submittedBy = this.context.authenticatedUser(); final SavingsAccount account = 
this.savingAccountAssembler.assembleFrom(command, submittedBy);
        this.savingAccountRepository.save(account);

        String accountNumber = "";
        GroupSavingsIndividualMonitoring gsimAccount = null;
        BigDecimal applicationId = BigDecimal.ZERO;
        Boolean isLastChildApplication = false;

        // gsim: account-number generation — GSIM parent/child accounts share a generated base
        // number; the parent is suffixed "1" and children get base + (childCount + 1).
        if (account.isAccountNumberRequiresAutoGeneration()) {
            final AccountNumberFormat accountNumberFormat = this.accountNumberFormatRepository
                    .findByAccountType(EntityAccountType.SAVINGS);
            // if application is of GSIM type (account type code 5)
            if (account.getAccountTypes() == 5) {
                final Long groupId = command.longValueOfParameterNamed("groupId");
                // GSIM specific parameters
                if (command.bigDecimalValueOfParameterNamedDefaultToNullIfZero("applicationId") != null) {
                    applicationId = command.bigDecimalValueOfParameterNamedDefaultToNullIfZero("applicationId");
                }
                if (command.booleanObjectValueOfParameterNamed("lastApplication") != null) {
                    isLastChildApplication = command.booleanPrimitiveValueOfParameterNamed("lastApplication");
                }
                Group group = this.groupRepositoryWrapper.findOneWithNotFoundDetection(groupId);
                // "isParentAccount" may arrive as the string "1" or as a boolean
                if (command.booleanObjectValueOfParameterNamed("isParentAccount") != null
                        && ("1".equals(command.stringValueOfParameterNamed("isParentAccount"))
                                || command.booleanObjectValueOfParameterNamed("isParentAccount"))) {
                    // empty-table check decides whether a GSIM row must be created fresh
                    if (gsimRepository.count() != 0) {
                        // Parent, gsim table not empty
                        accountNumber = this.accountNumberGenerator.generate(account, accountNumberFormat);
                        account.updateAccountNo(accountNumber + "1");
                        gsimAccount = gsimWritePlatformService.addGSIMAccountInfo(accountNumber, group, BigDecimal.ZERO,
                                Long.valueOf(1), true, SavingsAccountStatusType.SUBMITTED_AND_PENDING_APPROVAL.getValue(),
                                applicationId);
                        account.setGsim(gsimAccount);
                        this.savingAccountRepository.saveAndFlush(account);
                    } else {
                        // Parent, gsim table empty
                        accountNumber = this.accountNumberGenerator.generate(account, accountNumberFormat);
                        account.updateAccountNo(accountNumber + "1");
                        gsimWritePlatformService.addGSIMAccountInfo(accountNumber, group, BigDecimal.ZERO,
                                Long.valueOf(1), true, SavingsAccountStatusType.SUBMITTED_AND_PENDING_APPROVAL.getValue(),
                                applicationId);
                        // re-read the row just created because addGSIMAccountInfo's return value is not used here
                        account.setGsim(gsimRepository.findOneByAccountNumber(accountNumber));
                        this.savingAccountRepository.saveAndFlush(account);
                    }
                } else {
                    if (gsimRepository.count() != 0) {
                        // Child, gsim table not empty: attach to the accepting parent
                        if (applicationId.compareTo(BigDecimal.ZERO) == 0) {
                            gsimAccount = gsimRepository.findOneByIsAcceptingChildAndApplicationIdAndGroupId(true,
                                    applicationId, groupId);
                        } else {
                            gsimAccount = gsimRepository.findOneByIsAcceptingChildAndApplicationId(true, applicationId);
                        }
                        accountNumber = gsimAccount.getAccountNumber() + (gsimAccount.getChildAccountsCount() + 1);
                        account.updateAccountNo(accountNumber);
                        this.gsimWritePlatformService.incrementChildAccountCount(gsimAccount);
                        account.setGsim(gsimAccount);
                        this.savingAccountRepository.saveAndFlush(account);
                    } else {
                        // Child, gsim table empty: if the gsim info is empty, set the current
                        // account up as the parent
                        accountNumber = this.accountNumberGenerator.generate(account, accountNumberFormat);
                        account.updateAccountNo(accountNumber + "1");
                        gsimWritePlatformService.addGSIMAccountInfo(accountNumber, group, BigDecimal.ZERO,
                                Long.valueOf(1), true, SavingsAccountStatusType.SUBMITTED_AND_PENDING_APPROVAL.getValue(),
                                applicationId);
                        // NOTE(review): gsimAccount is still null in this branch, so this stores a
                        // null gsim reference; the parent-empty branch above instead re-reads the
                        // freshly created row — looks like this should do the same. TODO confirm.
                        account.setGsim(gsimAccount);
                        this.savingAccountRepository.saveAndFlush(account);
                    }
                    // reset isAcceptingChild when processing the last application of the GSIM batch
                    if (isLastChildApplication) {
                        this.gsimWritePlatformService
                                .resetIsAcceptingChild(gsimRepository.findOneByIsAcceptingChildAndApplicationId(true,
                                        applicationId));
                    }
                }
            } else {
                // for applications other than GSIM
                generateAccountNumber(account);
            }
        }
        // end of gsim

        final Long savingsId = account.getId();
        if (command.parameterExists(SavingsApiConstants.datatables)) {
            this.entityDatatableChecksWritePlatformService.saveDatatables(StatusEnum.CREATE.getValue(),
EntityTables.SAVINGS.getName(), savingsId, account.productId(),
                    command.arrayOfParameterNamed(SavingsApiConstants.datatables));
        }

        // run any entity-datatable checks configured for this product before reporting success
        this.entityDatatableChecksWritePlatformService.runTheCheckForProduct(savingsId, EntityTables.SAVINGS.getName(),
                StatusEnum.CREATE.getValue(), EntityTables.SAVINGS.getForeignKeyColumnNameOnDatatable(), account.productId());

        businessEventNotifierService.notifyPostBusinessEvent(new SavingsCreateBusinessEvent(account));

        return new CommandProcessingResultBuilder() //
                .withCommandId(command.commandId()) //
                .withEntityId(savingsId) //
                .withOfficeId(account.officeId()) //
                .withClientId(account.clientId()) //
                .withGroupId(account.groupId()) //
                .withSavingsId(savingsId) //
                .withGsimId(gsimAccount == null ? 0 : gsimAccount.getId()).build();
    } catch (final DataAccessException dve) {
        handleDataIntegrityIssues(command, dve.getMostSpecificCause(), dve);
        // defensive only: handleDataIntegrityIssues is documented to always throw
        return CommandProcessingResult.empty();
    } catch (final PersistenceException dve) {
        Throwable throwable = ExceptionUtils.getRootCause(dve.getCause());
        handleDataIntegrityIssues(command, throwable, dve);
        return CommandProcessingResult.empty();
    }
}

/**
 * Generates and persists an auto-generated account number for a (non-GSIM) savings account,
 * if the account is configured to require one; otherwise a no-op.
 */
private void generateAccountNumber(final SavingsAccount account) {
    if (account.isAccountNumberRequiresAutoGeneration()) {
        final AccountNumberFormat accountNumberFormat = this.accountNumberFormatRepository.findByAccountType(EntityAccountType.SAVINGS);
        account.updateAccountNo(this.accountNumberGenerator.generate(account, accountNumberFormat));
        this.savingAccountRepository.saveAndFlush(account);
    }
}

/**
 * Applies the modification command to every child savings account attached to the given GSIM id.
 *
 * @return the result of the last child modified, or {@code null} when there are no children
 */
@Transactional
@Override
public CommandProcessingResult modifyGSIMApplication(final Long gsimId, final JsonCommand command) {
    final Long parentSavingId = gsimId;
    List<SavingsAccount> childSavings = this.savingAccountRepository.findByGsimId(parentSavingId);
    CommandProcessingResult result = null;
    for (SavingsAccount account : childSavings) {
        result = modifyApplication(account.getId(), command);
    }
    return result;
}

/**
 * Modifies a pending savings application in place, re-resolving client / group / product /
 * field-officer / charge associations for whichever parameters actually changed.
 */
@Transactional
@Override
public CommandProcessingResult
modifyApplication(final Long savingsId, final JsonCommand command) {
    try {
        this.savingsAccountDataValidator.validateForUpdate(command.json());

        final Map<String, Object> changes = new LinkedHashMap<>(20);

        final SavingsAccount account = this.savingAccountAssembler.assembleFrom(savingsId, false);
        checkClientOrGroupActive(account);
        account.modifyApplication(command, changes);
        account.validateNewApplicationState(SAVINGS_ACCOUNT_RESOURCE_NAME);
        account.validateAccountValuesWithProduct();

        if (!changes.isEmpty()) {
            // re-resolve each association only when the corresponding parameter changed
            if (changes.containsKey(SavingsApiConstants.clientIdParamName)) {
                final Long clientId = command.longValueOfParameterNamed(SavingsApiConstants.clientIdParamName);
                if (clientId != null) {
                    final Client client = this.clientRepository.findOneWithNotFoundDetection(clientId);
                    if (client.isNotActive()) {
                        throw new ClientNotActiveException(clientId);
                    }
                    account.update(client);
                } else {
                    // explicit null clears the client association
                    final Client client = null;
                    account.update(client);
                }
            }

            if (changes.containsKey(SavingsApiConstants.groupIdParamName)) {
                final Long groupId = command.longValueOfParameterNamed(SavingsApiConstants.groupIdParamName);
                if (groupId != null) {
                    final Group group = this.groupRepository.findById(groupId).orElseThrow(() -> new GroupNotFoundException(groupId));
                    if (group.isNotActive()) {
                        if (group.isCenter()) {
                            throw new CenterNotActiveException(groupId);
                        }
                        throw new GroupNotActiveException(groupId);
                    }
                    account.update(group);
                } else {
                    // explicit null clears the group association
                    final Group group = null;
                    account.update(group);
                }
            }

            if (changes.containsKey(SavingsApiConstants.productIdParamName)) {
                final Long productId = command.longValueOfParameterNamed(SavingsApiConstants.productIdParamName);
                final SavingsProduct product = this.savingsProductRepository.findById(productId)
                        .orElseThrow(() -> new SavingsProductNotFoundException(productId));
                account.update(product);
            }

            if (changes.containsKey(SavingsApiConstants.fieldOfficerIdParamName)) {
                final Long fieldOfficerId = command.longValueOfParameterNamed(SavingsApiConstants.fieldOfficerIdParamName);
                Staff fieldOfficer = null;
                if (fieldOfficerId != null) {
                    fieldOfficer = this.staffRepository.findOneWithNotFoundDetection(fieldOfficerId);
                } else {
                    changes.put(SavingsApiConstants.fieldOfficerIdParamName, "");
                }
                account.update(fieldOfficer);
            }

            if (changes.containsKey("charges")) {
                final Set<SavingsAccountCharge> charges = this.savingsAccountChargeAssembler.fromParsedJson(command.parsedJson(),
                        account.getCurrency().getCode());
                final boolean updated = account.update(charges);
                if (!updated) {
                    // charge set unchanged after comparison: don't report it as a change
                    changes.remove("charges");
                }
            }

            this.savingAccountRepository.saveAndFlush(account);
        }

        return new CommandProcessingResultBuilder() //
                .withCommandId(command.commandId()) //
                .withEntityId(savingsId) //
                .withOfficeId(account.officeId()) //
                .withClientId(account.clientId()) //
                .withGroupId(account.groupId()) //
                .withSavingsId(savingsId) //
                .with(changes) //
                .build();
    } catch (final DataAccessException dve) {
        handleDataIntegrityIssues(command, dve.getMostSpecificCause(), dve);
        // NOTE(review): this handler returns resourceResult(-1L) while the PersistenceException
        // handler below (and the other methods in this class) return empty() — inconsistent,
        // though unreachable in practice since handleDataIntegrityIssues always throws.
        return CommandProcessingResult.resourceResult(-1L);
    } catch (final PersistenceException dve) {
        Throwable throwable = ExceptionUtils.getRootCause(dve.getCause());
        handleDataIntegrityIssues(command, throwable, dve);
        return CommandProcessingResult.empty();
    }
}

/**
 * Deletes a savings application (and its notes), allowed only while the application is still
 * in the submitted-and-pending-approval state.
 */
@Transactional
@Override
public CommandProcessingResult deleteApplication(final Long savingsId) {
    final SavingsAccount account = this.savingAccountAssembler.assembleFrom(savingsId, false);
    checkClientOrGroupActive(account);

    if (account.isNotSubmittedAndPendingApproval()) {
        final List<ApiParameterError> dataValidationErrors = new ArrayList<>();
        final DataValidatorBuilder baseDataValidator = new DataValidatorBuilder(dataValidationErrors)
                .resource(SAVINGS_ACCOUNT_RESOURCE_NAME + SavingsApiConstants.deleteApplicationAction);

        baseDataValidator.reset().parameter(SavingsApiConstants.activatedOnDateParamName)
                .failWithCodeNoParameterAddedToErrorCode("not.in.submittedandpendingapproval.state");

        if (!dataValidationErrors.isEmpty()) {
            throw new
PlatformApiDataValidationException(dataValidationErrors);
        }
    }

    this.noteRepository.deleteAllBySavingsAccount(account);
    this.savingAccountRepository.delete(account);

    return new CommandProcessingResultBuilder() //
            .withEntityId(savingsId) //
            .withOfficeId(account.officeId()) //
            .withClientId(account.clientId()) //
            .withGroupId(account.groupId()) //
            .withSavingsId(savingsId) //
            .build();
}

/**
 * Approves every child savings application attached to the given GSIM id; once all children
 * have been approved, the GSIM parent status is moved to APPROVED as well.
 *
 * @return the result of the last child approved, or {@code null} when there are no children
 */
@Transactional
@Override
public CommandProcessingResult approveGSIMApplication(final Long gsimId, final JsonCommand command) {
    // GroupLoanIndividualMonitoringAccount glimAccount = glimRepository.findOne(loanId);
    Long parentSavingId = gsimId;
    GroupSavingsIndividualMonitoring parentSavings = gsimRepository.findById(parentSavingId).orElseThrow();
    List<SavingsAccount> childSavings = this.savingAccountRepository.findByGsimId(gsimId);
    CommandProcessingResult result = null;
    int count = 0;
    for (SavingsAccount account : childSavings) {
        result = approveApplication(account.getId(), command);
        if (result != null) {
            count++;
            // flip the parent to APPROVED once the last expected child has been processed
            if (count == parentSavings.getChildAccountsCount()) {
                parentSavings.setSavingsStatus(SavingsAccountStatusType.APPROVED.getValue());
                gsimRepository.save(parentSavings);
            }
        }
    }
    return result;
}

/**
 * Approves a single pending savings application and records an optional "note" parameter.
 */
@Transactional
@Override
public CommandProcessingResult approveApplication(final Long savingsId, final JsonCommand command) {
    final AppUser currentUser = this.context.authenticatedUser();

    this.savingsAccountApplicationTransitionApiJsonValidator.validateApproval(command.json());

    final SavingsAccount savingsAccount = this.savingAccountAssembler.assembleFrom(savingsId, false);
    checkClientOrGroupActive(savingsAccount);

    entityDatatableChecksWritePlatformService.runTheCheckForProduct(savingsId, EntityTables.SAVINGS.getName(),
            StatusEnum.APPROVE.getValue(), EntityTables.SAVINGS.getForeignKeyColumnNameOnDatatable(),
            savingsAccount.productId());

    final Map<String, Object> changes = savingsAccount.approveApplication(currentUser, command);
    if (!changes.isEmpty()) {
this.savingAccountRepository.save(savingsAccount);

        final String noteText = command.stringValueOfParameterNamed("note");
        if (StringUtils.isNotBlank(noteText)) {
            final Note note = Note.savingNote(savingsAccount, noteText);
            changes.put("note", noteText);
            this.noteRepository.save(note);
        }
    }

    businessEventNotifierService.notifyPostBusinessEvent(new SavingsApproveBusinessEvent(savingsAccount));

    return new CommandProcessingResultBuilder() //
            .withCommandId(command.commandId()) //
            .withEntityId(savingsId) //
            .withOfficeId(savingsAccount.officeId()) //
            .withClientId(savingsAccount.clientId()) //
            .withGroupId(savingsAccount.groupId()) //
            .withSavingsId(savingsId) //
            .with(changes) //
            .build();
}

/**
 * Undoes approval for every child savings application attached to the given GSIM id; once all
 * children are processed, the GSIM parent status is moved back to SUBMITTED_AND_PENDING_APPROVAL.
 *
 * @return the result of the last child processed, or {@code null} when there are no children
 */
@Transactional
@Override
public CommandProcessingResult undoGSIMApplicationApproval(final Long gsimId, final JsonCommand command) {
    final Long parentSavingId = gsimId;
    GroupSavingsIndividualMonitoring parentSavings = gsimRepository.findById(parentSavingId).orElseThrow();
    List<SavingsAccount> childSavings = this.savingAccountRepository.findByGsimId(gsimId);
    CommandProcessingResult result = null;
    int count = 0;
    for (SavingsAccount account : childSavings) {
        result = undoApplicationApproval(account.getId(), command);
        if (result != null) {
            count++;
            if (count == parentSavings.getChildAccountsCount()) {
                parentSavings.setSavingsStatus(SavingsAccountStatusType.SUBMITTED_AND_PENDING_APPROVAL.getValue());
                gsimRepository.save(parentSavings);
            }
        }
    }
    return result;
}

/**
 * Undoes the approval of a single savings application and records an optional "note" parameter.
 */
@Transactional
@Override
public CommandProcessingResult undoApplicationApproval(final Long savingsId, final JsonCommand command) {
    // ensure the caller is authenticated; the user itself is not needed here
    this.context.authenticatedUser();

    this.savingsAccountApplicationTransitionApiJsonValidator.validateForUndo(command.json());

    final SavingsAccount savingsAccount = this.savingAccountAssembler.assembleFrom(savingsId, false);
    checkClientOrGroupActive(savingsAccount);

    final Map<String, Object> changes = savingsAccount.undoApplicationApproval();
    if (!changes.isEmpty()) {
this.savingAccountRepository.save(savingsAccount);

        final String noteText = command.stringValueOfParameterNamed("note");
        if (StringUtils.isNotBlank(noteText)) {
            final Note note = Note.savingNote(savingsAccount, noteText);
            changes.put("note", noteText);
            this.noteRepository.save(note);
        }
    }

    return new CommandProcessingResultBuilder() //
            .withCommandId(command.commandId()) //
            .withEntityId(savingsId) //
            .withOfficeId(savingsAccount.officeId()) //
            .withClientId(savingsAccount.clientId()) //
            .withGroupId(savingsAccount.groupId()) //
            .withSavingsId(savingsId) //
            .with(changes) //
            .build();
}

/**
 * Rejects every child savings application attached to the given GSIM id; once all children are
 * processed, the GSIM parent status is moved to REJECTED.
 *
 * @return the result of the last child rejected, or {@code null} when there are no children
 */
@Transactional
@Override
public CommandProcessingResult rejectGSIMApplication(final Long gsimId, final JsonCommand command) {
    final Long parentSavingId = gsimId;
    GroupSavingsIndividualMonitoring parentSavings = gsimRepository.findById(parentSavingId).orElseThrow();
    List<SavingsAccount> childSavings = this.savingAccountRepository.findByGsimId(gsimId);
    CommandProcessingResult result = null;
    int count = 0;
    for (SavingsAccount account : childSavings) {
        result = rejectApplication(account.getId(), command);
        if (result != null) {
            count++;
            if (count == parentSavings.getChildAccountsCount()) {
                parentSavings.setSavingsStatus(SavingsAccountStatusType.REJECTED.getValue());
                gsimRepository.save(parentSavings);
            }
        }
    }
    return result;
}

/**
 * Rejects a single pending savings application and records an optional "note" parameter.
 */
@Transactional
@Override
public CommandProcessingResult rejectApplication(final Long savingsId, final JsonCommand command) {
    final AppUser currentUser = this.context.authenticatedUser();

    this.savingsAccountApplicationTransitionApiJsonValidator.validateRejection(command.json());

    final SavingsAccount savingsAccount = this.savingAccountAssembler.assembleFrom(savingsId, false);
    checkClientOrGroupActive(savingsAccount);

    entityDatatableChecksWritePlatformService.runTheCheckForProduct(savingsId, EntityTables.SAVINGS.getName(),
            StatusEnum.REJECTED.getValue(), EntityTables.SAVINGS.getForeignKeyColumnNameOnDatatable(),
            savingsAccount.productId());

    final Map<String, Object> changes
= savingsAccount.rejectApplication(currentUser, command);
    if (!changes.isEmpty()) {
        this.savingAccountRepository.save(savingsAccount);

        final String noteText = command.stringValueOfParameterNamed("note");
        if (StringUtils.isNotBlank(noteText)) {
            final Note note = Note.savingNote(savingsAccount, noteText);
            changes.put("note", noteText);
            this.noteRepository.save(note);
        }
    }

    businessEventNotifierService.notifyPostBusinessEvent(new SavingsRejectBusinessEvent(savingsAccount));

    return new CommandProcessingResultBuilder() //
            .withCommandId(command.commandId()) //
            .withEntityId(savingsId) //
            .withOfficeId(savingsAccount.officeId()) //
            .withClientId(savingsAccount.clientId()) //
            .withGroupId(savingsAccount.groupId()) //
            .withSavingsId(savingsId) //
            .with(changes) //
            .build();
}

/**
 * Withdraws a pending savings application on the applicant's behalf and records an optional
 * "note" parameter.
 */
@Transactional
@Override
public CommandProcessingResult applicantWithdrawsFromApplication(final Long savingsId, final JsonCommand command) {
    final AppUser currentUser = this.context.authenticatedUser();

    this.savingsAccountApplicationTransitionApiJsonValidator.validateApplicantWithdrawal(command.json());

    final SavingsAccount savingsAccount = this.savingAccountAssembler.assembleFrom(savingsId, false);
    checkClientOrGroupActive(savingsAccount);

    entityDatatableChecksWritePlatformService.runTheCheckForProduct(savingsId, EntityTables.SAVINGS.getName(),
            StatusEnum.WITHDRAWN.getValue(), EntityTables.SAVINGS.getForeignKeyColumnNameOnDatatable(),
            savingsAccount.productId());

    final Map<String, Object> changes = savingsAccount.applicantWithdrawsFromApplication(currentUser, command);
    if (!changes.isEmpty()) {
        this.savingAccountRepository.save(savingsAccount);

        final String noteText = command.stringValueOfParameterNamed("note");
        if (StringUtils.isNotBlank(noteText)) {
            final Note note = Note.savingNote(savingsAccount, noteText);
            changes.put("note", noteText);
            this.noteRepository.save(note);
        }
    }

    return new CommandProcessingResultBuilder() //
            .withCommandId(command.commandId()) //
            .withEntityId(savingsId) //
.withOfficeId(savingsAccount.officeId()) //
            .withClientId(savingsAccount.clientId()) //
            .withGroupId(savingsAccount.groupId()) //
            .withSavingsId(savingsId) //
            .with(changes) //
            .build();
}

/**
 * Verifies that the account's owning client (or group/center) is active; throws the matching
 * not-active exception otherwise.
 */
private void checkClientOrGroupActive(final SavingsAccount account) {
    final Client client = account.getClient();
    if (client != null) {
        if (client.isNotActive()) {
            throw new ClientNotActiveException(client.getId());
        }
    }
    final Group group = account.group();
    if (group != null) {
        if (group.isNotActive()) {
            if (group.isCenter()) {
                throw new CenterNotActiveException(group.getId());
            }
            throw new GroupNotActiveException(group.getId());
        }
    }
}

/**
 * Creates a savings application and immediately approves + activates it in one step (used e.g.
 * when a product is configured to open an active account directly), posting any opening deposit
 * and activation charges.
 */
@Override
public CommandProcessingResult createActiveApplication(final SavingsAccountDataDTO savingsAccountDataDTO) {

    final CommandWrapper commandWrapper = new CommandWrapperBuilder().savingsAccountActivation(null).build();
    boolean rollbackTransaction = this.commandProcessingService.validateRollbackCommand(commandWrapper,
            savingsAccountDataDTO.getAppliedBy());

    final SavingsAccount account = this.savingAccountAssembler.assembleFrom(savingsAccountDataDTO.getClient(),
            savingsAccountDataDTO.getGroup(), savingsAccountDataDTO.getSavingsProduct(),
            savingsAccountDataDTO.getApplicationDate(), savingsAccountDataDTO.getAppliedBy());
    account.approveAndActivateApplication(savingsAccountDataDTO.getApplicationDate(), savingsAccountDataDTO.getAppliedBy());
    Money amountForDeposit = account.activateWithBalance();
    final Set<Long> existingTransactionIds = new HashSet<>();
    final Set<Long> existingReversedTransactionIds = new HashSet<>();
    if (amountForDeposit.isGreaterThanZero()) {
        this.savingAccountRepository.save(account);
    }
    this.savingsAccountWritePlatformService.processPostActiveActions(account, savingsAccountDataDTO.getFmt(),
            existingTransactionIds, existingReversedTransactionIds);
    this.savingAccountRepository.saveAndFlush(account);
    generateAccountNumber(account);
    // post journal entries for activation charges
this.savingsAccountDomainService.postJournalEntries(account, existingTransactionIds, existingReversedTransactionIds, false); return new CommandProcessingResultBuilder() // .withSavingsId(account.getId()) // .setRollbackTransaction(rollbackTransaction)// .build(); } /* * Guaranteed to throw an exception no matter what the data integrity issue is. */ private void handleDataIntegrityIssues(final JsonCommand command, final Throwable realCause, final Exception dve) { String msgCode = "error.msg." + SavingsApiConstants.SAVINGS_ACCOUNT_RESOURCE_NAME; String msg = "Unknown data integrity issue with savings account."; String param = null; Object[] msgArgs; Throwable checkEx = realCause == null ? dve : realCause; if (checkEx.getMessage().contains("sa_account_no_UNIQUE")) { final String accountNo = command.stringValueOfParameterNamed("accountNo"); msgCode += ".duplicate.accountNo"; msg = "Savings account with accountNo " + accountNo + " already exists"; param = "accountNo"; msgArgs = new Object[] { accountNo, dve }; } else if (checkEx.getMessage().contains("sa_externalid_UNIQUE")) { final String externalId = command.stringValueOfParameterNamed("externalId"); msgCode += ".duplicate.externalId"; msg = "Savings account with externalId " + externalId + " already exists"; param = "externalId"; msgArgs = new Object[] { externalId, dve }; } else { msgCode += ".unknown.data.integrity.issue"; msgArgs = new Object[] { dve }; } log.error("Error occured.", dve); throw ErrorHandler.getMappable(dve, msgCode, msg, param, msgArgs); } }
apache/kafka
36,440
streams/src/main/java/org/apache/kafka/streams/processor/internals/StateDirectory.java
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.kafka.streams.processor.internals; import org.apache.kafka.common.TopicPartition; import org.apache.kafka.common.utils.LogContext; import org.apache.kafka.common.utils.Time; import org.apache.kafka.common.utils.Utils; import org.apache.kafka.streams.StreamsConfig; import org.apache.kafka.streams.errors.ProcessorStateException; import org.apache.kafka.streams.errors.StreamsException; import org.apache.kafka.streams.errors.TaskCorruptedException; import org.apache.kafka.streams.internals.StreamsConfigUtils; import org.apache.kafka.streams.processor.TaskId; import org.apache.kafka.streams.processor.internals.metrics.StreamsMetricsImpl; import org.apache.kafka.streams.state.internals.ThreadCache; import com.fasterxml.jackson.annotation.JsonIgnoreProperties; import com.fasterxml.jackson.annotation.JsonProperty; import com.fasterxml.jackson.databind.ObjectMapper; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import java.io.File; import java.io.FileFilter; import java.io.IOException; import java.nio.channels.FileChannel; import java.nio.channels.FileLock; import java.nio.channels.OverlappingFileLockException; import java.nio.file.Files; import java.nio.file.Path; import 
java.nio.file.StandardOpenOption; import java.nio.file.attribute.PosixFilePermission; import java.nio.file.attribute.PosixFilePermissions; import java.util.ArrayList; import java.util.Arrays; import java.util.Collection; import java.util.Collections; import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.Objects; import java.util.Set; import java.util.UUID; import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.ConcurrentMap; import java.util.concurrent.atomic.AtomicReference; import java.util.function.Predicate; import java.util.regex.Pattern; import java.util.stream.Collectors; import static org.apache.kafka.streams.processor.internals.StateManagerUtil.CHECKPOINT_FILE_NAME; import static org.apache.kafka.streams.processor.internals.StateManagerUtil.parseTaskDirectoryName; /** * Manages the directories where the state of Tasks owned by a {@link StreamThread} are * stored. Handles creation/locking/unlocking/cleaning of the Task Directories. This class is not * thread-safe. */ public class StateDirectory implements AutoCloseable { private static final Pattern TASK_DIR_PATH_NAME = Pattern.compile("\\d+_\\d+"); private static final Pattern NAMED_TOPOLOGY_DIR_PATH_NAME = Pattern.compile("__.+__"); // named topology dirs follow '__Topology-Name__' private static final Logger log = LoggerFactory.getLogger(StateDirectory.class); static final String LOCK_FILE_NAME = ".lock"; /* The process file is used to persist the process id across restarts. 
 * For compatibility reasons you should only ever add fields to the json schema
 */
static final String PROCESS_FILE_NAME = "kafka-streams-process-metadata";

// JSON-serialized payload of the process file; unknown fields are ignored so that newer
// schema versions remain readable by older brokers of this class.
@JsonIgnoreProperties(ignoreUnknown = true)
static class StateDirectoryProcessFile {

    @JsonProperty
    private final UUID processId;

    // required by jackson -- do not remove, your IDE may be warning that this is unused but it's lying to you
    public StateDirectoryProcessFile() {
        this.processId = null;
    }

    StateDirectoryProcessFile(final UUID processId) {
        this.processId = processId;
    }
}

// presumably serializes task-directory creation across threads — usage is outside this chunk
private final Object taskDirCreationLock = new Object();
private final Time time;
private final String appId;
private final File stateDir;
private final boolean hasPersistentStores;
private final boolean hasNamedTopologies;

// maps a locked task id to the thread currently owning that lock
private final ConcurrentMap<TaskId, Thread> lockedTasksToOwner = new ConcurrentHashMap<>();

private FileChannel stateDirLockChannel;
private FileLock stateDirLock;

private final StreamsConfig config;
// standby tasks created from leftover local state at startup, pending hand-off to stream threads
private final ConcurrentMap<TaskId, Task> tasksForLocalState = new ConcurrentHashMap<>();

/**
 * Ensures that the state base directory as well as the application's sub-directory are created.
 *
 * @param config streams application configuration to read the root state directory path
 * @param time system timer used to execute periodic cleanup procedure
 * @param hasPersistentStores only when the application's topology does have stores persisted on local file
 *                            system, we would go ahead and auto-create the corresponding application / task / store
 *                            directories whenever necessary; otherwise no directories would be created.
 * @param hasNamedTopologies whether this application is composed of independent named topologies
 *
 * @throws ProcessorStateException if the base state directory or application state directory does not exist
 *                                 and could not be created when hasPersistentStores is enabled.
 */
public StateDirectory(final StreamsConfig config,
                      final Time time,
                      final boolean hasPersistentStores,
                      final boolean hasNamedTopologies) {
    this.time = time;
    this.hasPersistentStores = hasPersistentStores;
    this.hasNamedTopologies = hasNamedTopologies;
    this.appId = config.getString(StreamsConfig.APPLICATION_ID_CONFIG);
    this.config = config;
    final String stateDirName = config.getString(StreamsConfig.STATE_DIR_CONFIG);
    final File baseDir = new File(stateDirName);
    // per-application sub-directory: <state.dir>/<application.id>
    stateDir = new File(baseDir, appId);

    if (this.hasPersistentStores) {
        // create <state.dir> and <state.dir>/<application.id> on demand; fail fast when we cannot
        if (!baseDir.exists() && !baseDir.mkdirs()) {
            throw new ProcessorStateException(
                String.format("base state directory [%s] doesn't exist and couldn't be created", stateDirName));
        }
        if (!stateDir.exists() && !stateDir.mkdir()) {
            throw new ProcessorStateException(
                String.format("state directory [%s] doesn't exist and couldn't be created", stateDir.getPath()));
        } else if (stateDir.exists() && !stateDir.isDirectory()) {
            throw new ProcessorStateException(
                String.format("state directory [%s] can't be created as there is an existing file with the same name",
                    stateDir.getPath()));
        }
        // warn (not fail) when state.dir resolves under the OS temp dir, which the OS may wipe
        if (stateDirName.startsWith(System.getProperty("java.io.tmpdir"))) {
            log.warn("Using an OS temp directory in the state.dir property can cause failures with writing"
                + " the checkpoint file due to the fact that this directory can be cleared by the OS."
+ " Resolved state.dir: [" + stateDirName + "]");
        }

        // change the dir permission to "rwxr-x---" to avoid world readable
        configurePermissions(baseDir);
        configurePermissions(stateDir);
    }
}

// Restricts the directory to "rwxr-x---" on POSIX file systems; on non-POSIX file systems
// falls back to the java.io.File API with owner-only flags.
private void configurePermissions(final File file) {
    final Path path = file.toPath();
    if (path.getFileSystem().supportedFileAttributeViews().contains("posix")) {
        final Set<PosixFilePermission> perms = PosixFilePermissions.fromString("rwxr-x---");
        try {
            Files.setPosixFilePermissions(path, perms);
        } catch (final IOException e) {
            // permission tightening is best-effort; log and continue
            log.error("Error changing permissions for the directory {} ", path, e);
        }
    } else {
        boolean set = file.setReadable(true, true);
        set &= file.setWritable(true, true);
        set &= file.setExecutable(true, true);
        if (!set) {
            log.error("Failed to change permissions for the directory {}", file);
        }
    }
}

/**
 * @return true if the state directory was successfully locked
 */
private boolean lockStateDirectory() {
    final File lockFile = new File(stateDir, LOCK_FILE_NAME);
    try {
        stateDirLockChannel = FileChannel.open(lockFile.toPath(), StandardOpenOption.CREATE, StandardOpenOption.WRITE);
        // tryLock returns null (instead of throwing) when another process holds the lock
        stateDirLock = tryLock(stateDirLockChannel);
    } catch (final IOException e) {
        log.error("Unable to lock the state directory due to unexpected exception", e);
        throw new ProcessorStateException(String.format("Failed to lock the state directory [%s] during startup",
            stateDir.getAbsolutePath()), e);
    }
    return stateDirLock != null;
}

/**
 * Scans the state directory for non-empty task directories and, for each one whose
 * sub-topology is still stateful, initializes a standby task over the existing local state so
 * it can later be handed off to a stream thread.
 */
public void initializeStartupTasks(final TopologyMetadata topologyMetadata,
                                   final StreamsMetricsImpl streamsMetrics,
                                   final LogContext logContext) {
    final List<TaskDirectory> nonEmptyTaskDirectories = listNonEmptyTaskDirectories();
    if (hasPersistentStores && !nonEmptyTaskDirectories.isEmpty()) {
        final ThreadCache dummyCache = new ThreadCache(logContext, 0, streamsMetrics);
        final boolean eosEnabled = StreamsConfigUtils.eosEnabled(config);
        final boolean stateUpdaterEnabled = StreamsConfig.InternalConfig.stateUpdaterEnabled(config.originals());

        // discover all non-empty task
directories in StateDirectory for (final TaskDirectory taskDirectory : nonEmptyTaskDirectories) { final String dirName = taskDirectory.file().getName(); final TaskId id = parseTaskDirectoryName(dirName, taskDirectory.namedTopology()); final ProcessorTopology subTopology = topologyMetadata.buildSubtopology(id); // we still check if the task's sub-topology is stateful, even though we know its directory contains state, // because it's possible that the topology has changed since that data was written, and is now stateless // this therefore prevents us from creating unnecessary Tasks just because of some left-over state if (subTopology.hasStateWithChangelogs()) { final Set<TopicPartition> inputPartitions = topologyMetadata.nodeToSourceTopics(id).values().stream() .flatMap(Collection::stream) .map(t -> new TopicPartition(t, id.partition())) .collect(Collectors.toSet()); final ProcessorStateManager stateManager = ProcessorStateManager.createStartupTaskStateManager( id, eosEnabled, logContext, this, subTopology.storeToChangelogTopic(), inputPartitions, stateUpdaterEnabled ); final InternalProcessorContext<Object, Object> context = new ProcessorContextImpl( id, config, stateManager, streamsMetrics, dummyCache ); final Task task = new StandbyTask( id, inputPartitions, subTopology, topologyMetadata.taskConfig(id), streamsMetrics, stateManager, this, dummyCache, context ); try { task.initializeIfNeeded(); tasksForLocalState.put(id, task); } catch (final TaskCorruptedException e) { // Task is corrupt - wipe it out (under EOS) and don't initialize a Standby for it task.suspend(); task.closeDirty(); } } } } } public boolean hasStartupTasks() { return !tasksForLocalState.isEmpty(); } public Task removeStartupTask(final TaskId taskId) { final Task task = tasksForLocalState.remove(taskId); if (task != null) { lockedTasksToOwner.replace(taskId, Thread.currentThread()); } return task; } public void closeStartupTasks() { closeStartupTasks(t -> true); } private void 
closeStartupTasks(final Predicate<Task> predicate) { if (!tasksForLocalState.isEmpty()) { // "drain" Tasks first to ensure that we don't try to close Tasks that another thread is attempting to close final Set<Task> drainedTasks = new HashSet<>(tasksForLocalState.size()); for (final Map.Entry<TaskId, Task> entry : tasksForLocalState.entrySet()) { if (predicate.test(entry.getValue()) && removeStartupTask(entry.getKey()) != null) { // only add to our list of drained Tasks if we exclusively "claimed" a Task from tasksForLocalState // to ensure we don't accidentally try to drain the same Task multiple times from concurrent threads drainedTasks.add(entry.getValue()); } } // now that we have exclusive ownership of the drained tasks, close them for (final Task task : drainedTasks) { task.suspend(); task.closeClean(); } } } public UUID initializeProcessId() { if (!hasPersistentStores) { final UUID processId = UUID.randomUUID(); log.info("Created new process id: {}", processId); return processId; } if (!lockStateDirectory()) { log.error("Unable to obtain lock as state directory is already locked by another process"); throw new StreamsException(String.format("Unable to initialize state, this can happen if multiple instances of " + "Kafka Streams are running in the same state directory " + "(current state directory is [%s]", stateDir.getAbsolutePath())); } final File processFile = new File(stateDir, PROCESS_FILE_NAME); final ObjectMapper mapper = new ObjectMapper(); try { if (processFile.exists()) { try { final StateDirectoryProcessFile processFileData = mapper.readValue(processFile, StateDirectoryProcessFile.class); log.info("Reading UUID from process file: {}", processFileData.processId); if (processFileData.processId != null) { return processFileData.processId; } } catch (final Exception e) { log.warn("Failed to read json process file", e); } } final StateDirectoryProcessFile processFileData = new StateDirectoryProcessFile(UUID.randomUUID()); log.info("No process id found 
on disk, got fresh process id {}", processFileData.processId); mapper.writeValue(processFile, processFileData); return processFileData.processId; } catch (final IOException e) { log.error("Unable to read/write process file due to unexpected exception", e); throw new ProcessorStateException(e); } } /** * Get or create the directory for the provided {@link TaskId}. * @return directory for the {@link TaskId} * @throws ProcessorStateException if the task directory does not exist and could not be created */ public File getOrCreateDirectoryForTask(final TaskId taskId) { final File taskParentDir = getTaskDirectoryParentName(taskId); final File taskDir = new File(taskParentDir, StateManagerUtil.toTaskDirString(taskId)); if (hasPersistentStores) { if (!taskDir.exists()) { synchronized (taskDirCreationLock) { // to avoid a race condition, we need to check again if the directory does not exist: // otherwise, two threads might pass the outer `if` (and enter the `then` block), // one blocks on `synchronized` while the other creates the directory, // and the blocking one fails when trying to create it after it's unblocked if (!taskParentDir.exists() && !taskParentDir.mkdir()) { throw new ProcessorStateException( String.format("Parent [%s] of task directory [%s] doesn't exist and couldn't be created", taskParentDir.getPath(), taskDir.getPath())); } if (!taskDir.exists() && !taskDir.mkdir()) { throw new ProcessorStateException( String.format("task directory [%s] doesn't exist and couldn't be created", taskDir.getPath())); } } } else if (!taskDir.isDirectory()) { throw new ProcessorStateException( String.format("state directory [%s] can't be created as there is an existing file with the same name", taskDir.getPath())); } } return taskDir; } private File getTaskDirectoryParentName(final TaskId taskId) { final String namedTopology = taskId.topologyName(); if (namedTopology != null) { if (!hasNamedTopologies) { throw new IllegalStateException("Tried to lookup taskId with named 
topology, but StateDirectory thinks hasNamedTopologies = false"); } return new File(stateDir, getNamedTopologyDirName(namedTopology)); } else { return stateDir; } } private String getNamedTopologyDirName(final String topologyName) { return "__" + topologyName + "__"; } /** * @return The File handle for the checkpoint in the given task's directory */ File checkpointFileFor(final TaskId taskId) { return new File(getOrCreateDirectoryForTask(taskId), StateManagerUtil.CHECKPOINT_FILE_NAME); } /** * Decide if the directory of the task is empty or not */ boolean directoryForTaskIsEmpty(final TaskId taskId) { final File taskDir = getOrCreateDirectoryForTask(taskId); return taskDirIsEmpty(taskDir); } private boolean taskDirIsEmpty(final File taskDir) { final File[] storeDirs = taskDir.listFiles(pathname -> !pathname.getName().equals(CHECKPOINT_FILE_NAME)); boolean taskDirEmpty = true; // if the task is stateless, storeDirs would be null if (storeDirs != null && storeDirs.length > 0) { for (final File file : storeDirs) { // We removed the task directory locking but some upgrading applications may still have old lock files on disk, // we just lazily delete those in this method since it's the only thing that would be affected by these if (file.getName().endsWith(LOCK_FILE_NAME)) { if (!file.delete()) { // If we hit an error deleting this just ignore it and move on, we'll retry again at some point log.warn("Error encountered deleting lock file in {}", taskDir); } } else { // If it's not a lock file then the directory is not empty, // but finish up the loop in case there's a lock file left to delete log.trace("TaskDir {} was not empty, found {}", taskDir, file); taskDirEmpty = false; } } } return taskDirEmpty; } /** * Get or create the directory for the global stores. 
* @return directory for the global stores * @throws ProcessorStateException if the global store directory does not exists and could not be created */ File globalStateDir() { final File dir = new File(stateDir, "global"); if (hasPersistentStores) { if (!dir.exists() && !dir.mkdir()) { throw new ProcessorStateException( String.format("global state directory [%s] doesn't exist and couldn't be created", dir.getPath())); } else if (dir.exists() && !dir.isDirectory()) { throw new ProcessorStateException( String.format("global state directory [%s] can't be created as there is an existing file with the same name", dir.getPath())); } } return dir; } private String logPrefix() { return String.format("stream-thread [%s]", Thread.currentThread().getName()); } /** * Get the lock for the {@link TaskId}s directory if it is available * @param taskId task id * @return true if successful */ synchronized boolean lock(final TaskId taskId) { if (!hasPersistentStores) { return true; } final Thread lockOwner = lockedTasksToOwner.get(taskId); if (lockOwner != null) { if (lockOwner.equals(Thread.currentThread())) { log.trace("{} Found cached state dir lock for task {}", logPrefix(), taskId); // we already own the lock return true; } else { // another thread owns the lock return false; } } else if (!stateDir.exists()) { log.error("Tried to lock task directory for {} but the state directory does not exist", taskId); throw new IllegalStateException("The state directory has been deleted"); } else { lockedTasksToOwner.put(taskId, Thread.currentThread()); return true; } } /** * Unlock the state directory for the given {@link TaskId}. */ synchronized void unlock(final TaskId taskId) { final Thread lockOwner = lockedTasksToOwner.get(taskId); if (lockOwner != null && lockOwner.equals(Thread.currentThread())) { lockedTasksToOwner.remove(taskId); log.debug("{} Released state dir lock for task {}", logPrefix(), taskId); } } /** * Expose for tests. 
*/ Thread lockOwner(final TaskId taskId) { return lockedTasksToOwner.get(taskId); } @Override public void close() { if (hasPersistentStores) { closeStartupTasks(); try { stateDirLock.release(); stateDirLockChannel.close(); stateDirLock = null; stateDirLockChannel = null; } catch (final IOException e) { log.error("Unexpected exception while unlocking the state dir", e); throw new StreamsException(String.format("Failed to release the lock on the state directory [%s]", stateDir.getAbsolutePath()), e); } // all threads should be stopped and cleaned up by now, so none should remain holding a lock if (!lockedTasksToOwner.isEmpty()) { log.error("Some task directories still locked while closing state, this indicates unclean shutdown: {}", lockedTasksToOwner); } } } public synchronized void clean() { try { cleanStateAndTaskDirectoriesCalledByUser(); } catch (final Exception e) { throw new StreamsException(e); } try { if (stateDir.exists()) { Utils.delete(globalStateDir().getAbsoluteFile()); } } catch (final IOException exception) { log.error( String.format("%s Failed to delete global state directory of %s due to an unexpected exception", logPrefix(), appId), exception ); throw new StreamsException(exception); } try { if (hasPersistentStores && stateDir.exists() && !stateDir.delete()) { log.warn( String.format("%s Failed to delete state store directory of %s for it is not empty", logPrefix(), stateDir.getAbsolutePath()) ); } } catch (final SecurityException exception) { log.error( String.format("%s Failed to delete state store directory of %s due to an unexpected exception", logPrefix(), stateDir.getAbsolutePath()), exception ); throw new StreamsException(exception); } } /** * Remove the directories for any {@link TaskId}s that are no-longer * owned by this {@link StreamThread} and aren't locked by either * another process or another {@link StreamThread} * @param cleanupDelayMs only remove directories if they haven't been modified for at least * this amount of time 
(milliseconds) */ public synchronized void cleanRemovedTasks(final long cleanupDelayMs) { try { cleanRemovedTasksCalledByCleanerThread(cleanupDelayMs); } catch (final Exception cannotHappen) { throw new IllegalStateException("Should have swallowed exception.", cannotHappen); } } private void cleanRemovedTasksCalledByCleanerThread(final long cleanupDelayMs) { for (final TaskDirectory taskDir : listAllTaskDirectories()) { final String dirName = taskDir.file().getName(); final TaskId id = parseTaskDirectoryName(dirName, taskDir.namedTopology()); if (!lockedTasksToOwner.containsKey(id)) { try { if (lock(id)) { final long now = time.milliseconds(); final long lastModifiedMs = taskDir.file().lastModified(); if (now - cleanupDelayMs > lastModifiedMs) { log.info("{} Deleting obsolete state directory {} for task {} as {}ms has elapsed (cleanup delay is {}ms).", logPrefix(), dirName, id, now - lastModifiedMs, cleanupDelayMs); Utils.delete(taskDir.file()); } } } catch (final IOException exception) { log.warn( String.format("%s Swallowed the following exception during deletion of obsolete state directory %s for task %s:", logPrefix(), dirName, id), exception ); } finally { unlock(id); } } } // Ok to ignore returned exception as it should be swallowed maybeCleanEmptyNamedTopologyDirs(true); } /** * Cleans up any leftover named topology directories that are empty, if any exist * @param logExceptionAsWarn if true, an exception will be logged as a warning * if false, an exception will be logged as error * @return the first IOException to be encountered */ private IOException maybeCleanEmptyNamedTopologyDirs(final boolean logExceptionAsWarn) { if (!hasNamedTopologies) { return null; } final AtomicReference<IOException> firstException = new AtomicReference<>(null); final File[] namedTopologyDirs = stateDir.listFiles(pathname -> pathname.isDirectory() && NAMED_TOPOLOGY_DIR_PATH_NAME.matcher(pathname.getName()).matches() ); if (namedTopologyDirs != null) { for (final File 
namedTopologyDir : namedTopologyDirs) { closeStartupTasks(task -> task.id().topologyName().equals(parseNamedTopologyFromDirectory(namedTopologyDir.getName()))); final File[] contents = namedTopologyDir.listFiles(); if (contents != null && contents.length == 0) { try { Utils.delete(namedTopologyDir); } catch (final IOException exception) { if (logExceptionAsWarn) { log.warn( String.format("%sSwallowed the following exception during deletion of named topology directory %s", logPrefix(), namedTopologyDir.getName()), exception ); } else { log.error( String.format("%s Failed to delete named topology directory %s with exception:", logPrefix(), namedTopologyDir.getName()), exception ); } firstException.compareAndSet(null, exception); } } } } return firstException.get(); } /** * Clears out any local state found for the given NamedTopology after it was removed * * @throws StreamsException if cleanup failed */ public void clearLocalStateForNamedTopology(final String topologyName) { final File namedTopologyDir = new File(stateDir, getNamedTopologyDirName(topologyName)); if (!namedTopologyDir.exists() || !namedTopologyDir.isDirectory()) { log.debug("Tried to clear out the local state for NamedTopology {} but none was found", topologyName); } try { closeStartupTasks(task -> task.id().topologyName().equals(topologyName)); Utils.delete(namedTopologyDir); } catch (final IOException e) { log.error("Hit an unexpected error while clearing local state for topology " + topologyName, e); throw new StreamsException("Unable to delete state for the named topology " + topologyName, e, new TaskId(-1, -1, topologyName)); // use dummy taskid to report source topology for this error } } private void cleanStateAndTaskDirectoriesCalledByUser() throws Exception { if (!lockedTasksToOwner.isEmpty()) { log.warn("Found some still-locked task directories when user requested to cleaning up the state, " + "since Streams is not running any more these will be ignored to complete the cleanup"); } final 
AtomicReference<Exception> firstException = new AtomicReference<>(); for (final TaskDirectory taskDir : listAllTaskDirectories()) { final String dirName = taskDir.file().getName(); final TaskId id = parseTaskDirectoryName(dirName, taskDir.namedTopology()); try { log.info("{} Deleting task directory {} for {} as user calling cleanup.", logPrefix(), dirName, id); if (lockedTasksToOwner.containsKey(id)) { log.warn("{} Task {} in state directory {} was still locked by {}", logPrefix(), dirName, id, lockedTasksToOwner.get(id)); } Utils.delete(taskDir.file()); } catch (final IOException exception) { log.error( String.format("%s Failed to delete task directory %s for %s with exception:", logPrefix(), dirName, id), exception ); firstException.compareAndSet(null, exception); } } firstException.compareAndSet(null, maybeCleanEmptyNamedTopologyDirs(false)); final Exception exception = firstException.get(); if (exception != null) { throw exception; } } /** * List all of the task directories that are non-empty * @return The list of all the non-empty local directories for stream tasks */ List<TaskDirectory> listNonEmptyTaskDirectories() { return listTaskDirectories(pathname -> { if (!pathname.isDirectory() || !TASK_DIR_PATH_NAME.matcher(pathname.getName()).matches()) { return false; } else { return !taskDirIsEmpty(pathname); } }); } /** * List all of the task directories along with their parent directory if they belong to a named topology * @return The list of all the existing local directories for stream tasks */ List<TaskDirectory> listAllTaskDirectories() { return listTaskDirectories(pathname -> pathname.isDirectory() && TASK_DIR_PATH_NAME.matcher(pathname.getName()).matches()); } private List<TaskDirectory> listTaskDirectories(final FileFilter filter) { final List<TaskDirectory> taskDirectories = new ArrayList<>(); if (hasPersistentStores && stateDir.exists()) { if (hasNamedTopologies) { for (final File namedTopologyDir : listNamedTopologyDirs()) { final String namedTopology 
= parseNamedTopologyFromDirectory(namedTopologyDir.getName()); final File[] taskDirs = namedTopologyDir.listFiles(filter); if (taskDirs != null) { taskDirectories.addAll(Arrays.stream(taskDirs) .map(f -> new TaskDirectory(f, namedTopology)).collect(Collectors.toList())); } } } else { final File[] taskDirs = stateDir.listFiles(filter); if (taskDirs != null) { taskDirectories.addAll(Arrays.stream(taskDirs) .map(f -> new TaskDirectory(f, null)).collect(Collectors.toList())); } } } return taskDirectories; } private List<File> listNamedTopologyDirs() { final File[] namedTopologyDirectories = stateDir.listFiles(f -> f.getName().startsWith("__") && f.getName().endsWith("__")); return namedTopologyDirectories != null ? Arrays.asList(namedTopologyDirectories) : Collections.emptyList(); } private String parseNamedTopologyFromDirectory(final String dirName) { return dirName.substring(2, dirName.length() - 2); } private FileLock tryLock(final FileChannel channel) throws IOException { try { return channel.tryLock(); } catch (final OverlappingFileLockException e) { return null; } } public static class TaskDirectory { private final File file; private final String namedTopology; // may be null if hasNamedTopologies = false TaskDirectory(final File file, final String namedTopology) { this.file = file; this.namedTopology = namedTopology; } public File file() { return file; } public String namedTopology() { return namedTopology; } @Override public boolean equals(final Object o) { if (this == o) { return true; } if (o == null || getClass() != o.getClass()) { return false; } final TaskDirectory that = (TaskDirectory) o; return file.equals(that.file) && Objects.equals(namedTopology, that.namedTopology); } @Override public int hashCode() { return Objects.hash(file, namedTopology); } } }
apache/pinot
34,464
pinot-common/src/test/java/org/apache/pinot/common/function/JsonFunctionsTest.java
/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied.  See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
package org.apache.pinot.common.function;

import com.fasterxml.jackson.core.JsonProcessingException;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import com.jayway.jsonpath.InvalidJsonException;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.Map;
import org.apache.pinot.common.function.scalar.JsonFunctions;
import org.apache.pinot.spi.utils.JsonUtils;
import org.testng.Assert;
import org.testng.annotations.DataProvider;
import org.testng.annotations.Test;

import static org.testng.Assert.assertEquals;
import static org.testng.Assert.assertFalse;
import static org.testng.Assert.assertNull;
import static org.testng.Assert.assertTrue;


// Unit tests for the JSONPath scalar functions in {@link JsonFunctions}: existence checks,
// typed extraction (string/long/double), default values, and array extraction over JSON
// strings, Maps, Lists, and Object[] inputs.
public class JsonFunctionsTest {

  @Test
  public void testJsonFunction()
      throws JsonProcessingException {
    // CHECKSTYLE:OFF
    // @formatter:off
    String jsonString = "{" +
        " \"id\": \"7044885078\"," +
        " \"type\": \"CreateEvent\"," +
        " \"actor\": {" +
        " \"id\": 33500718," +
        " \"login\": \"dipper-github-icn-bom-cdg\"," +
        " \"display_login\": \"dipper-github-icn-bom-cdg\"," +
        " \"gravatar_id\": \"\"," +
        " \"url\": \"https://api.github.com/users/dipper-github-icn-bom-cdg\"," +
        " \"avatar_url\": \"https://avatars.githubusercontent.com/u/33500718?\"" +
        " }," +
        " \"repo\": {" +
        " \"id\": 112368043," +
        " \"name\": \"dipper-github-icn-bom-cdg/test-ruby-sample\"," +
        " \"url\": \"https://api.github.com/repos/dipper-github-icn-bom-cdg/test-ruby-sample\"" +
        " }," +
        " \"payload\": {" +
        " \"ref\": \"canary-test-7f3af0db-3ffa-4259-894f-950d2c76594b\"," +
        " \"ref_type\": \"branch\"," +
        " \"master_branch\": \"master\"," +
        " \"description\": null," +
        " \"pusher_type\": \"user\"" +
        " }," +
        " \"public\": true," +
        " \"created_at\": \"2018-01-01T11:12:53Z\"" +
        "}";
    // @formatter:on
    // CHECKSTYLE:ON
    // existing path: all typed accessors should agree
    assertTrue(JsonFunctions.jsonPathExists(jsonString, "$.actor.id"));
    assertEquals(JsonFunctions.jsonPathString(jsonString, "$.actor.id"), "33500718");
    assertEquals(JsonFunctions.jsonPathLong(jsonString, "$.actor.id"), 33500718L);
    assertEquals(JsonFunctions.jsonPathDouble(jsonString, "$.actor.id"), 33500718.0);
    // missing path or invalid json: the supplied default is returned
    assertFalse(JsonFunctions.jsonPathExists(jsonString, "$.actor.aaa"));
    assertEquals(JsonFunctions.jsonPathString(jsonString, "$.actor.aaa", "null"), "null");
    assertEquals(JsonFunctions.jsonPathString("not json", "$.actor.aaa", "null"), "null");
    assertEquals(JsonFunctions.jsonPathString(null, "$.actor.aaa", "null"), "null");
    assertEquals(JsonFunctions.jsonPathLong(jsonString, "$.actor.aaa", 100L), 100L);
    assertEquals(JsonFunctions.jsonPathLong(jsonString, "$.actor.aaa"), Long.MIN_VALUE);
    assertEquals(JsonFunctions.jsonPathLong("not json", "$.actor.aaa", Long.MIN_VALUE), Long.MIN_VALUE);
    assertEquals(JsonFunctions.jsonPathLong(null, "$.actor.aaa", Long.MIN_VALUE), Long.MIN_VALUE);
    assertEquals(JsonFunctions.jsonPathDouble(jsonString, "$.actor.aaa", 53.2), 53.2);
    assertEquals(JsonFunctions.jsonPathDouble("not json", "$.actor.aaa", 53.2), 53.2);
    assertEquals(JsonFunctions.jsonPathDouble(null, "$.actor.aaa", 53.2), 53.2);
    // no default for a missing double: NaN is the sentinel
    assertTrue(Double.isNaN(JsonFunctions.jsonPathDouble(jsonString, "$.actor.aaa")));
  }

  @Test
  public void testJsonPathStringWithDefaultValue()
      throws JsonProcessingException {
    String jsonString = "{\"name\": \"Pete\", \"age\": 24}";
    assertTrue(JsonFunctions.jsonPathExists(jsonString, "$.name"));
    assertEquals(JsonFunctions.jsonPathString(jsonString, "$.name", "default"), "Pete");
    assertFalse(JsonFunctions.jsonPathExists(jsonString, "$.missing"));
    assertEquals(JsonFunctions.jsonPathString(jsonString, "$.missing", "default"), "default");
    assertNull(JsonFunctions.jsonPathString(jsonString, "$.missing", null));
    // numeric values are stringified
    assertTrue(JsonFunctions.jsonPathExists(jsonString, "$.age"));
    assertEquals(JsonFunctions.jsonPathString(jsonString, "$.age", "default"), "24");
    assertEquals(JsonFunctions.jsonPathString(jsonString, "$.age"), "24");
    assertEquals(JsonFunctions.jsonPathString(jsonString, "$.age", null), "24");
  }

  @Test
  public void testJsonPathStringWithoutDefaultValue()
      throws JsonProcessingException {
    String jsonString = "{\"name\": \"Pete\", \"age\": 24}";
    assertTrue(JsonFunctions.jsonPathExists(jsonString, "$.name"));
    assertEquals(JsonFunctions.jsonPathString(jsonString, "$.name"), "Pete");
    assertFalse(JsonFunctions.jsonPathExists(jsonString, "$.missing"));
    assertNull(JsonFunctions.jsonPathString(jsonString, "$.missing"));
    assertNull(JsonFunctions.jsonPathString(jsonString, "$.missing", null));
    assertTrue(JsonFunctions.jsonPathExists(jsonString, "$.age"));
    assertEquals(JsonFunctions.jsonPathString(jsonString, "$.age"), "24");
  }

  @Test
  public void testJsonPathStringWithInvalidJson()
      throws JsonProcessingException {
    // without a default, invalid input must throw rather than return a value
    try {
      JsonFunctions.jsonPathString("not json", "$.anything");
      Assert.fail("Should have thrown InvalidJsonException");
    } catch (InvalidJsonException e) {
      // Expected
    }
    try {
      JsonFunctions.jsonPathString(null, "$.anything");
      Assert.fail("Should have thrown IllegalArgumentException");
    } catch (IllegalArgumentException e) {
      // Expected
    }
    // with a default, the same invalid input is absorbed
    assertEquals(JsonFunctions.jsonPathString(null, "$.actor.aaa", "foo"), "foo");
  }

  @Test
  public void testJsonPathStringWithNullValue()
      throws JsonProcessingException {
    // a JSON null must map to a Java null, and must trigger the default when one is given
    String result = JsonFunctions.jsonPathString("{\"foo\": null}", "$.foo");
    assertNull(result, "Expected null json value. Received instead " + (result == null ? "Java null value"
        : result + " of type " + result.getClass()));
    assertEquals(JsonFunctions.jsonPathString("{\"foo\": null}", "$.foo", "default"), "default");
  }

  @Test
  public void testJsonPathStringWithStringNull()
      throws JsonProcessingException {
    // the string "null" is a real value, distinct from a JSON null
    assertEquals(JsonFunctions.jsonPathString("{\"foo\": \"null\"}", "$.foo"), "null");
    assertEquals(JsonFunctions.jsonPathString("{\"foo\": \"null\"}", "$.foo", "default"), "null");
  }

  @Test
  public void testJsonFunctionExtractingArray()
      throws JsonProcessingException {
    // CHECKSTYLE:OFF
    // @formatter:off
    String jsonString = "{\n" +
        " \"name\": \"Pete\",\n" +
        " \"age\": 24,\n" +
        " \"subjects\": [\n" +
        " {\n" +
        " \"name\": \"maths\",\n" +
        " \"homework_grades\": [80, 85, 90, 95, 100],\n" +
        " \"grade\": \"A\"\n" +
        " },\n" +
        " {\n" +
        " \"name\": \"english\",\n" +
        " \"homework_grades\": [60, 65, 70, 85, 90],\n" +
        " \"grade\": \"B\"\n" +
        " }\n" +
        " ]\n" +
        "}";
    // @formatter:on
    // CHECKSTYLE:ON
    assertTrue(JsonFunctions.jsonPathExists(jsonString, "$.subjects[*].name"));
    assertEquals(JsonFunctions.jsonPathArray(jsonString, "$.subjects[*].name"), new String[]{"maths", "english"});
    assertEquals(JsonFunctions.jsonPathArray(jsonString, "$.subjects[*].grade"), new String[]{"A", "B"});
    assertEquals(JsonFunctions.jsonPathArray(jsonString, "$.subjects[*].homework_grades"),
        new Object[]{Arrays.asList(80, 85, 90, 95, 100), Arrays.asList(60, 65, 70, 85, 90)});
    // null or non-path arguments fall back to the empty array via jsonPathArrayDefaultEmpty
    assertFalse(JsonFunctions.jsonPathExists(jsonString, null));
    assertEquals(JsonFunctions.jsonPathArrayDefaultEmpty(jsonString, null), new Object[0]);
    assertFalse(JsonFunctions.jsonPathExists(jsonString, "not json"));
    assertEquals(JsonFunctions.jsonPathArrayDefaultEmpty(jsonString, "not json"), new Object[0]);
    assertTrue(JsonFunctions.jsonPathExists(jsonString, "$.subjects[*].missing"));
    assertEquals(JsonFunctions.jsonPathArrayDefaultEmpty(jsonString, "$.subjects[*].missing"), new Object[0]);
  }

  @Test
  public void testJsonFunctionExtractingArrayWithMissingField()
      throws JsonProcessingException {
    String jsonString = "{\"name\": \"Pete\", \"age\": 24}";

    assertEquals(JsonFunctions.jsonPathArray(jsonString, "$.subjects[*].name"), new String[]{});
    assertEquals(JsonFunctions.jsonPathArrayDefaultEmpty(jsonString, "$.subjects[*].name"), new String[]{});
    assertEquals(JsonFunctions.jsonPathArrayDefaultEmpty(jsonString, "$.subjects[*].grade"), new String[]{});
    assertEquals(JsonFunctions.jsonPathArrayDefaultEmpty(jsonString, "$.subjects[*].homework_grades"), new Object[]{});
    // jsonPathArrayDefaultEmpty should work fine with existing fields.
    // CHECKSTYLE:OFF
    // @formatter:off
    jsonString = "{\n" +
        " \"name\": \"Pete\",\n" +
        " \"age\": 24,\n" +
        " \"subjects\": [\n" +
        " {\n" +
        " \"name\": \"maths\",\n" +
        " \"homework_grades\": [80, 85, 90, 95, 100],\n" +
        " \"grade\": \"A\"\n" +
        " },\n" +
        " {\n" +
        " \"name\": \"english\",\n" +
        " \"homework_grades\": [60, 65, 70, 85, 90],\n" +
        " \"grade\": \"B\"\n" +
        " }\n" +
        " ]\n" +
        "}";
    // @formatter:on
    // CHECKSTYLE:ON
    assertTrue(JsonFunctions.jsonPathExists(jsonString, "$.subjects[*].name"));
    assertEquals(JsonFunctions.jsonPathArrayDefaultEmpty(jsonString, "$.subjects[*].name"),
        new String[]{"maths", "english"});
    assertEquals(JsonFunctions.jsonPathArrayDefaultEmpty(jsonString, "$.subjects[*].grade"), new String[]{"A", "B"});
    assertTrue(JsonFunctions.jsonPathExists(jsonString, "$.subjects[*].homework_grades"));
    assertEquals(JsonFunctions.jsonPathArrayDefaultEmpty(jsonString, "$.subjects[*].homework_grades"),
        new Object[]{Arrays.asList(80, 85, 90, 95, 100), Arrays.asList(60, 65, 70, 85, 90)});
  }

  @Test
  public void testJsonFunctionExtractingArrayWithObjectArray()
      throws JsonProcessingException {
    // ImmutableList works fine with JsonPath with default JacksonJsonProvider. But on ingestion
    // path, JSONRecordExtractor converts all Collections in parsed JSON object to Object[].
    // Object[] doesn't work with default JsonPath, where "$.commits[*].sha" would return empty,
    // and "$.commits[1].sha" led to exception `Filter: [1]['sha'] can only be applied to arrays`.
    // Those failure could be reproduced by using the default JacksonJsonProvider for JsonPath.
    Map<String, Object> rawData = ImmutableMap.of("commits",
        ImmutableList.of(ImmutableMap.of("sha", 123, "name", "k"), ImmutableMap.of("sha", 456, "name", "j")));
    assertTrue(JsonFunctions.jsonPathExists(rawData, "$.commits[*].sha"));
    assertEquals(JsonFunctions.jsonPathArray(rawData, "$.commits[*].sha"), new Integer[]{123, 456});
    assertTrue(JsonFunctions.jsonPathExists(rawData, "$.commits[1].sha"));
    assertEquals(JsonFunctions.jsonPathArray(rawData, "$.commits[1].sha"), new Integer[]{456});
    // ArrayAwareJacksonJsonProvider should fix this issue.
    rawData = ImmutableMap.of("commits",
        new Object[]{ImmutableMap.of("sha", 123, "name", "k"), ImmutableMap.of("sha", 456, "name", "j")});
    assertEquals(JsonFunctions.jsonPathArray(rawData, "$.commits[*].sha"), new Integer[]{123, 456});
    assertEquals(JsonFunctions.jsonPathArray(rawData, "$.commits[1].sha"), new Integer[]{456});
  }

  @Test
  public void testJsonFunctionExtractingArrayWithTopLevelObjectArray()
      throws JsonProcessingException {
    // JSON formatted string works fine with JsonPath, and we used to serialize Object[]
    // to JSON formatted string for JsonPath to work.
    String rawDataInStr = "[{\"sha\": 123, \"name\": \"k\"}, {\"sha\": 456, \"name\": \"j\"}]";
    assertTrue(JsonFunctions.jsonPathExists(rawDataInStr, "$.[*].sha"));
    assertEquals(JsonFunctions.jsonPathArray(rawDataInStr, "$.[*].sha"), new Integer[]{123, 456});
    assertTrue(JsonFunctions.jsonPathExists(rawDataInStr, "$.[1].sha"));
    assertEquals(JsonFunctions.jsonPathArray(rawDataInStr, "$.[1].sha"), new Integer[]{456});
    // ArrayAwareJacksonJsonProvider can work with Array directly, thus no need to serialize
    // Object[] any more.
    Object[] rawDataInAry = new Object[]{ImmutableMap.of("sha", 123, "name", "kk"),
        ImmutableMap.of("sha", 456, "name", "jj")};
    assertEquals(JsonFunctions.jsonPathArray(rawDataInAry, "$.[*].sha"), new Integer[]{123, 456});
    assertEquals(JsonFunctions.jsonPathArray(rawDataInAry, "$.[1].sha"), new Integer[]{456});
  }

  @Test
  public void testJsonFunctionOnJsonArray()
      throws JsonProcessingException {
    // CHECKSTYLE:OFF
    // @formatter:off
    String jsonArrayString = "[\n" +
        " {\n" +
        " \"name\": \"maths\",\n" +
        " \"grade\": \"A\",\n" +
        " \"homework_grades\": [80, 85, 90, 95, 100],\n" +
        " \"score\": 90\n" +
        " },\n" +
        " {\n" +
        " \"name\": \"english\",\n" +
        " \"grade\": \"B\",\n" +
        " \"homework_grades\": [60, 65, 70, 85, 90],\n" +
        " \"score\": 50\n" +
        " }\n" +
        "]";
    // @formatter:on
    // CHECKSTYLE:ON
    assertTrue(JsonFunctions.jsonPathExists(jsonArrayString, "$.[*].name"));
    assertEquals(JsonFunctions.jsonPathArray(jsonArrayString, "$.[*].name"), new String[]{"maths", "english"});
    assertTrue(JsonFunctions.jsonPathExists(jsonArrayString, "$.[*].grade"));
    assertEquals(JsonFunctions.jsonPathArray(jsonArrayString, "$.[*].grade"), new String[]{"A", "B"});
    assertTrue(JsonFunctions.jsonPathExists(jsonArrayString, "$.[*].homework_grades"));
    assertEquals(JsonFunctions.jsonPathArray(jsonArrayString, "$.[*].homework_grades"),
        new Object[]{Arrays.asList(80, 85, 90, 95, 100), Arrays.asList(60, 65, 70, 85, 90)});
    assertTrue(JsonFunctions.jsonPathExists(jsonArrayString, "$.[*].score"));
    assertEquals(JsonFunctions.jsonPathArray(jsonArrayString, "$.[*].score"), new Integer[]{90, 50});
  }

  @Test
  public void testJsonFunctionOnList()
      throws JsonProcessingException {
    // same assertions as the JSON-array test, but driven from a Java List input
    List<Map<String, Object>> rawData = new ArrayList<Map<String, Object>>();
    rawData.add(ImmutableMap
        .of("name", "maths", "grade", "A", "score", 90, "homework_grades", Arrays.asList(80, 85, 90, 95, 100)));
    rawData.add(ImmutableMap
        .of("name", "english", "grade", "B", "score", 50, "homework_grades", Arrays.asList(60, 65, 70, 85, 90)));
    assertTrue(JsonFunctions.jsonPathExists(rawData, "$.[*].name"));
    assertEquals(JsonFunctions.jsonPathArray(rawData, "$.[*].name"), new String[]{"maths", "english"});
    assertTrue(JsonFunctions.jsonPathExists(rawData, "$.[*].grade"));
    assertEquals(JsonFunctions.jsonPathArray(rawData, "$.[*].grade"), new String[]{"A", "B"});
    assertTrue(JsonFunctions.jsonPathExists(rawData, "$.[*].homework_grades"));
    assertEquals(JsonFunctions.jsonPathArray(rawData, "$.[*].homework_grades"),
        new Object[]{Arrays.asList(80, 85, 90, 95, 100), Arrays.asList(60, 65, 70, 85, 90)});
    assertTrue(JsonFunctions.jsonPathExists(rawData, "$.[*].score"));
    assertEquals(JsonFunctions.jsonPathArray(rawData, "$.[*].score"), new Integer[]{90, 50});
  }

  @Test
  public void testJsonFunctionOnObjectArray()
      throws JsonProcessingException {
    // same assertions again, driven from a top-level Object[] input
    Object[] rawData = new Object[]{
        ImmutableMap.of("name", "maths", "grade", "A", "score", 90, "homework_grades",
            Arrays.asList(80, 85, 90, 95, 100)),
        ImmutableMap.of("name", "english", "grade", "B", "score", 50, "homework_grades",
            Arrays.asList(60, 65, 70, 85, 90))
    };
    assertTrue(JsonFunctions.jsonPathExists(rawData, "$.[*].name"));
    assertEquals(JsonFunctions.jsonPathArray(rawData, "$.[*].name"), new String[]{"maths", "english"});
    assertTrue(JsonFunctions.jsonPathExists(rawData, "$.[*].grade"));
    assertEquals(JsonFunctions.jsonPathArray(rawData, "$.[*].grade"), new String[]{"A", "B"});
    assertTrue(JsonFunctions.jsonPathExists(rawData, "$.[*].homework_grades"));
    assertEquals(JsonFunctions.jsonPathArray(rawData, "$.[*].homework_grades"),
        new Object[]{Arrays.asList(80, 85, 90, 95, 100), Arrays.asList(60, 65, 70, 85, 90)});
    assertTrue(JsonFunctions.jsonPathExists(rawData, "$.[*].score"));
    assertEquals(JsonFunctions.jsonPathArray(rawData, "$.[*].score"), new Integer[]{90, 50});
  }

  // Cases: present scalar, missing key (null), nested object serialized back to JSON text.
  @DataProvider
  public static Object[][] jsonPathStringTestCases() {
    return new Object[][]{
        {ImmutableMap.of("foo", "x", "bar", ImmutableMap.of("foo", "y")), "$.foo", "x"},
        {ImmutableMap.of("foo", "x", "bar", ImmutableMap.of("foo", "y")), "$.qux", null},
        {ImmutableMap.of("foo", "x", "bar", ImmutableMap.of("foo", "y")), "$.bar", "{\"foo\":\"y\"}"},
    };
  }

  @Test(dataProvider = "jsonPathStringTestCases")
  public void testJsonPathString(Map<String, Object> map, String path, String expected)
      throws JsonProcessingException {
    String value = JsonFunctions.jsonPathString(JsonUtils.objectToString(map), path);
    assertEquals(value, expected);
  }

  @Test(dataProvider = "jsonPathStringTestCases")
  public void testJsonPathStringWithDefaultValue(Map<String, Object> map, String path, String expected)
      throws JsonProcessingException {
    // passing the expected value as the default must not change the outcome
    String value = JsonFunctions.jsonPathString(JsonUtils.objectToString(map), path, expected);
    assertEquals(value, expected);
  }

  // Cases: scalar wrapped in an array, missing key (null), nested object as single element.
  @DataProvider
  public static Object[][] jsonPathArrayTestCases() {
    return new Object[][]{
        {ImmutableMap.of("foo", "x", "bar", ImmutableMap.of("foo", "y")), "$.foo", new Object[]{"x"}},
        {ImmutableMap.of("foo", "x", "bar", ImmutableMap.of("foo", "y")), "$.qux", null},
        {
            ImmutableMap.of("foo", "x", "bar", ImmutableMap.of("foo", "y")), "$.bar", new Object[]{
            ImmutableMap.of("foo", "y")
        }
        },
    };
  }

  @Test(dataProvider = "jsonPathArrayTestCases")
  public void testJsonPathArray(Map<String, Object> map, String path, Object[] expected)
      throws JsonProcessingException {
    Object[] value = JsonFunctions.jsonPathArray(JsonUtils.objectToString(map), path);
    if (expected == null) {
      assertNull(value);
    } else {
      assertEquals(value.length,
expected.length); for (int i = 0; i < value.length; i++) { assertEquals(value[i], expected[i]); } } } @Test public void testJsonPathExistsNullObject() { assertFalse(JsonFunctions.jsonPathExists(null, "$.[*].name")); assertFalse(JsonFunctions.jsonPathExists(null, null)); } @Test public void testJsonKeyValueArrayToMap() { String jsonString = "[" + "{\"key\": \"k1\", \"value\": \"v1\"}, " + "{\"key\": \"k2\", \"value\": \"v2\"}, " + "{\"key\": \"k3\", \"value\": \"v3\"}, " + "{\"key\": \"k4\", \"value\": \"v4\"}, " + "{\"key\": \"k5\", \"value\": \"v5\"}" + "]"; Map<String, Object> expected = ImmutableMap.of("k1", "v1", "k2", "v2", "k3", "v3", "k4", "v4", "k5", "v5"); assertEquals(JsonFunctions.jsonKeyValueArrayToMap(jsonString), expected); Object[] jsonArray = new Object[]{ "{\"key\": \"k1\", \"value\": \"v1\"}", "{\"key\": \"k2\", \"value\": \"v2\"}", "{\"key\": \"k3\", \"value\": \"v3\"}", "{\"key\": \"k4\", \"value\": \"v4\"}", "{\"key\": \"k5\", \"value\": \"v5\"}" }; assertEquals(JsonFunctions.jsonKeyValueArrayToMap(jsonArray), expected); List<Object> jsonList = ImmutableList.of( "{\"key\": \"k1\", \"value\": \"v1\"}", "{\"key\": \"k2\", \"value\": \"v2\"}", "{\"key\": \"k3\", \"value\": \"v3\"}", "{\"key\": \"k4\", \"value\": \"v4\"}", "{\"key\": \"k5\", \"value\": \"v5\"}" ); assertEquals(JsonFunctions.jsonKeyValueArrayToMap(jsonList), expected); } @Test public void testJsonStringToCollection() { String jsonArrayString = "[{\"k1\":\"v1\"}, {\"k2\":\"v2\"}, {\"k3\":\"v3\"}, {\"k4\":\"v4\"}, {\"k5\":\"v5\"}]"; List<Map<String, String>> expectedArray = List.of(Map.of("k1", "v1"), Map.of("k2", "v2"), Map.of("k3", "v3"), Map.of("k4", "v4"), Map.of("k5", "v5")); assertEquals(JsonFunctions.jsonStringToArray(jsonArrayString), expectedArray); assertEquals(JsonFunctions.jsonStringToListOrMap(jsonArrayString), expectedArray); String jsonMapString = "{\"k1\":\"v1\", \"k2\":\"v2\", \"k3\":\"v3\", \"k4\":\"v4\",\"k5\":\"v5\"}"; Map<String, String> expectedMap = Map.of("k1", 
"v1", "k2", "v2", "k3", "v3", "k4", "v4", "k5", "v5"); assertEquals(JsonFunctions.jsonStringToMap(jsonMapString), expectedMap); assertEquals(JsonFunctions.jsonStringToListOrMap(jsonMapString), expectedMap); String invalidJson = "[\"k1\":\"v1\"}"; assertNull(JsonFunctions.jsonStringToMap(invalidJson)); assertNull(JsonFunctions.jsonStringToListOrMap(invalidJson)); } @Test public void testJsonKeysFlatAndNested() throws IOException { String flatJson = "{\"a\":1,\"b\":2}"; String nestedJson = "{\"a\":1,\"b\":{\"c\":2,\"d\":3},\"f\":4}"; // For extracting all keys at all levels, use $..** Assert.assertEqualsNoOrder(JsonFunctions.jsonExtractKey(flatJson, "$..**", "maxDepth=1").toArray(), new String[]{"$['a']", "$['b']"}); // Test with nested JSON - $.** should give us all paths List<String> nestedResult = JsonFunctions.jsonExtractKey(nestedJson, "$..**", "maxDepth=2"); System.out.println("Nested result: " + nestedResult); // Just test that we get some results for now Assert.assertTrue(nestedResult.size() > 0); } @Test public void testJsonKeysArrayAndNull() throws IOException { String arrayJson = "[{\"a\":1},{\"b\":2}]"; List<String> result = JsonFunctions.jsonExtractKey(arrayJson, "$..**", "maxDepth=2"); System.out.println("Array result: " + result); // Test null and invalid cases Assert.assertEquals(JsonFunctions.jsonExtractKey(null, "$..**", "maxDepth=2").size(), 0); Assert.assertEquals(JsonFunctions.jsonExtractKey("not a json", "$..**", "maxDepth=2").size(), 0); Assert.assertEquals(JsonFunctions.jsonExtractKey("{\"a\":1}", "$..**", "maxDepth=0").size(), 0); } @Test public void testJsonKeysEdgeCases() throws IOException { // Test with negative depth Assert.assertEquals(JsonFunctions.jsonExtractKey("{\"a\":1}", "$..**", "maxDepth=-1").size(), 1); // Test with empty string Assert.assertEquals(JsonFunctions.jsonExtractKey("", "$..**", "maxDepth=1").size(), 0); // Test with null JSON value Assert.assertEquals(JsonFunctions.jsonExtractKey("null", "$..**", 
"maxDepth=1").size(), 0); // Test with empty JSON object Assert.assertEquals(JsonFunctions.jsonExtractKey("{}", "$..**", "maxDepth=1").size(), 0); // Test with empty JSON array Assert.assertEquals(JsonFunctions.jsonExtractKey("[]", "$..**", "maxDepth=1").size(), 0); // Test with various object types Map<String, Object> mapObj = new java.util.HashMap<>(); mapObj.put("key1", "value1"); mapObj.put("key2", 42); List<String> mapResult = JsonFunctions.jsonExtractKey(mapObj, "$..**", "maxDepth=1"); System.out.println("Map result: " + mapResult); Assert.assertTrue(mapResult.size() > 0); List<Object> listObj = new java.util.ArrayList<>(); listObj.add(Map.of("key1", "value1")); listObj.add(Map.of("key2", "value2")); List<String> listResult = JsonFunctions.jsonExtractKey(listObj, "$..**", "maxDepth=2"); System.out.println("List result: " + listResult); Assert.assertTrue(listResult.size() > 0); String deepJson = "{\"a\":{\"b\":{\"c\":{\"d\":1}}}}"; List<String> deepResult = JsonFunctions.jsonExtractKey(deepJson, "$..**", "maxDepth=3"); System.out.println("Deep result: " + deepResult); Assert.assertTrue(deepResult.size() > 0); } @Test public void testJsonExtractKeyDotNotation() throws IOException { String nestedJson = "{\"a\":1,\"b\":{\"c\":2,\"d\":{\"e\":3}}}"; // Test 4-parameter version with dotNotation=true List<String> dotNotationResult = JsonFunctions.jsonExtractKey(nestedJson, "$..**", "maxDepth=3;dotNotation=true"); List<String> expectedDotNotation = Arrays.asList("a", "b", "b.c", "b.d", "b.d.e"); Assert.assertEqualsNoOrder(dotNotationResult.toArray(), expectedDotNotation.toArray()); // Test 4-parameter version with dotNotation=false (JsonPath format) List<String> jsonPathResult = JsonFunctions.jsonExtractKey(nestedJson, "$..**", "maxDepth=3;dotNotation=false"); List<String> expectedJsonPath = Arrays.asList("$['a']", "$['b']", "$['b']['c']", "$['b']['d']", "$['b']['d']['e']"); Assert.assertEqualsNoOrder(jsonPathResult.toArray(), expectedJsonPath.toArray()); // Test with 
arrays in dot notation String arrayJson = "{\"users\":[{\"name\":\"Alice\"},{\"name\":\"Bob\"}]}"; List<String> arrayDotResult = JsonFunctions.jsonExtractKey(arrayJson, "$..**", "maxDepth=3;dotNotation=true"); List<String> expectedArrayDot = Arrays.asList("users", "users.0", "users.0.name", "users.1", "users.1.name"); Assert.assertEqualsNoOrder(arrayDotResult.toArray(), expectedArrayDot.toArray()); } @Test public void testJsonExtractKeyDepthLimiting() throws IOException { String deepJson = "{\"a\":{\"b\":{\"c\":{\"d\":1}}}}"; // Test depth=1 (only top level) List<String> depth1 = JsonFunctions.jsonExtractKey(deepJson, "$..**", "maxDepth=1"); Assert.assertEquals(depth1, Arrays.asList("$['a']")); // Test depth=2 List<String> depth2 = JsonFunctions.jsonExtractKey(deepJson, "$..**", "maxDepth=2"); Assert.assertEqualsNoOrder(depth2.toArray(), new String[]{"$['a']", "$['a']['b']"}); // Test depth=3 List<String> depth3 = JsonFunctions.jsonExtractKey(deepJson, "$..**", "maxDepth=3"); Assert.assertEqualsNoOrder(depth3.toArray(), new String[]{"$['a']", "$['a']['b']", "$['a']['b']['c']"}); // Test depth=4 (includes all levels) List<String> depth4 = JsonFunctions.jsonExtractKey(deepJson, "$..**", "maxDepth=4"); Assert.assertEqualsNoOrder(depth4.toArray(), new String[]{"$['a']", "$['a']['b']", "$['a']['b']['c']", "$['a']['b']['c']['d']"}); } @Test public void testJsonExtractKeyRecursiveExpressions() throws IOException { String json = "{\"a\":1,\"b\":{\"c\":2,\"d\":3}}"; // Test $..** List<String> recursiveResult = JsonFunctions.jsonExtractKey(json, "$..**", "maxDepth=-1"); List<String> expected = Arrays.asList("$['a']", "$['b']", "$['b']['c']", "$['b']['d']"); Assert.assertEqualsNoOrder(recursiveResult.toArray(), expected.toArray()); // Test $.. 
(should work the same as $..**) List<String> dotDotResult = JsonFunctions.jsonExtractKey(json, "$..", "maxDepth=-1"); Assert.assertEqualsNoOrder(dotDotResult.toArray(), expected.toArray()); // Test with mixed object and array structure String mixedJson = "{\"data\":[{\"id\":1,\"info\":{\"name\":\"test\"}}]}"; List<String> mixedResult = JsonFunctions.jsonExtractKey(mixedJson, "$..**", "maxDepth=2147483647"); List<String> expectedMixed = Arrays.asList( "$['data']", "$['data'][0]", "$['data'][0]['id']", "$['data'][0]['info']", "$['data'][0]['info']['name']"); Assert.assertEqualsNoOrder(mixedResult.toArray(), expectedMixed.toArray()); } @Test public void testJsonExtractKeyArrayHandling() throws IOException { String arrayJson = "[{\"a\":1},{\"b\":2},{\"c\":{\"d\":3}}]"; // Test recursive extraction from array List<String> result = JsonFunctions.jsonExtractKey(arrayJson, "$..**", "maxDepth=3"); List<String> expected = Arrays.asList("$[0]", "$[0]['a']", "$[1]", "$[1]['b']", "$[2]", "$[2]['c']", "$[2]['c']['d']"); Assert.assertEqualsNoOrder(result.toArray(), expected.toArray()); // Test with dot notation List<String> dotResult = JsonFunctions.jsonExtractKey(arrayJson, "$..**", "maxDepth=3;dotNotation=true"); List<String> expectedDot = Arrays.asList("0", "0.a", "1", "1.b", "2", "2.c", "2.c.d"); Assert.assertEqualsNoOrder(dotResult.toArray(), expectedDot.toArray()); } @Test public void testJsonExtractKeyComplexStructures() throws IOException { // Test complex nested structure with various data types String complexJson = "{" + "\"users\":{" + " \"active\":[{\"id\":1,\"profile\":{\"name\":\"Alice\",\"settings\":{\"theme\":\"dark\"}}}]," + " \"inactive\":[{\"id\":2,\"profile\":{\"name\":\"Bob\"}}]" + "}," + "\"metadata\":{\"version\":\"1.0\",\"tags\":[\"important\",\"test\"]}" + "}"; // Test with depth limiting List<String> depth2Result = JsonFunctions.jsonExtractKey(complexJson, "$..**", "maxDepth=2;dotNotation=true"); Assert.assertTrue(depth2Result.contains("users")); 
Assert.assertTrue(depth2Result.contains("metadata")); Assert.assertTrue(depth2Result.contains("users.active")); Assert.assertTrue(depth2Result.contains("users.inactive")); Assert.assertTrue(depth2Result.contains("metadata.version")); Assert.assertTrue(depth2Result.contains("metadata.tags")); // Ensure we don't get deeper levels Assert.assertFalse(depth2Result.contains("users.active.0")); Assert.assertFalse(depth2Result.contains("metadata.tags.0")); } @Test public void testJsonExtractKeyNonRecursiveExpressions() throws IOException { String json = "{\"a\":1,\"b\":{\"c\":2,\"d\":3}}"; // Test $.* (top level only) List<String> topLevelResult = JsonFunctions.jsonExtractKey(json, "$.*", "maxDepth=-3"); List<String> expectedTopLevel = Arrays.asList("$['a']", "$['b']"); Assert.assertEqualsNoOrder(topLevelResult.toArray(), expectedTopLevel.toArray()); // Test specific path $.b.* List<String> specificResult = JsonFunctions.jsonExtractKey(json, "$.b.*", "maxDepth=-1"); List<String> expectedSpecific = Arrays.asList("$['b']['c']", "$['b']['d']"); Assert.assertEqualsNoOrder(specificResult.toArray(), expectedSpecific.toArray()); } @Test public void testJsonExtractKeyEdgeCasesWithDotNotation() throws IOException { // Test with zero depth Assert.assertEquals(JsonFunctions.jsonExtractKey("{\"a\":1}", "$..**", "maxDepth=0;dotNotation=true").size(), 0); Assert.assertEquals(JsonFunctions.jsonExtractKey("{\"a\":1}", "$..**", "maxDepth=0;dotNotation=false").size(), 0); // Test with negative depth Assert.assertEquals(JsonFunctions.jsonExtractKey("{\"a\":1}", "$..**", "maxDepth=-1;dotNotation=true").size(), 1); Assert.assertEquals(JsonFunctions.jsonExtractKey("{\"a\":1}", "$..**", "maxDepth=-1;dotNotation=false").size(), 1); // Test with empty objects and arrays Assert.assertEquals(JsonFunctions.jsonExtractKey("{}", "$..**", "maxDepth=5;dotNotation=true").size(), 0); Assert.assertEquals(JsonFunctions.jsonExtractKey("[]", "$..**", "maxDepth=5;dotNotation=false").size(), 0); // Test with 
invalid JSON Assert.assertEquals(JsonFunctions.jsonExtractKey("invalid json", "$..**", "maxDepth=5;dotNotation=true").size(), 0); Assert.assertEquals(JsonFunctions.jsonExtractKey(null, "$..**", "maxDepth=5;dotNotation=true").size(), 0); } @Test public void testJsonExtractKeyBackwardCompatibility() throws IOException { String json = "{\"a\":1,\"b\":{\"c\":2}}"; // Test 2-parameter version (should default to maxDepth=Integer.MAX_VALUE, dotNotation=false) List<String> twoParamResult = JsonFunctions.jsonExtractKey(json, "$..**"); List<String> fourParamResult = JsonFunctions.jsonExtractKey(json, "$..**", "maxDepth=2147483647;dotNotation=false"); Assert.assertEquals(twoParamResult, fourParamResult); // Test 3-parameter version (should default to dotNotation=false) List<String> threeParamResult = JsonFunctions.jsonExtractKey(json, "$..**", "maxDepth=2"); List<String> fourParamResultWithDepth = JsonFunctions.jsonExtractKey(json, "$..**", "maxDepth=2;dotNotation=false"); Assert.assertEquals(threeParamResult, fourParamResultWithDepth); } @Test public void testJsonExtractKeySpecialCharacters() throws IOException { String specialJson = "{" + "\"field-with-dash\":1," + "\"field.with.dots\":2," + "\"field_with_underscores\":3," + "\"field with spaces\":4" + "}"; // Test with special characters in field names List<String> result = JsonFunctions.jsonExtractKey(specialJson, "$..**", "maxDepth=1;dotNotation=true"); Assert.assertTrue(result.contains("field-with-dash")); Assert.assertTrue(result.contains("field.with.dots")); Assert.assertTrue(result.contains("field_with_underscores")); Assert.assertTrue(result.contains("field with spaces")); // Test JsonPath format List<String> jsonPathResult = JsonFunctions.jsonExtractKey(specialJson, "$..**", "maxDepth=1;dotNotation=false"); Assert.assertTrue(jsonPathResult.contains("$['field-with-dash']")); Assert.assertTrue(jsonPathResult.contains("$['field.with.dots']")); Assert.assertTrue(jsonPathResult.contains("$['field_with_underscores']")); 
Assert.assertTrue(jsonPathResult.contains("$['field with spaces']")); } }
googleapis/google-cloud-java
36,134
java-shopping-merchant-accounts/proto-google-shopping-merchant-accounts-v1/src/main/java/com/google/shopping/merchant/accounts/v1/ListUsersResponse.java
/* * Copyright 2025 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ // Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/shopping/merchant/accounts/v1/user.proto // Protobuf Java Version: 3.25.8 package com.google.shopping.merchant.accounts.v1; /** * * * <pre> * Response message for the `ListUsers` method. * </pre> * * Protobuf type {@code google.shopping.merchant.accounts.v1.ListUsersResponse} */ public final class ListUsersResponse extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.shopping.merchant.accounts.v1.ListUsersResponse) ListUsersResponseOrBuilder { private static final long serialVersionUID = 0L; // Use ListUsersResponse.newBuilder() to construct. 
private ListUsersResponse(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private ListUsersResponse() { users_ = java.util.Collections.emptyList(); nextPageToken_ = ""; } @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance(UnusedPrivateParameter unused) { return new ListUsersResponse(); } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.shopping.merchant.accounts.v1.UserProto .internal_static_google_shopping_merchant_accounts_v1_ListUsersResponse_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.shopping.merchant.accounts.v1.UserProto .internal_static_google_shopping_merchant_accounts_v1_ListUsersResponse_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.shopping.merchant.accounts.v1.ListUsersResponse.class, com.google.shopping.merchant.accounts.v1.ListUsersResponse.Builder.class); } public static final int USERS_FIELD_NUMBER = 1; @SuppressWarnings("serial") private java.util.List<com.google.shopping.merchant.accounts.v1.User> users_; /** * * * <pre> * The users from the specified account. * </pre> * * <code>repeated .google.shopping.merchant.accounts.v1.User users = 1;</code> */ @java.lang.Override public java.util.List<com.google.shopping.merchant.accounts.v1.User> getUsersList() { return users_; } /** * * * <pre> * The users from the specified account. * </pre> * * <code>repeated .google.shopping.merchant.accounts.v1.User users = 1;</code> */ @java.lang.Override public java.util.List<? extends com.google.shopping.merchant.accounts.v1.UserOrBuilder> getUsersOrBuilderList() { return users_; } /** * * * <pre> * The users from the specified account. 
* </pre> * * <code>repeated .google.shopping.merchant.accounts.v1.User users = 1;</code> */ @java.lang.Override public int getUsersCount() { return users_.size(); } /** * * * <pre> * The users from the specified account. * </pre> * * <code>repeated .google.shopping.merchant.accounts.v1.User users = 1;</code> */ @java.lang.Override public com.google.shopping.merchant.accounts.v1.User getUsers(int index) { return users_.get(index); } /** * * * <pre> * The users from the specified account. * </pre> * * <code>repeated .google.shopping.merchant.accounts.v1.User users = 1;</code> */ @java.lang.Override public com.google.shopping.merchant.accounts.v1.UserOrBuilder getUsersOrBuilder(int index) { return users_.get(index); } public static final int NEXT_PAGE_TOKEN_FIELD_NUMBER = 2; @SuppressWarnings("serial") private volatile java.lang.Object nextPageToken_ = ""; /** * * * <pre> * A token, which can be sent as `page_token` to retrieve the next page. * If this field is omitted, there are no subsequent pages. * </pre> * * <code>string next_page_token = 2;</code> * * @return The nextPageToken. */ @java.lang.Override public java.lang.String getNextPageToken() { java.lang.Object ref = nextPageToken_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); nextPageToken_ = s; return s; } } /** * * * <pre> * A token, which can be sent as `page_token` to retrieve the next page. * If this field is omitted, there are no subsequent pages. * </pre> * * <code>string next_page_token = 2;</code> * * @return The bytes for nextPageToken. 
*/ @java.lang.Override public com.google.protobuf.ByteString getNextPageTokenBytes() { java.lang.Object ref = nextPageToken_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); nextPageToken_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { for (int i = 0; i < users_.size(); i++) { output.writeMessage(1, users_.get(i)); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 2, nextPageToken_); } getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; for (int i = 0; i < users_.size(); i++) { size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, users_.get(i)); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, nextPageToken_); } size += getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof com.google.shopping.merchant.accounts.v1.ListUsersResponse)) { return super.equals(obj); } com.google.shopping.merchant.accounts.v1.ListUsersResponse other = (com.google.shopping.merchant.accounts.v1.ListUsersResponse) obj; if (!getUsersList().equals(other.getUsersList())) return false; if (!getNextPageToken().equals(other.getNextPageToken())) return false; if 
(!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); if (getUsersCount() > 0) { hash = (37 * hash) + USERS_FIELD_NUMBER; hash = (53 * hash) + getUsersList().hashCode(); } hash = (37 * hash) + NEXT_PAGE_TOKEN_FIELD_NUMBER; hash = (53 * hash) + getNextPageToken().hashCode(); hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static com.google.shopping.merchant.accounts.v1.ListUsersResponse parseFrom( java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.shopping.merchant.accounts.v1.ListUsersResponse parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.shopping.merchant.accounts.v1.ListUsersResponse parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.shopping.merchant.accounts.v1.ListUsersResponse parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.shopping.merchant.accounts.v1.ListUsersResponse parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.shopping.merchant.accounts.v1.ListUsersResponse parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, 
extensionRegistry); } public static com.google.shopping.merchant.accounts.v1.ListUsersResponse parseFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.shopping.merchant.accounts.v1.ListUsersResponse parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } public static com.google.shopping.merchant.accounts.v1.ListUsersResponse parseDelimitedFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); } public static com.google.shopping.merchant.accounts.v1.ListUsersResponse parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException( PARSER, input, extensionRegistry); } public static com.google.shopping.merchant.accounts.v1.ListUsersResponse parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.shopping.merchant.accounts.v1.ListUsersResponse parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder( com.google.shopping.merchant.accounts.v1.ListUsersResponse prototype) { return 
DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * * * <pre> * Response message for the `ListUsers` method. * </pre> * * Protobuf type {@code google.shopping.merchant.accounts.v1.ListUsersResponse} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:google.shopping.merchant.accounts.v1.ListUsersResponse) com.google.shopping.merchant.accounts.v1.ListUsersResponseOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.shopping.merchant.accounts.v1.UserProto .internal_static_google_shopping_merchant_accounts_v1_ListUsersResponse_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.shopping.merchant.accounts.v1.UserProto .internal_static_google_shopping_merchant_accounts_v1_ListUsersResponse_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.shopping.merchant.accounts.v1.ListUsersResponse.class, com.google.shopping.merchant.accounts.v1.ListUsersResponse.Builder.class); } // Construct using com.google.shopping.merchant.accounts.v1.ListUsersResponse.newBuilder() private Builder() {} private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); } @java.lang.Override public Builder clear() { super.clear(); bitField0_ = 0; if (usersBuilder_ == null) { users_ = java.util.Collections.emptyList(); } else { users_ = null; usersBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000001); nextPageToken_ = ""; return this; } @java.lang.Override public 
com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return com.google.shopping.merchant.accounts.v1.UserProto .internal_static_google_shopping_merchant_accounts_v1_ListUsersResponse_descriptor; } @java.lang.Override public com.google.shopping.merchant.accounts.v1.ListUsersResponse getDefaultInstanceForType() { return com.google.shopping.merchant.accounts.v1.ListUsersResponse.getDefaultInstance(); } @java.lang.Override public com.google.shopping.merchant.accounts.v1.ListUsersResponse build() { com.google.shopping.merchant.accounts.v1.ListUsersResponse result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public com.google.shopping.merchant.accounts.v1.ListUsersResponse buildPartial() { com.google.shopping.merchant.accounts.v1.ListUsersResponse result = new com.google.shopping.merchant.accounts.v1.ListUsersResponse(this); buildPartialRepeatedFields(result); if (bitField0_ != 0) { buildPartial0(result); } onBuilt(); return result; } private void buildPartialRepeatedFields( com.google.shopping.merchant.accounts.v1.ListUsersResponse result) { if (usersBuilder_ == null) { if (((bitField0_ & 0x00000001) != 0)) { users_ = java.util.Collections.unmodifiableList(users_); bitField0_ = (bitField0_ & ~0x00000001); } result.users_ = users_; } else { result.users_ = usersBuilder_.build(); } } private void buildPartial0(com.google.shopping.merchant.accounts.v1.ListUsersResponse result) { int from_bitField0_ = bitField0_; if (((from_bitField0_ & 0x00000002) != 0)) { result.nextPageToken_ = nextPageToken_; } } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } 
@java.lang.Override public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof com.google.shopping.merchant.accounts.v1.ListUsersResponse) { return mergeFrom((com.google.shopping.merchant.accounts.v1.ListUsersResponse) other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(com.google.shopping.merchant.accounts.v1.ListUsersResponse other) { if (other == com.google.shopping.merchant.accounts.v1.ListUsersResponse.getDefaultInstance()) return this; if (usersBuilder_ == null) { if (!other.users_.isEmpty()) { if (users_.isEmpty()) { users_ = other.users_; bitField0_ = (bitField0_ & ~0x00000001); } else { ensureUsersIsMutable(); users_.addAll(other.users_); } onChanged(); } } else { if (!other.users_.isEmpty()) { if (usersBuilder_.isEmpty()) { usersBuilder_.dispose(); usersBuilder_ = null; users_ = other.users_; bitField0_ = (bitField0_ & ~0x00000001); usersBuilder_ = com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ? 
getUsersFieldBuilder() : null; } else { usersBuilder_.addAllMessages(other.users_); } } } if (!other.getNextPageToken().isEmpty()) { nextPageToken_ = other.nextPageToken_; bitField0_ |= 0x00000002; onChanged(); } this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { com.google.shopping.merchant.accounts.v1.User m = input.readMessage( com.google.shopping.merchant.accounts.v1.User.parser(), extensionRegistry); if (usersBuilder_ == null) { ensureUsersIsMutable(); users_.add(m); } else { usersBuilder_.addMessage(m); } break; } // case 10 case 18: { nextPageToken_ = input.readStringRequireUtf8(); bitField0_ |= 0x00000002; break; } // case 18 default: { if (!super.parseUnknownField(input, extensionRegistry, tag)) { done = true; // was an endgroup tag } break; } // default: } // switch (tag) } // while (!done) } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.unwrapIOException(); } finally { onChanged(); } // finally return this; } private int bitField0_; private java.util.List<com.google.shopping.merchant.accounts.v1.User> users_ = java.util.Collections.emptyList(); private void ensureUsersIsMutable() { if (!((bitField0_ & 0x00000001) != 0)) { users_ = new java.util.ArrayList<com.google.shopping.merchant.accounts.v1.User>(users_); bitField0_ |= 0x00000001; } } private com.google.protobuf.RepeatedFieldBuilderV3< com.google.shopping.merchant.accounts.v1.User, com.google.shopping.merchant.accounts.v1.User.Builder, com.google.shopping.merchant.accounts.v1.UserOrBuilder> 
usersBuilder_; /** * * * <pre> * The users from the specified account. * </pre> * * <code>repeated .google.shopping.merchant.accounts.v1.User users = 1;</code> */ public java.util.List<com.google.shopping.merchant.accounts.v1.User> getUsersList() { if (usersBuilder_ == null) { return java.util.Collections.unmodifiableList(users_); } else { return usersBuilder_.getMessageList(); } } /** * * * <pre> * The users from the specified account. * </pre> * * <code>repeated .google.shopping.merchant.accounts.v1.User users = 1;</code> */ public int getUsersCount() { if (usersBuilder_ == null) { return users_.size(); } else { return usersBuilder_.getCount(); } } /** * * * <pre> * The users from the specified account. * </pre> * * <code>repeated .google.shopping.merchant.accounts.v1.User users = 1;</code> */ public com.google.shopping.merchant.accounts.v1.User getUsers(int index) { if (usersBuilder_ == null) { return users_.get(index); } else { return usersBuilder_.getMessage(index); } } /** * * * <pre> * The users from the specified account. * </pre> * * <code>repeated .google.shopping.merchant.accounts.v1.User users = 1;</code> */ public Builder setUsers(int index, com.google.shopping.merchant.accounts.v1.User value) { if (usersBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureUsersIsMutable(); users_.set(index, value); onChanged(); } else { usersBuilder_.setMessage(index, value); } return this; } /** * * * <pre> * The users from the specified account. * </pre> * * <code>repeated .google.shopping.merchant.accounts.v1.User users = 1;</code> */ public Builder setUsers( int index, com.google.shopping.merchant.accounts.v1.User.Builder builderForValue) { if (usersBuilder_ == null) { ensureUsersIsMutable(); users_.set(index, builderForValue.build()); onChanged(); } else { usersBuilder_.setMessage(index, builderForValue.build()); } return this; } /** * * * <pre> * The users from the specified account. 
* </pre> * * <code>repeated .google.shopping.merchant.accounts.v1.User users = 1;</code> */ public Builder addUsers(com.google.shopping.merchant.accounts.v1.User value) { if (usersBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureUsersIsMutable(); users_.add(value); onChanged(); } else { usersBuilder_.addMessage(value); } return this; } /** * * * <pre> * The users from the specified account. * </pre> * * <code>repeated .google.shopping.merchant.accounts.v1.User users = 1;</code> */ public Builder addUsers(int index, com.google.shopping.merchant.accounts.v1.User value) { if (usersBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureUsersIsMutable(); users_.add(index, value); onChanged(); } else { usersBuilder_.addMessage(index, value); } return this; } /** * * * <pre> * The users from the specified account. * </pre> * * <code>repeated .google.shopping.merchant.accounts.v1.User users = 1;</code> */ public Builder addUsers(com.google.shopping.merchant.accounts.v1.User.Builder builderForValue) { if (usersBuilder_ == null) { ensureUsersIsMutable(); users_.add(builderForValue.build()); onChanged(); } else { usersBuilder_.addMessage(builderForValue.build()); } return this; } /** * * * <pre> * The users from the specified account. * </pre> * * <code>repeated .google.shopping.merchant.accounts.v1.User users = 1;</code> */ public Builder addUsers( int index, com.google.shopping.merchant.accounts.v1.User.Builder builderForValue) { if (usersBuilder_ == null) { ensureUsersIsMutable(); users_.add(index, builderForValue.build()); onChanged(); } else { usersBuilder_.addMessage(index, builderForValue.build()); } return this; } /** * * * <pre> * The users from the specified account. * </pre> * * <code>repeated .google.shopping.merchant.accounts.v1.User users = 1;</code> */ public Builder addAllUsers( java.lang.Iterable<? 
extends com.google.shopping.merchant.accounts.v1.User> values) { if (usersBuilder_ == null) { ensureUsersIsMutable(); com.google.protobuf.AbstractMessageLite.Builder.addAll(values, users_); onChanged(); } else { usersBuilder_.addAllMessages(values); } return this; } /** * * * <pre> * The users from the specified account. * </pre> * * <code>repeated .google.shopping.merchant.accounts.v1.User users = 1;</code> */ public Builder clearUsers() { if (usersBuilder_ == null) { users_ = java.util.Collections.emptyList(); bitField0_ = (bitField0_ & ~0x00000001); onChanged(); } else { usersBuilder_.clear(); } return this; } /** * * * <pre> * The users from the specified account. * </pre> * * <code>repeated .google.shopping.merchant.accounts.v1.User users = 1;</code> */ public Builder removeUsers(int index) { if (usersBuilder_ == null) { ensureUsersIsMutable(); users_.remove(index); onChanged(); } else { usersBuilder_.remove(index); } return this; } /** * * * <pre> * The users from the specified account. * </pre> * * <code>repeated .google.shopping.merchant.accounts.v1.User users = 1;</code> */ public com.google.shopping.merchant.accounts.v1.User.Builder getUsersBuilder(int index) { return getUsersFieldBuilder().getBuilder(index); } /** * * * <pre> * The users from the specified account. * </pre> * * <code>repeated .google.shopping.merchant.accounts.v1.User users = 1;</code> */ public com.google.shopping.merchant.accounts.v1.UserOrBuilder getUsersOrBuilder(int index) { if (usersBuilder_ == null) { return users_.get(index); } else { return usersBuilder_.getMessageOrBuilder(index); } } /** * * * <pre> * The users from the specified account. * </pre> * * <code>repeated .google.shopping.merchant.accounts.v1.User users = 1;</code> */ public java.util.List<? 
extends com.google.shopping.merchant.accounts.v1.UserOrBuilder> getUsersOrBuilderList() { if (usersBuilder_ != null) { return usersBuilder_.getMessageOrBuilderList(); } else { return java.util.Collections.unmodifiableList(users_); } } /** * * * <pre> * The users from the specified account. * </pre> * * <code>repeated .google.shopping.merchant.accounts.v1.User users = 1;</code> */ public com.google.shopping.merchant.accounts.v1.User.Builder addUsersBuilder() { return getUsersFieldBuilder() .addBuilder(com.google.shopping.merchant.accounts.v1.User.getDefaultInstance()); } /** * * * <pre> * The users from the specified account. * </pre> * * <code>repeated .google.shopping.merchant.accounts.v1.User users = 1;</code> */ public com.google.shopping.merchant.accounts.v1.User.Builder addUsersBuilder(int index) { return getUsersFieldBuilder() .addBuilder(index, com.google.shopping.merchant.accounts.v1.User.getDefaultInstance()); } /** * * * <pre> * The users from the specified account. * </pre> * * <code>repeated .google.shopping.merchant.accounts.v1.User users = 1;</code> */ public java.util.List<com.google.shopping.merchant.accounts.v1.User.Builder> getUsersBuilderList() { return getUsersFieldBuilder().getBuilderList(); } private com.google.protobuf.RepeatedFieldBuilderV3< com.google.shopping.merchant.accounts.v1.User, com.google.shopping.merchant.accounts.v1.User.Builder, com.google.shopping.merchant.accounts.v1.UserOrBuilder> getUsersFieldBuilder() { if (usersBuilder_ == null) { usersBuilder_ = new com.google.protobuf.RepeatedFieldBuilderV3< com.google.shopping.merchant.accounts.v1.User, com.google.shopping.merchant.accounts.v1.User.Builder, com.google.shopping.merchant.accounts.v1.UserOrBuilder>( users_, ((bitField0_ & 0x00000001) != 0), getParentForChildren(), isClean()); users_ = null; } return usersBuilder_; } private java.lang.Object nextPageToken_ = ""; /** * * * <pre> * A token, which can be sent as `page_token` to retrieve the next page. 
* If this field is omitted, there are no subsequent pages. * </pre> * * <code>string next_page_token = 2;</code> * * @return The nextPageToken. */ public java.lang.String getNextPageToken() { java.lang.Object ref = nextPageToken_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); nextPageToken_ = s; return s; } else { return (java.lang.String) ref; } } /** * * * <pre> * A token, which can be sent as `page_token` to retrieve the next page. * If this field is omitted, there are no subsequent pages. * </pre> * * <code>string next_page_token = 2;</code> * * @return The bytes for nextPageToken. */ public com.google.protobuf.ByteString getNextPageTokenBytes() { java.lang.Object ref = nextPageToken_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); nextPageToken_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * * * <pre> * A token, which can be sent as `page_token` to retrieve the next page. * If this field is omitted, there are no subsequent pages. * </pre> * * <code>string next_page_token = 2;</code> * * @param value The nextPageToken to set. * @return This builder for chaining. */ public Builder setNextPageToken(java.lang.String value) { if (value == null) { throw new NullPointerException(); } nextPageToken_ = value; bitField0_ |= 0x00000002; onChanged(); return this; } /** * * * <pre> * A token, which can be sent as `page_token` to retrieve the next page. * If this field is omitted, there are no subsequent pages. * </pre> * * <code>string next_page_token = 2;</code> * * @return This builder for chaining. 
*/ public Builder clearNextPageToken() { nextPageToken_ = getDefaultInstance().getNextPageToken(); bitField0_ = (bitField0_ & ~0x00000002); onChanged(); return this; } /** * * * <pre> * A token, which can be sent as `page_token` to retrieve the next page. * If this field is omitted, there are no subsequent pages. * </pre> * * <code>string next_page_token = 2;</code> * * @param value The bytes for nextPageToken to set. * @return This builder for chaining. */ public Builder setNextPageTokenBytes(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); nextPageToken_ = value; bitField0_ |= 0x00000002; onChanged(); return this; } @java.lang.Override public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:google.shopping.merchant.accounts.v1.ListUsersResponse) } // @@protoc_insertion_point(class_scope:google.shopping.merchant.accounts.v1.ListUsersResponse) private static final com.google.shopping.merchant.accounts.v1.ListUsersResponse DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new com.google.shopping.merchant.accounts.v1.ListUsersResponse(); } public static com.google.shopping.merchant.accounts.v1.ListUsersResponse getDefaultInstance() { return DEFAULT_INSTANCE; } private static final com.google.protobuf.Parser<ListUsersResponse> PARSER = new com.google.protobuf.AbstractParser<ListUsersResponse>() { @java.lang.Override public ListUsersResponse parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { Builder builder = newBuilder(); try { builder.mergeFrom(input, 
extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(builder.buildPartial()); } catch (com.google.protobuf.UninitializedMessageException e) { throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException(e) .setUnfinishedMessage(builder.buildPartial()); } return builder.buildPartial(); } }; public static com.google.protobuf.Parser<ListUsersResponse> parser() { return PARSER; } @java.lang.Override public com.google.protobuf.Parser<ListUsersResponse> getParserForType() { return PARSER; } @java.lang.Override public com.google.shopping.merchant.accounts.v1.ListUsersResponse getDefaultInstanceForType() { return DEFAULT_INSTANCE; } }
/* ---------------------------------------------------------------------
 * NOTE(review): extraction artifact — an unrelated second file begins
 * below this point. The residue that was here is dataset metadata for it:
 *   repo: apache/derby, size: 34,077,
 *   path: java/org.apache.derby.engine/org/apache/derby/iapi/types/SQLBinary.java
 * The generated protobuf class above and the Derby SQLBinary class below
 * must be split back into separate compilation units.
 * --------------------------------------------------------------------- */
/* Derby - Class org.apache.derby.iapi.types.SQLBinary Licensed to the Apache Software Foundation (ASF) under one or more contributor license agreements. See the NOTICE file distributed with this work for additional information regarding copyright ownership. The ASF licenses this file to you under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ package org.apache.derby.iapi.types; import org.apache.derby.iapi.sql.conn.StatementContext; import org.apache.derby.shared.common.reference.ContextId; import org.apache.derby.shared.common.reference.SQLState; import org.apache.derby.shared.common.reference.MessageId; import org.apache.derby.shared.common.error.StandardException; import org.apache.derby.iapi.services.context.ContextService; import org.apache.derby.iapi.services.io.DerbyIOException; import org.apache.derby.iapi.services.io.StoredFormatIds; import org.apache.derby.iapi.services.io.FormatIdInputStream; import org.apache.derby.iapi.services.io.InputStreamUtil; import org.apache.derby.shared.common.sanity.SanityManager; import org.apache.derby.shared.common.i18n.MessageService; import org.apache.derby.iapi.services.cache.ClassSize; import java.io.ObjectOutput; import java.io.ObjectInput; import java.io.IOException; import java.io.InputStream; import java.sql.Blob; import java.sql.DataTruncation; import java.sql.SQLException; import java.sql.PreparedStatement; /** * SQLBinary is the abstract class for the binary datatypes. 
* <UL> * <LI> CHAR FOR BIT DATA * <LI> VARCHAR FOR BIT DATA * <LI> LONG VARCHAR * <LI> BLOB * </UL> <P> Format : &lt;encoded length&gt;&lt;raw data&gt; <BR> Length is encoded to support Cloudscape 5.x databases where the length was stored as the number of bits. The first bit of the first byte indicates if the format is an old (Cloudscape 5.x) style or a new Derby style. Derby then uses the next two bits to indicate how the length is encoded. <BR> &lt;encoded length&gt; is one of N styles. <UL> <LI> (5.x format zero) 4 byte Java format integer value 0 - either &lt;raw data&gt; is 0 bytes/bits or an unknown number of bytes.</LI> <LI> (5.x format bits) 4 byte Java format integer value &gt;0 (positive) - number of bits in raw data, number of bytes in &lt;raw data&gt; is the minimum number of bytes required to store the number of bits.</LI> <LI> (Derby format) 1 byte encoded length (0 &lt;= L &lt;= 31) - number of bytes of raw data - encoded = 0x80 &amp; L</LI> <LI> (Derby format) 3 byte encoded length (32 &lt;= L &lt; 64k) - number of bytes of raw data - encoded = 0xA0 &lt;L as Java format unsigned short&gt;</LI> <LI> (Derby format) 5 byte encoded length (64k &lt;= L &lt; 2G) - number of bytes of raw data - encoded = 0xC0 &lt;L as Java format integer&gt;</LI> <LI> (future) to be determined L &gt;= 2G - encoded 0xE0 &lt;encoding of L to be determined&gt; (0xE0 is an esacape to allow any number of arbitary encodings in the future).</LI> </UL> <BR> When the value was written from a byte array the Derby encoded byte length format was always used from Derby 10.0 onwards (ie. all open source versions). <BR> When the value was written from a stream (e.g. PreparedStatement.setBinaryStream) then the Cloudscape '5.x format zero' was used by 10.0 and 10.1. The was due to the class RawToBinaryFormatStream always writing four zero bytes for the length before the data. <BR> The Cloudscape '5.x format bits' format I think was never used by Derby. 
 */
abstract class SQLBinary
    extends DataType implements BitDataValue
{

    /** Pad byte used for binary values: an ASCII space (0x20). */
    static final byte PAD = (byte) 0x20;

    /** Shallow per-object overhead, taken from the ClassSize catalog. */
    private static final int BASE_MEMORY_USAGE = ClassSize.estimateBaseFromCatalog( SQLBinary.class);

    /** Copy-buffer size in bytes used when serializing a Blob (writeBlob). */
    private static final int LEN_OF_BUFFER_TO_WRITE_BLOB = 1024;

    /**
     * Estimate the memory consumed by this value: base object size plus the
     * materialized array length, the known stream length, or the subclass's
     * declared maximum when the stream length is unknown.
     */
    public int estimateMemoryUsage()
    {
        if (dataValue == null) {
            if (streamValueLength>=0) {
                return BASE_MEMORY_USAGE + streamValueLength;
            } else {
                // Stream of unknown length: fall back to the type's maximum.
                return getMaxMemoryUsage();
            }
        } else {
            return BASE_MEMORY_USAGE + dataValue.length;
        }
    } // end of estimateMemoryUsage

    /**
     * Return max memory usage for a SQL Binary
     */
    abstract int getMaxMemoryUsage();

    /*
     * value as a blob
     */
    Blob _blobValue;

    /*
     * object state: materialized value, or null when the value is held as a
     * Blob or a stream (or is SQL NULL).
     */
    byte[] dataValue;

    /**
     * Value as a stream, this stream represents the on-disk
     * format of the value. That is it has length information
     * encoded in the first few bytes.
     */
    InputStream stream;

    /**
        Length of the value in bytes when this value is set as a stream.
        Represents the length of the value itself and not the length of
        the stream which contains this length encoded as the first few bytes.
        If the value of the stream is unknown then this will be set to -1.
        If this value is not set as a stream then this value should be ignored.
    */
    int streamValueLength;

    /**
        Create a binary value set to NULL
    */
    SQLBinary()
    {
    }

    SQLBinary(byte[] val)
    {
        dataValue = val;
    }

    SQLBinary(Blob val)
    {
        setValue( val );
    }

    /** Set this value from a byte array, clearing any Blob/stream state. */
    public final void setValue(byte[] theValue)
    {
        dataValue = theValue;
        _blobValue = null;
        stream = null;
        streamValueLength = -1;
    }

    /** Set this value from a Blob, clearing any array/stream state. */
    public final void setValue(Blob theValue)
    {
        dataValue = null;
        _blobValue = theValue;
        stream = null;
        streamValueLength = -1;
    }

    /**
     * Used by JDBC -- string should not contain
     * SQL92 formatting. Returns the value as a hex string.
     *
     * @exception StandardException Thrown on error
     */
    public final String getString() throws StandardException
    {
        if (getValue() == null)
            return null;
        else if (dataValue.length * 2 < 0)  //if converted to hex, length exceeds max int
        {
            throw StandardException.newException(
                    SQLState.LANG_STRING_TRUNCATION, getTypeName(),
                    "",
                    String.valueOf(Integer.MAX_VALUE));
        }
        else
        {
            return org.apache.derby.iapi.util.StringUtil.toHexString(dataValue, 0, dataValue.length);
        }
    }

    /**
     * Return the raw on-disk-format stream; only legal when this value is
     * actually backed by a stream.
     *
     * @exception StandardException Thrown on error
     */
    public final InputStream getStream() throws StandardException
    {
        if (!hasStream()) {
            throw StandardException.newException(
                    SQLState.LANG_STREAM_INVALID_ACCESS, getTypeName());
        }
        return (stream);
    }

    /**
     * Return the materialized byte array (materializing if necessary).
     *
     * @exception StandardException Thrown on error
     */
    public final byte[] getBytes() throws StandardException
    {
        return getValue();
    }

    // Materialize the value into dataValue from a Blob or stream, then drop
    // the Blob/stream references so only one representation remains.
    byte[] getValue() throws StandardException
    {
        try
        {
            if ((dataValue == null) && (_blobValue != null) )
            {
                dataValue = _blobValue.getBytes( 1L, getBlobLength() );
                _blobValue = null;
                stream = null;
                streamValueLength = -1;
            }
            else if ((dataValue == null) && (stream != null) )
            {
                // readExternal decodes the on-disk length prefix and fills
                // dataValue from the stream.
                if (stream instanceof FormatIdInputStream)
                {
                    readExternal((FormatIdInputStream) stream);
                }
                else
                {
                    readExternal(new FormatIdInputStream(stream));
                }
                _blobValue = null;
                stream = null;
                streamValueLength = -1;
            }
        }
        catch (IOException ioe)
        {
            throwStreamingIOException(ioe);
        }
        catch (SQLException se)
        {
            throw StandardException.plainWrapException( se );
        }
        return dataValue;
    }

    /**
     * length in bytes
     *
     * @exception StandardException Thrown on error
     */
    public final int getLength() throws StandardException
    {
        if ( _blobValue != null ) { return getBlobLength(); }
        else if (stream != null) {
            if (streamValueLength != -1)
                return streamValueLength;
            else if (stream instanceof Resetable){
                try {
                    // If we have the stream length encoded.
                    // just read that.
                    streamValueLength = readBinaryLength((ObjectInput) stream);
                    if (streamValueLength == 0) {
                        // Otherwise we will have to read the whole stream.
                        streamValueLength = (int) InputStreamUtil.skipUntilEOF(stream);
                    }
                    return streamValueLength;
                } catch (IOException ioe) {
                    throwStreamingIOException(ioe);
                } finally {
                    // Always rewind so the value can still be read afterwards.
                    try {
                        ((Resetable) stream).resetStream();
                    } catch (IOException ioe) {
                        throwStreamingIOException(ioe);
                    }
                }
            }
        }
        // Fallback: materialize and measure.
        byte[] bytes = getBytes();
        return (bytes == null) ? 0 : bytes.length;
    }

    // Wrap an I/O failure on the backing stream in the standard Derby error.
    private void throwStreamingIOException(IOException ioe) throws StandardException {
        throw StandardException.
            newException(SQLState.LANG_STREAMING_COLUMN_I_O_EXCEPTION,
                         ioe, getTypeName());
    }

    /*
     * Storable interface, implies Externalizable, TypedFormat
     */

    /**
     * see if the Bit value is null.
     * @see org.apache.derby.iapi.services.io.Storable#isNull
     */
    public final boolean isNull()
    {
        return (dataValue == null) && (stream == null) && (_blobValue == null);
    }

    /**
        Write the value out from the byte array (not called if null) using
        the 8.1 encoding.

     * @exception IOException io exception
     */
    public final void writeExternal(ObjectOutput out) throws IOException
    {
        if ( _blobValue != null )
        {
            writeBlob( out );
            return;
        }
        int len = dataValue.length;

        writeLength( out, len );
        out.write(dataValue, 0, dataValue.length);
    }

    /**
        Serialize a blob using the 8.1 encoding. Not called if null.
* @exception IOException io exception */ private void writeBlob(ObjectOutput out) throws IOException { try { int len = getBlobLength(); InputStream is = _blobValue.getBinaryStream(); writeLength( out, len ); int bytesRead = 0; int numOfBytes = 0; byte[] buffer = new byte[Math.min(len, LEN_OF_BUFFER_TO_WRITE_BLOB)]; while(bytesRead < len) { numOfBytes = is.read(buffer); if (numOfBytes == -1) { throw new DerbyIOException( MessageService.getTextMessage( SQLState.SET_STREAM_INEXACT_LENGTH_DATA), SQLState.SET_STREAM_INEXACT_LENGTH_DATA); } out.write(buffer, 0, numOfBytes); bytesRead += numOfBytes; } } catch (StandardException se) { throw new IOException( se.getMessage() ); } catch (SQLException se) { throw new IOException( se.getMessage() ); } } /** Write the length if using the 8.1 encoding. * @exception IOException io exception */ private void writeLength( ObjectOutput out, int len ) throws IOException { if (len <= 31) { out.write((byte) (0x80 | (len & 0xff))); } else if (len <= 0xFFFF) { out.write((byte) 0xA0); out.writeShort((short) len); } else { out.write((byte) 0xC0); out.writeInt(len); } } /** * delegated to bit * * @exception IOException io exception */ public final void readExternal(ObjectInput in) throws IOException { // need to clear stream first, in case this object is reused, and // stream is set by previous use. Track 3794. stream = null; streamValueLength = -1; _blobValue = null; int len = SQLBinary.readBinaryLength(in); if (len != 0) { dataValue = new byte[len]; in.readFully(dataValue); } else { readFromStream((InputStream) in); } } /** * Read the encoded length of the value from the on-disk format. 
     *
     * @see SQLBinary
     */
    private static int readBinaryLength(ObjectInput in) throws IOException {
        // First byte: high bit set => Derby encoding; clear => Cloudscape
        // 5.x bit-count encoding (see the class javadoc).
        int bl = in.read();
        if (bl == -1)
            throw new java.io.EOFException();

        byte li = (byte) bl;

        int len;

        if ((li & ((byte) 0x80)) != 0)
        {
            if (li == ((byte) 0xC0))
            {
                // 5-byte form: 0xC0 followed by a 4-byte int byte count.
                len = in.readInt();
            }
            else if (li == ((byte) 0xA0))
            {
                // 3-byte form: 0xA0 followed by an unsigned short byte count.
                len = in.readUnsignedShort();
            }
            else
            {
                // 1-byte form: byte count (0..31) in the low five bits.
                len = li & 0x1F;
            }
        }
        else
        {
            // old length in bits: 4-byte big-endian count of *bits*;
            // convert to a byte count, rounding up.
            int v2 = in.read();
            int v3 = in.read();
            int v4 = in.read();
            if (v2 == -1 || v3 == -1 || v4 == -1)
                throw new java.io.EOFException();
            int lenInBits = (((bl & 0xff) << 24) | ((v2 & 0xff) << 16) | ((v3 & 0xff) << 8) | (v4 & 0xff));

            len = lenInBits / 8;
            if ((lenInBits % 8) != 0)
                len++;
        }

        return len;
    }

    /**
     * Read the value from an input stream. The length
     * encoded in the input stream has already been read
     * and determined to be unknown.
     */
    private void readFromStream(InputStream in) throws IOException {

        dataValue = null; // allow gc of the old value before the new.

        byte[] tmpData = new byte[32 * 1024];

        int off = 0;
        for (;;) {

            int len = in.read(tmpData, off, tmpData.length - off);
            if (len == -1)
                break;
            off += len;

            // Grow when remaining space looks smaller than what the stream
            // reports as immediately available (at least 1 to keep reading).
            int available = Math.max(1, in.available());
            int extraSpace = available - (tmpData.length - off);
            if (extraSpace > 0)
            {
                // need to grow the array
                int size = tmpData.length * 2;
                if (extraSpace > tmpData.length)
                    size += extraSpace;
                byte[] grow = new byte[size];
                System.arraycopy(tmpData, 0, grow, 0, off);
                tmpData = grow;
            }
        }

        // Trim the scratch buffer to the exact number of bytes read.
        dataValue = new byte[off];
        System.arraycopy(tmpData, 0, dataValue, 0, off);
    }

    /**
     * Reset this value to SQL NULL, dropping array/Blob/stream state.
     *
     * @see org.apache.derby.iapi.services.io.Storable#restoreToNull
     */
    public final void restoreToNull()
    {
        dataValue = null;
        _blobValue = null;
        stream = null;
        streamValueLength = -1;
    }

    /** @exception StandardException thrown on error */
    public final boolean compare(int op,
                                 DataValueDescriptor other,
                                 boolean orderedNulls,
                                 boolean unknownRV)
        throws StandardException
    {
        if (!orderedNulls) // nulls are unordered
        {
            if (SanityManager.DEBUG)
            {
                // Sanity: the other operand must be a bit or character type
                // (BLOB only compares with BLOB).
                int otherTypeFormatId = other.getTypeFormatId();
                if (!((StoredFormatIds.SQL_BIT_ID == otherTypeFormatId)
                      || (StoredFormatIds.SQL_VARBIT_ID == otherTypeFormatId)
                      || (StoredFormatIds.SQL_LONGVARBIT_ID == otherTypeFormatId)
                      || (StoredFormatIds.SQL_CHAR_ID == otherTypeFormatId)
                      || (StoredFormatIds.SQL_VARCHAR_ID == otherTypeFormatId)
                      || (StoredFormatIds.SQL_LONGVARCHAR_ID == otherTypeFormatId)
                      || ((StoredFormatIds.SQL_BLOB_ID == otherTypeFormatId)
                          && (StoredFormatIds.SQL_BLOB_ID == getTypeFormatId()))
                      ))
                    SanityManager.THROWASSERT(
                        "An object of type " + other.getClass().getName() +
                        ", with format id " + otherTypeFormatId +
                        ", was passed to SQLBinary.compare()");
            }
            if (this.isNull() || other.isNull())
                return unknownRV;
        }
        /* Do the comparison */
        return super.compare(op, other, orderedNulls, unknownRV);
    }

    /** @exception StandardException thrown on error */
    public final int compare(DataValueDescriptor other) throws StandardException
    {
        /* Use compare method from dominant type, negating result
         * to reflect flipping of sides.
         */
        if (typePrecedence() < other.typePrecedence())
        {
            return - (other.compare(this));
        }

        /*
        ** By convention, nulls sort High, and null == null
        */
        if (this.isNull() || other.isNull())
        {
            if (!isNull())
                return -1;
            if (!other.isNull())
                return 1;
            return 0; // both null
        }

        return SQLBinary.compare(getBytes(), other.getBytes());
    }

    /**
     * Shallow clone a StreamStorable without objectifying.
     * This is used to avoid unnecessary objectifying of a stream object.
     *
     * Beetle 4896
     */
    public final DataValueDescriptor cloneHolder() {
        if (stream == null && _blobValue == null) {
            return cloneValue(false);
        } else {
            // Cast to SQLBinary to avoid having to catch StandardException.
            SQLBinary self = (SQLBinary)getNewNull();
            if (stream != null) {
                // Just reference the same stream in the cloned holder.
                self.setValue(stream, streamValueLength);
            } else if (_blobValue != null) {
                // Just reference the same BLOB value in the cloned holder.
                self.setValue(_blobValue);
            } else {
                throw new IllegalStateException("unknown BLOB value repr");
            }
            return self;
        }
    }

    /*
     * DataValueDescriptor interface
     */

    /** @see DataValueDescriptor#cloneValue */
    public DataValueDescriptor cloneValue(boolean forceMaterialization)
    {
        try
        {
            // Deep copy: materializes the value (getValue) into a fresh null
            // of the same type.
            DataValueDescriptor cloneDVD = getNewNull();
            cloneDVD.setValue(getValue());
            return cloneDVD;
        }
        catch (StandardException se)
        {
            if (SanityManager.DEBUG)
                SanityManager.THROWASSERT("Unexpected exception", se);
            return null;
        }
    }

    /*
     * DataValueDescriptor interface
     */

    /*
     * StreamStorable interface :
     */
    public final InputStream returnStream()
    {
        return stream;
    }

    /**
     * Set me to the value represented by this stream.
     * The format of the stream is the on-disk format
     * described in this class's javadoc. That is the
     * length is encoded in the first few bytes of the
     * stream.
     */
    public final void setStream(InputStream newStream)
    {
        this.dataValue = null;
        _blobValue = null;
        this.stream = newStream;
        streamValueLength = -1;
    }

    // Force materialization of a stream-backed value.
    public final void loadStream() throws StandardException
    {
        getValue();
    }

    /*
     * class interface
     */

    // If o is null, set this value to SQL NULL and report true.
    boolean objectNull(Object o)
    {
        if (o == null)
        {
            setToNull();
            return true;
        }
        return false;
    }

    /**
     * Set the value from the stream which is in the on-disk format.
* @param theStream On disk format of the stream * @param valueLength length of the logical value in bytes, or * <code>DataValueDescriptor.UNKNOWN_LOGICAL_LENGTH</code> */ public final void setValue(InputStream theStream, int valueLength) { dataValue = null; _blobValue = null; stream = theStream; this.streamValueLength = valueLength; } protected final void setFrom(DataValueDescriptor theValue) throws StandardException { if (theValue instanceof SQLBinary) { SQLBinary theValueBinary = (SQLBinary) theValue; dataValue = theValueBinary.dataValue; _blobValue = theValueBinary._blobValue; stream = theValueBinary.stream; streamValueLength = theValueBinary.streamValueLength; } else { setValue(theValue.getBytes()); } } /* ** SQL Operators */ /** * The = operator as called from the language module, as opposed to * the storage module. * * @param left The value on the left side of the = * @param right The value on the right side of the = * is not. * @return A SQL boolean value telling whether the two parameters are equal * * @exception StandardException Thrown on error */ public final BooleanDataValue equals(DataValueDescriptor left, DataValueDescriptor right) throws StandardException { boolean isEqual; if (left.isNull() || right.isNull()) { isEqual = false; } else { isEqual = SQLBinary.compare(left.getBytes(), right.getBytes()) == 0; } return SQLBoolean.truthValue(left, right, isEqual); } /** * The &lt;&gt; operator as called from the language module, as opposed to * the storage module. 
* * @param left The value on the left side of the operator * @param right The value on the right side of the operator * * @return A SQL boolean value telling whether the two parameters * are not equal * * @exception StandardException Thrown on error */ public final BooleanDataValue notEquals(DataValueDescriptor left, DataValueDescriptor right) throws StandardException { boolean isNotEqual; if (left.isNull() || right.isNull()) { isNotEqual = false; } else { isNotEqual = SQLBinary.compare(left.getBytes(), right.getBytes()) != 0; } return SQLBoolean.truthValue(left, right, isNotEqual); } /** * The &lt; operator as called from the language module, as opposed to * the storage module. * * @param left The value on the left side of the operator * @param right The value on the right side of the operator * * @return A SQL boolean value telling whether the first operand is * less than the second operand * * @exception StandardException Thrown on error */ public final BooleanDataValue lessThan(DataValueDescriptor left, DataValueDescriptor right) throws StandardException { boolean isLessThan; if (left.isNull() || right.isNull()) { isLessThan = false; } else { isLessThan = SQLBinary.compare(left.getBytes(), right.getBytes()) < 0; } return SQLBoolean.truthValue(left, right, isLessThan); } /** * The &gt; operator as called from the language module, as opposed to * the storage module. 
* * @param left The value on the left side of the operator * @param right The value on the right side of the operator * * @return A SQL boolean value telling whether the first operand is * greater than the second operand * * @exception StandardException Thrown on error */ public final BooleanDataValue greaterThan(DataValueDescriptor left, DataValueDescriptor right) throws StandardException { boolean isGreaterThan = false; if (left.isNull() || right.isNull()) { isGreaterThan = false; } else { isGreaterThan = SQLBinary.compare(left.getBytes(), right.getBytes()) > 0; } return SQLBoolean.truthValue(left, right, isGreaterThan); } /** * The &lt;= operator as called from the language module, as opposed to * the storage module. * * @param left The value on the left side of the operator * @param right The value on the right side of the operator * * @return A SQL boolean value telling whether the first operand is * less than or equal to the second operand * * @exception StandardException Thrown on error */ public final BooleanDataValue lessOrEquals(DataValueDescriptor left, DataValueDescriptor right) throws StandardException { boolean isLessEquals = false; if (left.isNull() || right.isNull()) { isLessEquals = false; } else { isLessEquals = SQLBinary.compare(left.getBytes(), right.getBytes()) <= 0; } return SQLBoolean.truthValue(left, right, isLessEquals); } /** * The &gt;= operator as called from the language module, as opposed to * the storage module. 
* * @param left The value on the left side of the &gt;= * @param right The value on the right side of the &gt;= * * @return A SQL boolean value telling whether the first operand is * greater than or equal to the second operand * * @exception StandardException Thrown on error */ public final BooleanDataValue greaterOrEquals(DataValueDescriptor left, DataValueDescriptor right) throws StandardException { boolean isGreaterEquals = false; if (left.isNull() || right.isNull()) { isGreaterEquals = false; } else { isGreaterEquals = SQLBinary.compare(left.getBytes(), right.getBytes()) >= 0; } return SQLBoolean.truthValue(left, right, isGreaterEquals); } /** * * This method implements the char_length function for bit. * * @param result The result of a previous call to this method, null * if not called yet * * @return A SQLInteger containing the length of the char value * * @exception StandardException Thrown on error * * @see ConcatableDataValue#charLength */ public final NumberDataValue charLength(NumberDataValue result) throws StandardException { if (result == null) { result = new SQLInteger(); } if (this.isNull()) { result.setToNull(); return result; } result.setValue(getValue().length); return result; } /** * @see BitDataValue#concatenate * * @exception StandardException Thrown on error */ public final BitDataValue concatenate( BitDataValue left, BitDataValue right, BitDataValue result) throws StandardException { if (result == null) { result = (BitDataValue) getNewNull(); } if (left.isNull() || right.isNull()) { result.setToNull(); return result; } byte[] leftData = left.getBytes(); byte[] rightData = right.getBytes(); byte[] concatData = new byte[leftData.length + rightData.length]; System.arraycopy(leftData, 0, concatData, 0, leftData.length); System.arraycopy(rightData, 0, concatData, leftData.length, rightData.length); result.setValue(concatData); return result; } /** * The SQL substr() function. 
 *
 * @param start Start of substr
 * @param length Length of substr
 * @param result The result of a previous call to this method,
 * null if not called yet.
 * @param maxLen Maximum length of the result
 *
 * @return A ConcatableDataValue containing the result of the substr()
 *
 * @exception StandardException Thrown on error
 */
public final ConcatableDataValue substring(
    NumberDataValue start,
    NumberDataValue length,
    ConcatableDataValue result,
    int maxLen)
    throws StandardException
{
    int startInt;
    int lengthInt;
    BitDataValue varbitResult;

    if (result == null)
    {
        result = new SQLVarbit();
    }

    varbitResult = (BitDataValue) result;

    /* The result is null if the receiver (this) is null or if the length is negative.
     * Oracle docs don't say what happens if the start position or the length is a usernull.
     * We will return null, which is the only sensible thing to do.
     * (If the user did not specify a length then length is not a user null.)
     */
    if (this.isNull() ||
        start.isNull() ||
        (length != null && length.isNull()))
    {
        varbitResult.setToNull();
        return varbitResult;
    }

    startInt = start.getInt();

    // If length is not specified, make it till end of the string
    if (length != null)
    {
        lengthInt = length.getInt();
    }
    else lengthInt = getLength() - startInt + 1;

    /* DB2 Compatibility: Added these checks to match DB2. We currently enforce these
     * limits in both modes.
     We could do these checks in DB2 mode only, if needed, so
     * leaving earlier code for out of range in for now, though will not be exercised
     */
    // NOTE(review): this check rejects startInt <= 0 and lengthInt < 0 outright,
    // so the negative-start / negative-length branches below are dead code kept
    // only for a possible DB2-mode-only variant (see comment above).
    if ((startInt <= 0 || lengthInt < 0 || startInt > getLength() ||
            lengthInt > getLength() - startInt + 1))
        throw StandardException.newException(SQLState.LANG_SUBSTR_START_OR_LEN_OUT_OF_RANGE);

    // Return null if length is non-positive
    if (lengthInt < 0)
    {
        varbitResult.setToNull();
        return varbitResult;
    }

    /* If startInt < 0 then we count from the right of the string */
    if (startInt < 0)
    {
        startInt += getLength();
        if (startInt < 0)
        {
            lengthInt += startInt;
            startInt = 0;
        }
        if (lengthInt + startInt > 0)
        {
            lengthInt += startInt;
        }
        else
        {
            lengthInt = 0;
        }
    }
    else if (startInt > 0)
    {
        /* java substr() is 0 based */
        startInt--;
    }

    /* Oracle docs don't say what happens if the window is to the
     * left of the string. Return "" if the window
     * is to the left or right or if the length is 0.
     */
    if (lengthInt == 0 ||
        lengthInt <= 0 - startInt ||
        startInt > getLength())
    {
        varbitResult.setValue(new byte[0]);
        return varbitResult;
    }

    // NOTE(review): the copies below read dataValue directly; getLength()
    // presumably materializes the stream/BLOB into dataValue first — confirm.
    if (lengthInt >= getLength() - startInt)
    {
        // Window runs to the end of the value: copy the tail.
        byte[] substring = new byte[dataValue.length - startInt];
        System.arraycopy(dataValue, startInt, substring, 0, substring.length);
        varbitResult.setValue(substring);
    }
    else
    {
        // Window fits inside the value: copy exactly lengthInt bytes.
        byte[] substring = new byte[lengthInt];
        System.arraycopy(dataValue, startInt, substring, 0, substring.length);
        varbitResult.setValue(substring);
    }

    return varbitResult;
}

/**
    Host variables are rejected if their length is bigger than
    the declared length, regardless of if the trailing bytes
    are the pad character.

    @exception StandardException Variable is too big.
*/
public final void checkHostVariable(int declaredLength) throws StandardException
{
    // stream length checking occurs at the JDBC layer
    int variableLength = -1;

    // BLOB-backed values are deliberately not length-checked here (their
    // length is computed lazily); stream-backed values use the remembered
    // streamValueLength, which may also be -1 (unknown).
    if ( _blobValue != null ) { variableLength = -1; }
    else if (stream == null)
    {
        if (dataValue != null)
            variableLength = dataValue.length;
    }
    else
    {
        variableLength = streamValueLength;
    }

    // Reject only when the length is actually known and too large.
    if (variableLength != -1 && variableLength > declaredLength)
        throw StandardException.newException(SQLState.LANG_STRING_TRUNCATION,
            getTypeName(),
            MessageService.getTextMessage(MessageId.BINARY_DATA_HIDDEN),
            String.valueOf(declaredLength));
}

/*
 * String display of value
 */
// Hex dump of the materialized bytes; "NULL" for SQL NULL. A value still in
// stream/BLOB form is NOT materialized here — debug builds assert instead.
public final String toString()
{
    if (dataValue == null)
    {
        if ((stream == null) && (_blobValue == null) )
        {
            return "NULL";
        }
        else
        {
            if (SanityManager.DEBUG)
                SanityManager.THROWASSERT(
                    "value is null, stream or blob is not null");
            return "";
        }
    }
    else
    {
        return org.apache.derby.iapi.util.StringUtil.toHexString(dataValue, 0, dataValue.length);
    }
}

/*
 * Hash code
 */
public final int hashCode()
{
    try {
        // Materialize the value; NULL hashes to 0.
        if (getValue() == null)
        {
            return 0;
        }
    }
    catch (StandardException se)
    {
        if (SanityManager.DEBUG)
            SanityManager.THROWASSERT("Unexpected exception", se);
        return 0;
    }

    // Hash code should ignore trailing PAD bytes.
    // (This keeps hashCode consistent with compare(), which treats a longer
    // value whose extra bytes are all PAD as equal to the shorter one.)
    // NOTE(review): assumes getValue() non-null implies dataValue non-null —
    // confirm against getValue()'s contract.
    byte[] bytes = dataValue;
    int lastNonPadByte = bytes.length - 1;
    while (lastNonPadByte >= 0 && bytes[lastNonPadByte] == PAD) {
        lastNonPadByte--;
    }

    // Build the hash code in a way similar to String.hashCode() and
    // SQLChar.hashCode()
    int hashcode = 0;
    for (int i = 0; i <= lastNonPadByte; i++) {
        hashcode = hashcode * 31 + bytes[i];
    }
    return hashcode;
}

// Unsigned byte-wise comparison with pad-extension semantics: the shorter
// value behaves as if extended with PAD bytes, so e.g. {1,2} and {1,2,PAD}
// compare equal. Returns <0, 0 or >0.
private static int compare(byte[] left, byte[] right) {

    int minLen = left.length;
    byte[] longer = right;
    if (right.length < minLen) {
        minLen = right.length;
        longer = left;
    }

    for (int i = 0; i < minLen; i++) {

        // Mask to compare as unsigned bytes (Java bytes are signed).
        int lb = left[i] & 0xff;
        int rb = right[i] & 0xff;

        if (lb == rb)
            continue;

        return lb - rb;
    }

    // complete match on all the bytes for the smallest value.
    // if the longer value is all pad characters
    // then the values are equal.
    for (int i = minLen; i < longer.length; i++) {

        byte nb = longer[i];
        if (nb == SQLBinary.PAD)
            continue;

        // longer value is bigger.
        if (left == longer)
            return 1;
        return -1;
    }

    return 0;
}

/** Adding this method to ensure that super class' setInto method doesn't get called
 * that leads to the violation of JDBC spec( untyped nulls ) when batching is turned on.
 */
public void setInto(PreparedStatement ps, int position) throws SQLException, StandardException {
    ps.setBytes(position, getBytes());
}

/**
 * Gets a trace representation for debugging.
 *
 * @return a trace representation of this SQL DataType.
 */
public final String getTraceString() throws StandardException {
    // Check if the value is SQL NULL.
    if (isNull()) {
        return "NULL";
    }

    // Check if we have a stream.
    if (hasStream()) {
        return (getTypeName() + "(" + getStream().toString() + ")");
    }

    return (getTypeName() + ":Length=" + getLength());
}

// Length of the BLOB form as an int; rejects BLOBs larger than
// Integer.MAX_VALUE and rewraps SQLException as StandardException.
private int getBlobLength() throws StandardException
{
    try {
        long maxLength = Integer.MAX_VALUE;
        long length = _blobValue.length();

        if ( length > Integer.MAX_VALUE )
        {
            throw StandardException.newException
                ( SQLState.BLOB_TOO_LARGE_FOR_CLIENT, Long.toString( length ), Long.toString( maxLength ) );
        }

        return (int) length;
    }
    catch (SQLException se) { throw StandardException.plainWrapException( se ); }
}

/**
 * Truncate this value to the desired width by removing bytes at the
 * end of the byte sequence.
 *
 * @param sourceWidth the original width in bytes (only used for
 * diagnostics, ignored if {@code warn} is {@code false})
 * @param desiredWidth the desired width in bytes
 * @param warn whether or not to generate a truncation warning
 */
void truncate(int sourceWidth, int desiredWidth, boolean warn)
    throws StandardException
{
    if (warn) {
        // SQL:2003, part 2, 6.12 <cast specification>,
        // general rule 12 says we should warn about truncation.
        DataTruncation warning = new DataTruncation(
            -1,    // column index is unknown
            false, // parameter
            true,  // read
            getLength(), desiredWidth);

        // Attach the warning to the result set of the current statement.
        StatementContext statementContext = (StatementContext)
            DataValueFactoryImpl.getContext(ContextId.LANG_STATEMENT);
        statementContext.getActivation().
            getResultSet().addWarning(warning);
    }

    // Truncate to the desired width.
    byte[] shrunkData = new byte[desiredWidth];
    System.arraycopy(getValue(), 0, shrunkData, 0, desiredWidth);
    setValue(shrunkData);
}
}
googleapis/google-cloud-java
36,138
java-artifact-registry/proto-google-cloud-artifact-registry-v1beta2/src/main/java/com/google/devtools/artifactregistry/v1beta2/ImportAptArtifactsErrorInfo.java
/* * Copyright 2025 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ // Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/devtools/artifactregistry/v1beta2/apt_artifact.proto // Protobuf Java Version: 3.25.8 package com.google.devtools.artifactregistry.v1beta2; /** * * * <pre> * Error information explaining why a package was not imported. * </pre> * * Protobuf type {@code google.devtools.artifactregistry.v1beta2.ImportAptArtifactsErrorInfo} */ public final class ImportAptArtifactsErrorInfo extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.devtools.artifactregistry.v1beta2.ImportAptArtifactsErrorInfo) ImportAptArtifactsErrorInfoOrBuilder { private static final long serialVersionUID = 0L; // Use ImportAptArtifactsErrorInfo.newBuilder() to construct. 
private ImportAptArtifactsErrorInfo(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private ImportAptArtifactsErrorInfo() {} @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance(UnusedPrivateParameter unused) { return new ImportAptArtifactsErrorInfo(); } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.devtools.artifactregistry.v1beta2.AptArtifactProto .internal_static_google_devtools_artifactregistry_v1beta2_ImportAptArtifactsErrorInfo_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.devtools.artifactregistry.v1beta2.AptArtifactProto .internal_static_google_devtools_artifactregistry_v1beta2_ImportAptArtifactsErrorInfo_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.devtools.artifactregistry.v1beta2.ImportAptArtifactsErrorInfo.class, com.google.devtools.artifactregistry.v1beta2.ImportAptArtifactsErrorInfo.Builder.class); } private int bitField0_; private int sourceCase_ = 0; @SuppressWarnings("serial") private java.lang.Object source_; public enum SourceCase implements com.google.protobuf.Internal.EnumLite, com.google.protobuf.AbstractMessage.InternalOneOfEnum { GCS_SOURCE(1), SOURCE_NOT_SET(0); private final int value; private SourceCase(int value) { this.value = value; } /** * @param value The number of the enum to look for. * @return The enum associated with the given number. * @deprecated Use {@link #forNumber(int)} instead. 
*/ @java.lang.Deprecated public static SourceCase valueOf(int value) { return forNumber(value); } public static SourceCase forNumber(int value) { switch (value) { case 1: return GCS_SOURCE; case 0: return SOURCE_NOT_SET; default: return null; } } public int getNumber() { return this.value; } }; public SourceCase getSourceCase() { return SourceCase.forNumber(sourceCase_); } public static final int GCS_SOURCE_FIELD_NUMBER = 1; /** * * * <pre> * Google Cloud Storage location requested. * </pre> * * <code>.google.devtools.artifactregistry.v1beta2.ImportAptArtifactsGcsSource gcs_source = 1; * </code> * * @return Whether the gcsSource field is set. */ @java.lang.Override public boolean hasGcsSource() { return sourceCase_ == 1; } /** * * * <pre> * Google Cloud Storage location requested. * </pre> * * <code>.google.devtools.artifactregistry.v1beta2.ImportAptArtifactsGcsSource gcs_source = 1; * </code> * * @return The gcsSource. */ @java.lang.Override public com.google.devtools.artifactregistry.v1beta2.ImportAptArtifactsGcsSource getGcsSource() { if (sourceCase_ == 1) { return (com.google.devtools.artifactregistry.v1beta2.ImportAptArtifactsGcsSource) source_; } return com.google.devtools.artifactregistry.v1beta2.ImportAptArtifactsGcsSource .getDefaultInstance(); } /** * * * <pre> * Google Cloud Storage location requested. * </pre> * * <code>.google.devtools.artifactregistry.v1beta2.ImportAptArtifactsGcsSource gcs_source = 1; * </code> */ @java.lang.Override public com.google.devtools.artifactregistry.v1beta2.ImportAptArtifactsGcsSourceOrBuilder getGcsSourceOrBuilder() { if (sourceCase_ == 1) { return (com.google.devtools.artifactregistry.v1beta2.ImportAptArtifactsGcsSource) source_; } return com.google.devtools.artifactregistry.v1beta2.ImportAptArtifactsGcsSource .getDefaultInstance(); } public static final int ERROR_FIELD_NUMBER = 2; private com.google.rpc.Status error_; /** * * * <pre> * The detailed error status. 
* </pre> * * <code>.google.rpc.Status error = 2;</code> * * @return Whether the error field is set. */ @java.lang.Override public boolean hasError() { return ((bitField0_ & 0x00000001) != 0); } /** * * * <pre> * The detailed error status. * </pre> * * <code>.google.rpc.Status error = 2;</code> * * @return The error. */ @java.lang.Override public com.google.rpc.Status getError() { return error_ == null ? com.google.rpc.Status.getDefaultInstance() : error_; } /** * * * <pre> * The detailed error status. * </pre> * * <code>.google.rpc.Status error = 2;</code> */ @java.lang.Override public com.google.rpc.StatusOrBuilder getErrorOrBuilder() { return error_ == null ? com.google.rpc.Status.getDefaultInstance() : error_; } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { if (sourceCase_ == 1) { output.writeMessage( 1, (com.google.devtools.artifactregistry.v1beta2.ImportAptArtifactsGcsSource) source_); } if (((bitField0_ & 0x00000001) != 0)) { output.writeMessage(2, getError()); } getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (sourceCase_ == 1) { size += com.google.protobuf.CodedOutputStream.computeMessageSize( 1, (com.google.devtools.artifactregistry.v1beta2.ImportAptArtifactsGcsSource) source_); } if (((bitField0_ & 0x00000001) != 0)) { size += com.google.protobuf.CodedOutputStream.computeMessageSize(2, getError()); } size += getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof 
com.google.devtools.artifactregistry.v1beta2.ImportAptArtifactsErrorInfo)) { return super.equals(obj); } com.google.devtools.artifactregistry.v1beta2.ImportAptArtifactsErrorInfo other = (com.google.devtools.artifactregistry.v1beta2.ImportAptArtifactsErrorInfo) obj; if (hasError() != other.hasError()) return false; if (hasError()) { if (!getError().equals(other.getError())) return false; } if (!getSourceCase().equals(other.getSourceCase())) return false; switch (sourceCase_) { case 1: if (!getGcsSource().equals(other.getGcsSource())) return false; break; case 0: default: } if (!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); if (hasError()) { hash = (37 * hash) + ERROR_FIELD_NUMBER; hash = (53 * hash) + getError().hashCode(); } switch (sourceCase_) { case 1: hash = (37 * hash) + GCS_SOURCE_FIELD_NUMBER; hash = (53 * hash) + getGcsSource().hashCode(); break; case 0: default: } hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static com.google.devtools.artifactregistry.v1beta2.ImportAptArtifactsErrorInfo parseFrom( java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.devtools.artifactregistry.v1beta2.ImportAptArtifactsErrorInfo parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.devtools.artifactregistry.v1beta2.ImportAptArtifactsErrorInfo parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static 
com.google.devtools.artifactregistry.v1beta2.ImportAptArtifactsErrorInfo parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.devtools.artifactregistry.v1beta2.ImportAptArtifactsErrorInfo parseFrom( byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.devtools.artifactregistry.v1beta2.ImportAptArtifactsErrorInfo parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.devtools.artifactregistry.v1beta2.ImportAptArtifactsErrorInfo parseFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.devtools.artifactregistry.v1beta2.ImportAptArtifactsErrorInfo parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } public static com.google.devtools.artifactregistry.v1beta2.ImportAptArtifactsErrorInfo parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); } public static com.google.devtools.artifactregistry.v1beta2.ImportAptArtifactsErrorInfo parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException( PARSER, input, extensionRegistry); } public static 
com.google.devtools.artifactregistry.v1beta2.ImportAptArtifactsErrorInfo parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.devtools.artifactregistry.v1beta2.ImportAptArtifactsErrorInfo parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder( com.google.devtools.artifactregistry.v1beta2.ImportAptArtifactsErrorInfo prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * * * <pre> * Error information explaining why a package was not imported. 
* </pre> * * Protobuf type {@code google.devtools.artifactregistry.v1beta2.ImportAptArtifactsErrorInfo} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:google.devtools.artifactregistry.v1beta2.ImportAptArtifactsErrorInfo) com.google.devtools.artifactregistry.v1beta2.ImportAptArtifactsErrorInfoOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.devtools.artifactregistry.v1beta2.AptArtifactProto .internal_static_google_devtools_artifactregistry_v1beta2_ImportAptArtifactsErrorInfo_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.devtools.artifactregistry.v1beta2.AptArtifactProto .internal_static_google_devtools_artifactregistry_v1beta2_ImportAptArtifactsErrorInfo_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.devtools.artifactregistry.v1beta2.ImportAptArtifactsErrorInfo.class, com.google.devtools.artifactregistry.v1beta2.ImportAptArtifactsErrorInfo.Builder .class); } // Construct using // com.google.devtools.artifactregistry.v1beta2.ImportAptArtifactsErrorInfo.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) { getErrorFieldBuilder(); } } @java.lang.Override public Builder clear() { super.clear(); bitField0_ = 0; if (gcsSourceBuilder_ != null) { gcsSourceBuilder_.clear(); } error_ = null; if (errorBuilder_ != null) { errorBuilder_.dispose(); errorBuilder_ = null; } sourceCase_ = 0; source_ = null; return this; } @java.lang.Override public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return 
com.google.devtools.artifactregistry.v1beta2.AptArtifactProto .internal_static_google_devtools_artifactregistry_v1beta2_ImportAptArtifactsErrorInfo_descriptor; } @java.lang.Override public com.google.devtools.artifactregistry.v1beta2.ImportAptArtifactsErrorInfo getDefaultInstanceForType() { return com.google.devtools.artifactregistry.v1beta2.ImportAptArtifactsErrorInfo .getDefaultInstance(); } @java.lang.Override public com.google.devtools.artifactregistry.v1beta2.ImportAptArtifactsErrorInfo build() { com.google.devtools.artifactregistry.v1beta2.ImportAptArtifactsErrorInfo result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public com.google.devtools.artifactregistry.v1beta2.ImportAptArtifactsErrorInfo buildPartial() { com.google.devtools.artifactregistry.v1beta2.ImportAptArtifactsErrorInfo result = new com.google.devtools.artifactregistry.v1beta2.ImportAptArtifactsErrorInfo(this); if (bitField0_ != 0) { buildPartial0(result); } buildPartialOneofs(result); onBuilt(); return result; } private void buildPartial0( com.google.devtools.artifactregistry.v1beta2.ImportAptArtifactsErrorInfo result) { int from_bitField0_ = bitField0_; int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000002) != 0)) { result.error_ = errorBuilder_ == null ? 
error_ : errorBuilder_.build(); to_bitField0_ |= 0x00000001; } result.bitField0_ |= to_bitField0_; } private void buildPartialOneofs( com.google.devtools.artifactregistry.v1beta2.ImportAptArtifactsErrorInfo result) { result.sourceCase_ = sourceCase_; result.source_ = this.source_; if (sourceCase_ == 1 && gcsSourceBuilder_ != null) { result.source_ = gcsSourceBuilder_.build(); } } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof com.google.devtools.artifactregistry.v1beta2.ImportAptArtifactsErrorInfo) { return mergeFrom( (com.google.devtools.artifactregistry.v1beta2.ImportAptArtifactsErrorInfo) other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom( com.google.devtools.artifactregistry.v1beta2.ImportAptArtifactsErrorInfo other) { if (other == com.google.devtools.artifactregistry.v1beta2.ImportAptArtifactsErrorInfo .getDefaultInstance()) return this; if (other.hasError()) { mergeError(other.getError()); } switch (other.getSourceCase()) { case GCS_SOURCE: { mergeGcsSource(other.getGcsSource()); break; } case 
SOURCE_NOT_SET: { break; } } this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { input.readMessage(getGcsSourceFieldBuilder().getBuilder(), extensionRegistry); sourceCase_ = 1; break; } // case 10 case 18: { input.readMessage(getErrorFieldBuilder().getBuilder(), extensionRegistry); bitField0_ |= 0x00000002; break; } // case 18 default: { if (!super.parseUnknownField(input, extensionRegistry, tag)) { done = true; // was an endgroup tag } break; } // default: } // switch (tag) } // while (!done) } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.unwrapIOException(); } finally { onChanged(); } // finally return this; } private int sourceCase_ = 0; private java.lang.Object source_; public SourceCase getSourceCase() { return SourceCase.forNumber(sourceCase_); } public Builder clearSource() { sourceCase_ = 0; source_ = null; onChanged(); return this; } private int bitField0_; private com.google.protobuf.SingleFieldBuilderV3< com.google.devtools.artifactregistry.v1beta2.ImportAptArtifactsGcsSource, com.google.devtools.artifactregistry.v1beta2.ImportAptArtifactsGcsSource.Builder, com.google.devtools.artifactregistry.v1beta2.ImportAptArtifactsGcsSourceOrBuilder> gcsSourceBuilder_; /** * * * <pre> * Google Cloud Storage location requested. * </pre> * * <code>.google.devtools.artifactregistry.v1beta2.ImportAptArtifactsGcsSource gcs_source = 1; * </code> * * @return Whether the gcsSource field is set. 
*/ @java.lang.Override public boolean hasGcsSource() { return sourceCase_ == 1; } /** * * * <pre> * Google Cloud Storage location requested. * </pre> * * <code>.google.devtools.artifactregistry.v1beta2.ImportAptArtifactsGcsSource gcs_source = 1; * </code> * * @return The gcsSource. */ @java.lang.Override public com.google.devtools.artifactregistry.v1beta2.ImportAptArtifactsGcsSource getGcsSource() { if (gcsSourceBuilder_ == null) { if (sourceCase_ == 1) { return (com.google.devtools.artifactregistry.v1beta2.ImportAptArtifactsGcsSource) source_; } return com.google.devtools.artifactregistry.v1beta2.ImportAptArtifactsGcsSource .getDefaultInstance(); } else { if (sourceCase_ == 1) { return gcsSourceBuilder_.getMessage(); } return com.google.devtools.artifactregistry.v1beta2.ImportAptArtifactsGcsSource .getDefaultInstance(); } } /** * * * <pre> * Google Cloud Storage location requested. * </pre> * * <code>.google.devtools.artifactregistry.v1beta2.ImportAptArtifactsGcsSource gcs_source = 1; * </code> */ public Builder setGcsSource( com.google.devtools.artifactregistry.v1beta2.ImportAptArtifactsGcsSource value) { if (gcsSourceBuilder_ == null) { if (value == null) { throw new NullPointerException(); } source_ = value; onChanged(); } else { gcsSourceBuilder_.setMessage(value); } sourceCase_ = 1; return this; } /** * * * <pre> * Google Cloud Storage location requested. * </pre> * * <code>.google.devtools.artifactregistry.v1beta2.ImportAptArtifactsGcsSource gcs_source = 1; * </code> */ public Builder setGcsSource( com.google.devtools.artifactregistry.v1beta2.ImportAptArtifactsGcsSource.Builder builderForValue) { if (gcsSourceBuilder_ == null) { source_ = builderForValue.build(); onChanged(); } else { gcsSourceBuilder_.setMessage(builderForValue.build()); } sourceCase_ = 1; return this; } /** * * * <pre> * Google Cloud Storage location requested. 
* </pre> * * <code>.google.devtools.artifactregistry.v1beta2.ImportAptArtifactsGcsSource gcs_source = 1; * </code> */ public Builder mergeGcsSource( com.google.devtools.artifactregistry.v1beta2.ImportAptArtifactsGcsSource value) { if (gcsSourceBuilder_ == null) { if (sourceCase_ == 1 && source_ != com.google.devtools.artifactregistry.v1beta2.ImportAptArtifactsGcsSource .getDefaultInstance()) { source_ = com.google.devtools.artifactregistry.v1beta2.ImportAptArtifactsGcsSource.newBuilder( (com.google.devtools.artifactregistry.v1beta2.ImportAptArtifactsGcsSource) source_) .mergeFrom(value) .buildPartial(); } else { source_ = value; } onChanged(); } else { if (sourceCase_ == 1) { gcsSourceBuilder_.mergeFrom(value); } else { gcsSourceBuilder_.setMessage(value); } } sourceCase_ = 1; return this; } /** * * * <pre> * Google Cloud Storage location requested. * </pre> * * <code>.google.devtools.artifactregistry.v1beta2.ImportAptArtifactsGcsSource gcs_source = 1; * </code> */ public Builder clearGcsSource() { if (gcsSourceBuilder_ == null) { if (sourceCase_ == 1) { sourceCase_ = 0; source_ = null; onChanged(); } } else { if (sourceCase_ == 1) { sourceCase_ = 0; source_ = null; } gcsSourceBuilder_.clear(); } return this; } /** * * * <pre> * Google Cloud Storage location requested. * </pre> * * <code>.google.devtools.artifactregistry.v1beta2.ImportAptArtifactsGcsSource gcs_source = 1; * </code> */ public com.google.devtools.artifactregistry.v1beta2.ImportAptArtifactsGcsSource.Builder getGcsSourceBuilder() { return getGcsSourceFieldBuilder().getBuilder(); } /** * * * <pre> * Google Cloud Storage location requested. 
* </pre> * * <code>.google.devtools.artifactregistry.v1beta2.ImportAptArtifactsGcsSource gcs_source = 1; * </code> */ @java.lang.Override public com.google.devtools.artifactregistry.v1beta2.ImportAptArtifactsGcsSourceOrBuilder getGcsSourceOrBuilder() { if ((sourceCase_ == 1) && (gcsSourceBuilder_ != null)) { return gcsSourceBuilder_.getMessageOrBuilder(); } else { if (sourceCase_ == 1) { return (com.google.devtools.artifactregistry.v1beta2.ImportAptArtifactsGcsSource) source_; } return com.google.devtools.artifactregistry.v1beta2.ImportAptArtifactsGcsSource .getDefaultInstance(); } } /** * * * <pre> * Google Cloud Storage location requested. * </pre> * * <code>.google.devtools.artifactregistry.v1beta2.ImportAptArtifactsGcsSource gcs_source = 1; * </code> */ private com.google.protobuf.SingleFieldBuilderV3< com.google.devtools.artifactregistry.v1beta2.ImportAptArtifactsGcsSource, com.google.devtools.artifactregistry.v1beta2.ImportAptArtifactsGcsSource.Builder, com.google.devtools.artifactregistry.v1beta2.ImportAptArtifactsGcsSourceOrBuilder> getGcsSourceFieldBuilder() { if (gcsSourceBuilder_ == null) { if (!(sourceCase_ == 1)) { source_ = com.google.devtools.artifactregistry.v1beta2.ImportAptArtifactsGcsSource .getDefaultInstance(); } gcsSourceBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< com.google.devtools.artifactregistry.v1beta2.ImportAptArtifactsGcsSource, com.google.devtools.artifactregistry.v1beta2.ImportAptArtifactsGcsSource.Builder, com.google.devtools.artifactregistry.v1beta2.ImportAptArtifactsGcsSourceOrBuilder>( (com.google.devtools.artifactregistry.v1beta2.ImportAptArtifactsGcsSource) source_, getParentForChildren(), isClean()); source_ = null; } sourceCase_ = 1; onChanged(); return gcsSourceBuilder_; } private com.google.rpc.Status error_; private com.google.protobuf.SingleFieldBuilderV3< com.google.rpc.Status, com.google.rpc.Status.Builder, com.google.rpc.StatusOrBuilder> errorBuilder_; /** * * * <pre> * The detailed error status. 
* </pre> * * <code>.google.rpc.Status error = 2;</code> * * @return Whether the error field is set. */ public boolean hasError() { return ((bitField0_ & 0x00000002) != 0); } /** * * * <pre> * The detailed error status. * </pre> * * <code>.google.rpc.Status error = 2;</code> * * @return The error. */ public com.google.rpc.Status getError() { if (errorBuilder_ == null) { return error_ == null ? com.google.rpc.Status.getDefaultInstance() : error_; } else { return errorBuilder_.getMessage(); } } /** * * * <pre> * The detailed error status. * </pre> * * <code>.google.rpc.Status error = 2;</code> */ public Builder setError(com.google.rpc.Status value) { if (errorBuilder_ == null) { if (value == null) { throw new NullPointerException(); } error_ = value; } else { errorBuilder_.setMessage(value); } bitField0_ |= 0x00000002; onChanged(); return this; } /** * * * <pre> * The detailed error status. * </pre> * * <code>.google.rpc.Status error = 2;</code> */ public Builder setError(com.google.rpc.Status.Builder builderForValue) { if (errorBuilder_ == null) { error_ = builderForValue.build(); } else { errorBuilder_.setMessage(builderForValue.build()); } bitField0_ |= 0x00000002; onChanged(); return this; } /** * * * <pre> * The detailed error status. * </pre> * * <code>.google.rpc.Status error = 2;</code> */ public Builder mergeError(com.google.rpc.Status value) { if (errorBuilder_ == null) { if (((bitField0_ & 0x00000002) != 0) && error_ != null && error_ != com.google.rpc.Status.getDefaultInstance()) { getErrorBuilder().mergeFrom(value); } else { error_ = value; } } else { errorBuilder_.mergeFrom(value); } if (error_ != null) { bitField0_ |= 0x00000002; onChanged(); } return this; } /** * * * <pre> * The detailed error status. 
* </pre> * * <code>.google.rpc.Status error = 2;</code> */ public Builder clearError() { bitField0_ = (bitField0_ & ~0x00000002); error_ = null; if (errorBuilder_ != null) { errorBuilder_.dispose(); errorBuilder_ = null; } onChanged(); return this; } /** * * * <pre> * The detailed error status. * </pre> * * <code>.google.rpc.Status error = 2;</code> */ public com.google.rpc.Status.Builder getErrorBuilder() { bitField0_ |= 0x00000002; onChanged(); return getErrorFieldBuilder().getBuilder(); } /** * * * <pre> * The detailed error status. * </pre> * * <code>.google.rpc.Status error = 2;</code> */ public com.google.rpc.StatusOrBuilder getErrorOrBuilder() { if (errorBuilder_ != null) { return errorBuilder_.getMessageOrBuilder(); } else { return error_ == null ? com.google.rpc.Status.getDefaultInstance() : error_; } } /** * * * <pre> * The detailed error status. * </pre> * * <code>.google.rpc.Status error = 2;</code> */ private com.google.protobuf.SingleFieldBuilderV3< com.google.rpc.Status, com.google.rpc.Status.Builder, com.google.rpc.StatusOrBuilder> getErrorFieldBuilder() { if (errorBuilder_ == null) { errorBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< com.google.rpc.Status, com.google.rpc.Status.Builder, com.google.rpc.StatusOrBuilder>(getError(), getParentForChildren(), isClean()); error_ = null; } return errorBuilder_; } @java.lang.Override public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:google.devtools.artifactregistry.v1beta2.ImportAptArtifactsErrorInfo) } // @@protoc_insertion_point(class_scope:google.devtools.artifactregistry.v1beta2.ImportAptArtifactsErrorInfo) private static final 
com.google.devtools.artifactregistry.v1beta2.ImportAptArtifactsErrorInfo DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new com.google.devtools.artifactregistry.v1beta2.ImportAptArtifactsErrorInfo(); } public static com.google.devtools.artifactregistry.v1beta2.ImportAptArtifactsErrorInfo getDefaultInstance() { return DEFAULT_INSTANCE; } private static final com.google.protobuf.Parser<ImportAptArtifactsErrorInfo> PARSER = new com.google.protobuf.AbstractParser<ImportAptArtifactsErrorInfo>() { @java.lang.Override public ImportAptArtifactsErrorInfo parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { Builder builder = newBuilder(); try { builder.mergeFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(builder.buildPartial()); } catch (com.google.protobuf.UninitializedMessageException e) { throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException(e) .setUnfinishedMessage(builder.buildPartial()); } return builder.buildPartial(); } }; public static com.google.protobuf.Parser<ImportAptArtifactsErrorInfo> parser() { return PARSER; } @java.lang.Override public com.google.protobuf.Parser<ImportAptArtifactsErrorInfo> getParserForType() { return PARSER; } @java.lang.Override public com.google.devtools.artifactregistry.v1beta2.ImportAptArtifactsErrorInfo getDefaultInstanceForType() { return DEFAULT_INSTANCE; } }
googleapis/google-cloud-java
36,138
java-artifact-registry/proto-google-cloud-artifact-registry-v1beta2/src/main/java/com/google/devtools/artifactregistry/v1beta2/ImportYumArtifactsErrorInfo.java
/* * Copyright 2025 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ // Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/devtools/artifactregistry/v1beta2/yum_artifact.proto // Protobuf Java Version: 3.25.8 package com.google.devtools.artifactregistry.v1beta2; /** * * * <pre> * Error information explaining why a package was not imported. * </pre> * * Protobuf type {@code google.devtools.artifactregistry.v1beta2.ImportYumArtifactsErrorInfo} */ public final class ImportYumArtifactsErrorInfo extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.devtools.artifactregistry.v1beta2.ImportYumArtifactsErrorInfo) ImportYumArtifactsErrorInfoOrBuilder { private static final long serialVersionUID = 0L; // Use ImportYumArtifactsErrorInfo.newBuilder() to construct. 
private ImportYumArtifactsErrorInfo(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private ImportYumArtifactsErrorInfo() {} @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance(UnusedPrivateParameter unused) { return new ImportYumArtifactsErrorInfo(); } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.devtools.artifactregistry.v1beta2.YumArtifactProto .internal_static_google_devtools_artifactregistry_v1beta2_ImportYumArtifactsErrorInfo_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.devtools.artifactregistry.v1beta2.YumArtifactProto .internal_static_google_devtools_artifactregistry_v1beta2_ImportYumArtifactsErrorInfo_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.devtools.artifactregistry.v1beta2.ImportYumArtifactsErrorInfo.class, com.google.devtools.artifactregistry.v1beta2.ImportYumArtifactsErrorInfo.Builder.class); } private int bitField0_; private int sourceCase_ = 0; @SuppressWarnings("serial") private java.lang.Object source_; public enum SourceCase implements com.google.protobuf.Internal.EnumLite, com.google.protobuf.AbstractMessage.InternalOneOfEnum { GCS_SOURCE(1), SOURCE_NOT_SET(0); private final int value; private SourceCase(int value) { this.value = value; } /** * @param value The number of the enum to look for. * @return The enum associated with the given number. * @deprecated Use {@link #forNumber(int)} instead. 
*/ @java.lang.Deprecated public static SourceCase valueOf(int value) { return forNumber(value); } public static SourceCase forNumber(int value) { switch (value) { case 1: return GCS_SOURCE; case 0: return SOURCE_NOT_SET; default: return null; } } public int getNumber() { return this.value; } }; public SourceCase getSourceCase() { return SourceCase.forNumber(sourceCase_); } public static final int GCS_SOURCE_FIELD_NUMBER = 1; /** * * * <pre> * Google Cloud Storage location requested. * </pre> * * <code>.google.devtools.artifactregistry.v1beta2.ImportYumArtifactsGcsSource gcs_source = 1; * </code> * * @return Whether the gcsSource field is set. */ @java.lang.Override public boolean hasGcsSource() { return sourceCase_ == 1; } /** * * * <pre> * Google Cloud Storage location requested. * </pre> * * <code>.google.devtools.artifactregistry.v1beta2.ImportYumArtifactsGcsSource gcs_source = 1; * </code> * * @return The gcsSource. */ @java.lang.Override public com.google.devtools.artifactregistry.v1beta2.ImportYumArtifactsGcsSource getGcsSource() { if (sourceCase_ == 1) { return (com.google.devtools.artifactregistry.v1beta2.ImportYumArtifactsGcsSource) source_; } return com.google.devtools.artifactregistry.v1beta2.ImportYumArtifactsGcsSource .getDefaultInstance(); } /** * * * <pre> * Google Cloud Storage location requested. * </pre> * * <code>.google.devtools.artifactregistry.v1beta2.ImportYumArtifactsGcsSource gcs_source = 1; * </code> */ @java.lang.Override public com.google.devtools.artifactregistry.v1beta2.ImportYumArtifactsGcsSourceOrBuilder getGcsSourceOrBuilder() { if (sourceCase_ == 1) { return (com.google.devtools.artifactregistry.v1beta2.ImportYumArtifactsGcsSource) source_; } return com.google.devtools.artifactregistry.v1beta2.ImportYumArtifactsGcsSource .getDefaultInstance(); } public static final int ERROR_FIELD_NUMBER = 2; private com.google.rpc.Status error_; /** * * * <pre> * The detailed error status. 
* </pre> * * <code>.google.rpc.Status error = 2;</code> * * @return Whether the error field is set. */ @java.lang.Override public boolean hasError() { return ((bitField0_ & 0x00000001) != 0); } /** * * * <pre> * The detailed error status. * </pre> * * <code>.google.rpc.Status error = 2;</code> * * @return The error. */ @java.lang.Override public com.google.rpc.Status getError() { return error_ == null ? com.google.rpc.Status.getDefaultInstance() : error_; } /** * * * <pre> * The detailed error status. * </pre> * * <code>.google.rpc.Status error = 2;</code> */ @java.lang.Override public com.google.rpc.StatusOrBuilder getErrorOrBuilder() { return error_ == null ? com.google.rpc.Status.getDefaultInstance() : error_; } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { if (sourceCase_ == 1) { output.writeMessage( 1, (com.google.devtools.artifactregistry.v1beta2.ImportYumArtifactsGcsSource) source_); } if (((bitField0_ & 0x00000001) != 0)) { output.writeMessage(2, getError()); } getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (sourceCase_ == 1) { size += com.google.protobuf.CodedOutputStream.computeMessageSize( 1, (com.google.devtools.artifactregistry.v1beta2.ImportYumArtifactsGcsSource) source_); } if (((bitField0_ & 0x00000001) != 0)) { size += com.google.protobuf.CodedOutputStream.computeMessageSize(2, getError()); } size += getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof 
com.google.devtools.artifactregistry.v1beta2.ImportYumArtifactsErrorInfo)) { return super.equals(obj); } com.google.devtools.artifactregistry.v1beta2.ImportYumArtifactsErrorInfo other = (com.google.devtools.artifactregistry.v1beta2.ImportYumArtifactsErrorInfo) obj; if (hasError() != other.hasError()) return false; if (hasError()) { if (!getError().equals(other.getError())) return false; } if (!getSourceCase().equals(other.getSourceCase())) return false; switch (sourceCase_) { case 1: if (!getGcsSource().equals(other.getGcsSource())) return false; break; case 0: default: } if (!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); if (hasError()) { hash = (37 * hash) + ERROR_FIELD_NUMBER; hash = (53 * hash) + getError().hashCode(); } switch (sourceCase_) { case 1: hash = (37 * hash) + GCS_SOURCE_FIELD_NUMBER; hash = (53 * hash) + getGcsSource().hashCode(); break; case 0: default: } hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static com.google.devtools.artifactregistry.v1beta2.ImportYumArtifactsErrorInfo parseFrom( java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.devtools.artifactregistry.v1beta2.ImportYumArtifactsErrorInfo parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.devtools.artifactregistry.v1beta2.ImportYumArtifactsErrorInfo parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static 
com.google.devtools.artifactregistry.v1beta2.ImportYumArtifactsErrorInfo parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.devtools.artifactregistry.v1beta2.ImportYumArtifactsErrorInfo parseFrom( byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.devtools.artifactregistry.v1beta2.ImportYumArtifactsErrorInfo parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.devtools.artifactregistry.v1beta2.ImportYumArtifactsErrorInfo parseFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.devtools.artifactregistry.v1beta2.ImportYumArtifactsErrorInfo parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } public static com.google.devtools.artifactregistry.v1beta2.ImportYumArtifactsErrorInfo parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); } public static com.google.devtools.artifactregistry.v1beta2.ImportYumArtifactsErrorInfo parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException( PARSER, input, extensionRegistry); } public static 
com.google.devtools.artifactregistry.v1beta2.ImportYumArtifactsErrorInfo parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.devtools.artifactregistry.v1beta2.ImportYumArtifactsErrorInfo parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder( com.google.devtools.artifactregistry.v1beta2.ImportYumArtifactsErrorInfo prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * * * <pre> * Error information explaining why a package was not imported. 
* </pre> * * Protobuf type {@code google.devtools.artifactregistry.v1beta2.ImportYumArtifactsErrorInfo} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:google.devtools.artifactregistry.v1beta2.ImportYumArtifactsErrorInfo) com.google.devtools.artifactregistry.v1beta2.ImportYumArtifactsErrorInfoOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.devtools.artifactregistry.v1beta2.YumArtifactProto .internal_static_google_devtools_artifactregistry_v1beta2_ImportYumArtifactsErrorInfo_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.devtools.artifactregistry.v1beta2.YumArtifactProto .internal_static_google_devtools_artifactregistry_v1beta2_ImportYumArtifactsErrorInfo_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.devtools.artifactregistry.v1beta2.ImportYumArtifactsErrorInfo.class, com.google.devtools.artifactregistry.v1beta2.ImportYumArtifactsErrorInfo.Builder .class); } // Construct using // com.google.devtools.artifactregistry.v1beta2.ImportYumArtifactsErrorInfo.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) { getErrorFieldBuilder(); } } @java.lang.Override public Builder clear() { super.clear(); bitField0_ = 0; if (gcsSourceBuilder_ != null) { gcsSourceBuilder_.clear(); } error_ = null; if (errorBuilder_ != null) { errorBuilder_.dispose(); errorBuilder_ = null; } sourceCase_ = 0; source_ = null; return this; } @java.lang.Override public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return 
com.google.devtools.artifactregistry.v1beta2.YumArtifactProto .internal_static_google_devtools_artifactregistry_v1beta2_ImportYumArtifactsErrorInfo_descriptor; } @java.lang.Override public com.google.devtools.artifactregistry.v1beta2.ImportYumArtifactsErrorInfo getDefaultInstanceForType() { return com.google.devtools.artifactregistry.v1beta2.ImportYumArtifactsErrorInfo .getDefaultInstance(); } @java.lang.Override public com.google.devtools.artifactregistry.v1beta2.ImportYumArtifactsErrorInfo build() { com.google.devtools.artifactregistry.v1beta2.ImportYumArtifactsErrorInfo result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public com.google.devtools.artifactregistry.v1beta2.ImportYumArtifactsErrorInfo buildPartial() { com.google.devtools.artifactregistry.v1beta2.ImportYumArtifactsErrorInfo result = new com.google.devtools.artifactregistry.v1beta2.ImportYumArtifactsErrorInfo(this); if (bitField0_ != 0) { buildPartial0(result); } buildPartialOneofs(result); onBuilt(); return result; } private void buildPartial0( com.google.devtools.artifactregistry.v1beta2.ImportYumArtifactsErrorInfo result) { int from_bitField0_ = bitField0_; int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000002) != 0)) { result.error_ = errorBuilder_ == null ? 
error_ : errorBuilder_.build(); to_bitField0_ |= 0x00000001; } result.bitField0_ |= to_bitField0_; } private void buildPartialOneofs( com.google.devtools.artifactregistry.v1beta2.ImportYumArtifactsErrorInfo result) { result.sourceCase_ = sourceCase_; result.source_ = this.source_; if (sourceCase_ == 1 && gcsSourceBuilder_ != null) { result.source_ = gcsSourceBuilder_.build(); } } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof com.google.devtools.artifactregistry.v1beta2.ImportYumArtifactsErrorInfo) { return mergeFrom( (com.google.devtools.artifactregistry.v1beta2.ImportYumArtifactsErrorInfo) other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom( com.google.devtools.artifactregistry.v1beta2.ImportYumArtifactsErrorInfo other) { if (other == com.google.devtools.artifactregistry.v1beta2.ImportYumArtifactsErrorInfo .getDefaultInstance()) return this; if (other.hasError()) { mergeError(other.getError()); } switch (other.getSourceCase()) { case GCS_SOURCE: { mergeGcsSource(other.getGcsSource()); break; } case 
SOURCE_NOT_SET: { break; } } this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { input.readMessage(getGcsSourceFieldBuilder().getBuilder(), extensionRegistry); sourceCase_ = 1; break; } // case 10 case 18: { input.readMessage(getErrorFieldBuilder().getBuilder(), extensionRegistry); bitField0_ |= 0x00000002; break; } // case 18 default: { if (!super.parseUnknownField(input, extensionRegistry, tag)) { done = true; // was an endgroup tag } break; } // default: } // switch (tag) } // while (!done) } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.unwrapIOException(); } finally { onChanged(); } // finally return this; } private int sourceCase_ = 0; private java.lang.Object source_; public SourceCase getSourceCase() { return SourceCase.forNumber(sourceCase_); } public Builder clearSource() { sourceCase_ = 0; source_ = null; onChanged(); return this; } private int bitField0_; private com.google.protobuf.SingleFieldBuilderV3< com.google.devtools.artifactregistry.v1beta2.ImportYumArtifactsGcsSource, com.google.devtools.artifactregistry.v1beta2.ImportYumArtifactsGcsSource.Builder, com.google.devtools.artifactregistry.v1beta2.ImportYumArtifactsGcsSourceOrBuilder> gcsSourceBuilder_; /** * * * <pre> * Google Cloud Storage location requested. * </pre> * * <code>.google.devtools.artifactregistry.v1beta2.ImportYumArtifactsGcsSource gcs_source = 1; * </code> * * @return Whether the gcsSource field is set. 
*/ @java.lang.Override public boolean hasGcsSource() { return sourceCase_ == 1; } /** * * * <pre> * Google Cloud Storage location requested. * </pre> * * <code>.google.devtools.artifactregistry.v1beta2.ImportYumArtifactsGcsSource gcs_source = 1; * </code> * * @return The gcsSource. */ @java.lang.Override public com.google.devtools.artifactregistry.v1beta2.ImportYumArtifactsGcsSource getGcsSource() { if (gcsSourceBuilder_ == null) { if (sourceCase_ == 1) { return (com.google.devtools.artifactregistry.v1beta2.ImportYumArtifactsGcsSource) source_; } return com.google.devtools.artifactregistry.v1beta2.ImportYumArtifactsGcsSource .getDefaultInstance(); } else { if (sourceCase_ == 1) { return gcsSourceBuilder_.getMessage(); } return com.google.devtools.artifactregistry.v1beta2.ImportYumArtifactsGcsSource .getDefaultInstance(); } } /** * * * <pre> * Google Cloud Storage location requested. * </pre> * * <code>.google.devtools.artifactregistry.v1beta2.ImportYumArtifactsGcsSource gcs_source = 1; * </code> */ public Builder setGcsSource( com.google.devtools.artifactregistry.v1beta2.ImportYumArtifactsGcsSource value) { if (gcsSourceBuilder_ == null) { if (value == null) { throw new NullPointerException(); } source_ = value; onChanged(); } else { gcsSourceBuilder_.setMessage(value); } sourceCase_ = 1; return this; } /** * * * <pre> * Google Cloud Storage location requested. * </pre> * * <code>.google.devtools.artifactregistry.v1beta2.ImportYumArtifactsGcsSource gcs_source = 1; * </code> */ public Builder setGcsSource( com.google.devtools.artifactregistry.v1beta2.ImportYumArtifactsGcsSource.Builder builderForValue) { if (gcsSourceBuilder_ == null) { source_ = builderForValue.build(); onChanged(); } else { gcsSourceBuilder_.setMessage(builderForValue.build()); } sourceCase_ = 1; return this; } /** * * * <pre> * Google Cloud Storage location requested. 
* </pre> * * <code>.google.devtools.artifactregistry.v1beta2.ImportYumArtifactsGcsSource gcs_source = 1; * </code> */ public Builder mergeGcsSource( com.google.devtools.artifactregistry.v1beta2.ImportYumArtifactsGcsSource value) { if (gcsSourceBuilder_ == null) { if (sourceCase_ == 1 && source_ != com.google.devtools.artifactregistry.v1beta2.ImportYumArtifactsGcsSource .getDefaultInstance()) { source_ = com.google.devtools.artifactregistry.v1beta2.ImportYumArtifactsGcsSource.newBuilder( (com.google.devtools.artifactregistry.v1beta2.ImportYumArtifactsGcsSource) source_) .mergeFrom(value) .buildPartial(); } else { source_ = value; } onChanged(); } else { if (sourceCase_ == 1) { gcsSourceBuilder_.mergeFrom(value); } else { gcsSourceBuilder_.setMessage(value); } } sourceCase_ = 1; return this; } /** * * * <pre> * Google Cloud Storage location requested. * </pre> * * <code>.google.devtools.artifactregistry.v1beta2.ImportYumArtifactsGcsSource gcs_source = 1; * </code> */ public Builder clearGcsSource() { if (gcsSourceBuilder_ == null) { if (sourceCase_ == 1) { sourceCase_ = 0; source_ = null; onChanged(); } } else { if (sourceCase_ == 1) { sourceCase_ = 0; source_ = null; } gcsSourceBuilder_.clear(); } return this; } /** * * * <pre> * Google Cloud Storage location requested. * </pre> * * <code>.google.devtools.artifactregistry.v1beta2.ImportYumArtifactsGcsSource gcs_source = 1; * </code> */ public com.google.devtools.artifactregistry.v1beta2.ImportYumArtifactsGcsSource.Builder getGcsSourceBuilder() { return getGcsSourceFieldBuilder().getBuilder(); } /** * * * <pre> * Google Cloud Storage location requested. 
* </pre> * * <code>.google.devtools.artifactregistry.v1beta2.ImportYumArtifactsGcsSource gcs_source = 1; * </code> */ @java.lang.Override public com.google.devtools.artifactregistry.v1beta2.ImportYumArtifactsGcsSourceOrBuilder getGcsSourceOrBuilder() { if ((sourceCase_ == 1) && (gcsSourceBuilder_ != null)) { return gcsSourceBuilder_.getMessageOrBuilder(); } else { if (sourceCase_ == 1) { return (com.google.devtools.artifactregistry.v1beta2.ImportYumArtifactsGcsSource) source_; } return com.google.devtools.artifactregistry.v1beta2.ImportYumArtifactsGcsSource .getDefaultInstance(); } } /** * * * <pre> * Google Cloud Storage location requested. * </pre> * * <code>.google.devtools.artifactregistry.v1beta2.ImportYumArtifactsGcsSource gcs_source = 1; * </code> */ private com.google.protobuf.SingleFieldBuilderV3< com.google.devtools.artifactregistry.v1beta2.ImportYumArtifactsGcsSource, com.google.devtools.artifactregistry.v1beta2.ImportYumArtifactsGcsSource.Builder, com.google.devtools.artifactregistry.v1beta2.ImportYumArtifactsGcsSourceOrBuilder> getGcsSourceFieldBuilder() { if (gcsSourceBuilder_ == null) { if (!(sourceCase_ == 1)) { source_ = com.google.devtools.artifactregistry.v1beta2.ImportYumArtifactsGcsSource .getDefaultInstance(); } gcsSourceBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< com.google.devtools.artifactregistry.v1beta2.ImportYumArtifactsGcsSource, com.google.devtools.artifactregistry.v1beta2.ImportYumArtifactsGcsSource.Builder, com.google.devtools.artifactregistry.v1beta2.ImportYumArtifactsGcsSourceOrBuilder>( (com.google.devtools.artifactregistry.v1beta2.ImportYumArtifactsGcsSource) source_, getParentForChildren(), isClean()); source_ = null; } sourceCase_ = 1; onChanged(); return gcsSourceBuilder_; } private com.google.rpc.Status error_; private com.google.protobuf.SingleFieldBuilderV3< com.google.rpc.Status, com.google.rpc.Status.Builder, com.google.rpc.StatusOrBuilder> errorBuilder_; /** * * * <pre> * The detailed error status. 
* </pre> * * <code>.google.rpc.Status error = 2;</code> * * @return Whether the error field is set. */ public boolean hasError() { return ((bitField0_ & 0x00000002) != 0); } /** * * * <pre> * The detailed error status. * </pre> * * <code>.google.rpc.Status error = 2;</code> * * @return The error. */ public com.google.rpc.Status getError() { if (errorBuilder_ == null) { return error_ == null ? com.google.rpc.Status.getDefaultInstance() : error_; } else { return errorBuilder_.getMessage(); } } /** * * * <pre> * The detailed error status. * </pre> * * <code>.google.rpc.Status error = 2;</code> */ public Builder setError(com.google.rpc.Status value) { if (errorBuilder_ == null) { if (value == null) { throw new NullPointerException(); } error_ = value; } else { errorBuilder_.setMessage(value); } bitField0_ |= 0x00000002; onChanged(); return this; } /** * * * <pre> * The detailed error status. * </pre> * * <code>.google.rpc.Status error = 2;</code> */ public Builder setError(com.google.rpc.Status.Builder builderForValue) { if (errorBuilder_ == null) { error_ = builderForValue.build(); } else { errorBuilder_.setMessage(builderForValue.build()); } bitField0_ |= 0x00000002; onChanged(); return this; } /** * * * <pre> * The detailed error status. * </pre> * * <code>.google.rpc.Status error = 2;</code> */ public Builder mergeError(com.google.rpc.Status value) { if (errorBuilder_ == null) { if (((bitField0_ & 0x00000002) != 0) && error_ != null && error_ != com.google.rpc.Status.getDefaultInstance()) { getErrorBuilder().mergeFrom(value); } else { error_ = value; } } else { errorBuilder_.mergeFrom(value); } if (error_ != null) { bitField0_ |= 0x00000002; onChanged(); } return this; } /** * * * <pre> * The detailed error status. 
* </pre> * * <code>.google.rpc.Status error = 2;</code> */ public Builder clearError() { bitField0_ = (bitField0_ & ~0x00000002); error_ = null; if (errorBuilder_ != null) { errorBuilder_.dispose(); errorBuilder_ = null; } onChanged(); return this; } /** * * * <pre> * The detailed error status. * </pre> * * <code>.google.rpc.Status error = 2;</code> */ public com.google.rpc.Status.Builder getErrorBuilder() { bitField0_ |= 0x00000002; onChanged(); return getErrorFieldBuilder().getBuilder(); } /** * * * <pre> * The detailed error status. * </pre> * * <code>.google.rpc.Status error = 2;</code> */ public com.google.rpc.StatusOrBuilder getErrorOrBuilder() { if (errorBuilder_ != null) { return errorBuilder_.getMessageOrBuilder(); } else { return error_ == null ? com.google.rpc.Status.getDefaultInstance() : error_; } } /** * * * <pre> * The detailed error status. * </pre> * * <code>.google.rpc.Status error = 2;</code> */ private com.google.protobuf.SingleFieldBuilderV3< com.google.rpc.Status, com.google.rpc.Status.Builder, com.google.rpc.StatusOrBuilder> getErrorFieldBuilder() { if (errorBuilder_ == null) { errorBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< com.google.rpc.Status, com.google.rpc.Status.Builder, com.google.rpc.StatusOrBuilder>(getError(), getParentForChildren(), isClean()); error_ = null; } return errorBuilder_; } @java.lang.Override public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:google.devtools.artifactregistry.v1beta2.ImportYumArtifactsErrorInfo) } // @@protoc_insertion_point(class_scope:google.devtools.artifactregistry.v1beta2.ImportYumArtifactsErrorInfo) private static final 
com.google.devtools.artifactregistry.v1beta2.ImportYumArtifactsErrorInfo DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new com.google.devtools.artifactregistry.v1beta2.ImportYumArtifactsErrorInfo(); } public static com.google.devtools.artifactregistry.v1beta2.ImportYumArtifactsErrorInfo getDefaultInstance() { return DEFAULT_INSTANCE; } private static final com.google.protobuf.Parser<ImportYumArtifactsErrorInfo> PARSER = new com.google.protobuf.AbstractParser<ImportYumArtifactsErrorInfo>() { @java.lang.Override public ImportYumArtifactsErrorInfo parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { Builder builder = newBuilder(); try { builder.mergeFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(builder.buildPartial()); } catch (com.google.protobuf.UninitializedMessageException e) { throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException(e) .setUnfinishedMessage(builder.buildPartial()); } return builder.buildPartial(); } }; public static com.google.protobuf.Parser<ImportYumArtifactsErrorInfo> parser() { return PARSER; } @java.lang.Override public com.google.protobuf.Parser<ImportYumArtifactsErrorInfo> getParserForType() { return PARSER; } @java.lang.Override public com.google.devtools.artifactregistry.v1beta2.ImportYumArtifactsErrorInfo getDefaultInstanceForType() { return DEFAULT_INSTANCE; } }
googleapis/google-cloud-java
36,266
java-gsuite-addons/proto-google-apps-script-type-protos/src/main/java/com/google/apps/script/type/drive/DriveAddOnManifest.java
/* * Copyright 2025 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ // Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/apps/script/type/drive/drive_addon_manifest.proto // Protobuf Java Version: 3.25.8 package com.google.apps.script.type.drive; /** * * * <pre> * Drive add-on manifest. * </pre> * * Protobuf type {@code google.apps.script.type.drive.DriveAddOnManifest} */ public final class DriveAddOnManifest extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.apps.script.type.drive.DriveAddOnManifest) DriveAddOnManifestOrBuilder { private static final long serialVersionUID = 0L; // Use DriveAddOnManifest.newBuilder() to construct. 
private DriveAddOnManifest(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private DriveAddOnManifest() {} @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance(UnusedPrivateParameter unused) { return new DriveAddOnManifest(); } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.apps.script.type.drive.DriveAddOnManifestProto .internal_static_google_apps_script_type_drive_DriveAddOnManifest_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.apps.script.type.drive.DriveAddOnManifestProto .internal_static_google_apps_script_type_drive_DriveAddOnManifest_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.apps.script.type.drive.DriveAddOnManifest.class, com.google.apps.script.type.drive.DriveAddOnManifest.Builder.class); } private int bitField0_; public static final int HOMEPAGE_TRIGGER_FIELD_NUMBER = 1; private com.google.apps.script.type.HomepageExtensionPoint homepageTrigger_; /** * * * <pre> * If present, this overrides the configuration from * `addOns.common.homepageTrigger`. * </pre> * * <code>.google.apps.script.type.HomepageExtensionPoint homepage_trigger = 1;</code> * * @return Whether the homepageTrigger field is set. */ @java.lang.Override public boolean hasHomepageTrigger() { return ((bitField0_ & 0x00000001) != 0); } /** * * * <pre> * If present, this overrides the configuration from * `addOns.common.homepageTrigger`. * </pre> * * <code>.google.apps.script.type.HomepageExtensionPoint homepage_trigger = 1;</code> * * @return The homepageTrigger. */ @java.lang.Override public com.google.apps.script.type.HomepageExtensionPoint getHomepageTrigger() { return homepageTrigger_ == null ? 
com.google.apps.script.type.HomepageExtensionPoint.getDefaultInstance() : homepageTrigger_; } /** * * * <pre> * If present, this overrides the configuration from * `addOns.common.homepageTrigger`. * </pre> * * <code>.google.apps.script.type.HomepageExtensionPoint homepage_trigger = 1;</code> */ @java.lang.Override public com.google.apps.script.type.HomepageExtensionPointOrBuilder getHomepageTriggerOrBuilder() { return homepageTrigger_ == null ? com.google.apps.script.type.HomepageExtensionPoint.getDefaultInstance() : homepageTrigger_; } public static final int ON_ITEMS_SELECTED_TRIGGER_FIELD_NUMBER = 2; private com.google.apps.script.type.drive.DriveExtensionPoint onItemsSelectedTrigger_; /** * * * <pre> * Corresponds to behvior that should execute when items are selected * in relevant Drive view (e.g. the My Drive Doclist). * </pre> * * <code>.google.apps.script.type.drive.DriveExtensionPoint on_items_selected_trigger = 2;</code> * * @return Whether the onItemsSelectedTrigger field is set. */ @java.lang.Override public boolean hasOnItemsSelectedTrigger() { return ((bitField0_ & 0x00000002) != 0); } /** * * * <pre> * Corresponds to behvior that should execute when items are selected * in relevant Drive view (e.g. the My Drive Doclist). * </pre> * * <code>.google.apps.script.type.drive.DriveExtensionPoint on_items_selected_trigger = 2;</code> * * @return The onItemsSelectedTrigger. */ @java.lang.Override public com.google.apps.script.type.drive.DriveExtensionPoint getOnItemsSelectedTrigger() { return onItemsSelectedTrigger_ == null ? com.google.apps.script.type.drive.DriveExtensionPoint.getDefaultInstance() : onItemsSelectedTrigger_; } /** * * * <pre> * Corresponds to behvior that should execute when items are selected * in relevant Drive view (e.g. the My Drive Doclist). 
* </pre> * * <code>.google.apps.script.type.drive.DriveExtensionPoint on_items_selected_trigger = 2;</code> */ @java.lang.Override public com.google.apps.script.type.drive.DriveExtensionPointOrBuilder getOnItemsSelectedTriggerOrBuilder() { return onItemsSelectedTrigger_ == null ? com.google.apps.script.type.drive.DriveExtensionPoint.getDefaultInstance() : onItemsSelectedTrigger_; } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { if (((bitField0_ & 0x00000001) != 0)) { output.writeMessage(1, getHomepageTrigger()); } if (((bitField0_ & 0x00000002) != 0)) { output.writeMessage(2, getOnItemsSelectedTrigger()); } getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) != 0)) { size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, getHomepageTrigger()); } if (((bitField0_ & 0x00000002) != 0)) { size += com.google.protobuf.CodedOutputStream.computeMessageSize(2, getOnItemsSelectedTrigger()); } size += getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof com.google.apps.script.type.drive.DriveAddOnManifest)) { return super.equals(obj); } com.google.apps.script.type.drive.DriveAddOnManifest other = (com.google.apps.script.type.drive.DriveAddOnManifest) obj; if (hasHomepageTrigger() != other.hasHomepageTrigger()) return false; if (hasHomepageTrigger()) { if (!getHomepageTrigger().equals(other.getHomepageTrigger())) return false; } if (hasOnItemsSelectedTrigger() != 
other.hasOnItemsSelectedTrigger()) return false; if (hasOnItemsSelectedTrigger()) { if (!getOnItemsSelectedTrigger().equals(other.getOnItemsSelectedTrigger())) return false; } if (!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); if (hasHomepageTrigger()) { hash = (37 * hash) + HOMEPAGE_TRIGGER_FIELD_NUMBER; hash = (53 * hash) + getHomepageTrigger().hashCode(); } if (hasOnItemsSelectedTrigger()) { hash = (37 * hash) + ON_ITEMS_SELECTED_TRIGGER_FIELD_NUMBER; hash = (53 * hash) + getOnItemsSelectedTrigger().hashCode(); } hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static com.google.apps.script.type.drive.DriveAddOnManifest parseFrom( java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.apps.script.type.drive.DriveAddOnManifest parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.apps.script.type.drive.DriveAddOnManifest parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.apps.script.type.drive.DriveAddOnManifest parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.apps.script.type.drive.DriveAddOnManifest parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static 
com.google.apps.script.type.drive.DriveAddOnManifest parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.apps.script.type.drive.DriveAddOnManifest parseFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.apps.script.type.drive.DriveAddOnManifest parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } public static com.google.apps.script.type.drive.DriveAddOnManifest parseDelimitedFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); } public static com.google.apps.script.type.drive.DriveAddOnManifest parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException( PARSER, input, extensionRegistry); } public static com.google.apps.script.type.drive.DriveAddOnManifest parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.apps.script.type.drive.DriveAddOnManifest parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { 
return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder(com.google.apps.script.type.drive.DriveAddOnManifest prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * * * <pre> * Drive add-on manifest. * </pre> * * Protobuf type {@code google.apps.script.type.drive.DriveAddOnManifest} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:google.apps.script.type.drive.DriveAddOnManifest) com.google.apps.script.type.drive.DriveAddOnManifestOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.apps.script.type.drive.DriveAddOnManifestProto .internal_static_google_apps_script_type_drive_DriveAddOnManifest_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.apps.script.type.drive.DriveAddOnManifestProto .internal_static_google_apps_script_type_drive_DriveAddOnManifest_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.apps.script.type.drive.DriveAddOnManifest.class, com.google.apps.script.type.drive.DriveAddOnManifest.Builder.class); } // Construct using com.google.apps.script.type.drive.DriveAddOnManifest.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) { getHomepageTriggerFieldBuilder(); 
getOnItemsSelectedTriggerFieldBuilder(); } } @java.lang.Override public Builder clear() { super.clear(); bitField0_ = 0; homepageTrigger_ = null; if (homepageTriggerBuilder_ != null) { homepageTriggerBuilder_.dispose(); homepageTriggerBuilder_ = null; } onItemsSelectedTrigger_ = null; if (onItemsSelectedTriggerBuilder_ != null) { onItemsSelectedTriggerBuilder_.dispose(); onItemsSelectedTriggerBuilder_ = null; } return this; } @java.lang.Override public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return com.google.apps.script.type.drive.DriveAddOnManifestProto .internal_static_google_apps_script_type_drive_DriveAddOnManifest_descriptor; } @java.lang.Override public com.google.apps.script.type.drive.DriveAddOnManifest getDefaultInstanceForType() { return com.google.apps.script.type.drive.DriveAddOnManifest.getDefaultInstance(); } @java.lang.Override public com.google.apps.script.type.drive.DriveAddOnManifest build() { com.google.apps.script.type.drive.DriveAddOnManifest result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public com.google.apps.script.type.drive.DriveAddOnManifest buildPartial() { com.google.apps.script.type.drive.DriveAddOnManifest result = new com.google.apps.script.type.drive.DriveAddOnManifest(this); if (bitField0_ != 0) { buildPartial0(result); } onBuilt(); return result; } private void buildPartial0(com.google.apps.script.type.drive.DriveAddOnManifest result) { int from_bitField0_ = bitField0_; int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000001) != 0)) { result.homepageTrigger_ = homepageTriggerBuilder_ == null ? homepageTrigger_ : homepageTriggerBuilder_.build(); to_bitField0_ |= 0x00000001; } if (((from_bitField0_ & 0x00000002) != 0)) { result.onItemsSelectedTrigger_ = onItemsSelectedTriggerBuilder_ == null ? 
onItemsSelectedTrigger_ : onItemsSelectedTriggerBuilder_.build(); to_bitField0_ |= 0x00000002; } result.bitField0_ |= to_bitField0_; } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof com.google.apps.script.type.drive.DriveAddOnManifest) { return mergeFrom((com.google.apps.script.type.drive.DriveAddOnManifest) other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(com.google.apps.script.type.drive.DriveAddOnManifest other) { if (other == com.google.apps.script.type.drive.DriveAddOnManifest.getDefaultInstance()) return this; if (other.hasHomepageTrigger()) { mergeHomepageTrigger(other.getHomepageTrigger()); } if (other.hasOnItemsSelectedTrigger()) { mergeOnItemsSelectedTrigger(other.getOnItemsSelectedTrigger()); } this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) 
throws java.io.IOException { if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { input.readMessage(getHomepageTriggerFieldBuilder().getBuilder(), extensionRegistry); bitField0_ |= 0x00000001; break; } // case 10 case 18: { input.readMessage( getOnItemsSelectedTriggerFieldBuilder().getBuilder(), extensionRegistry); bitField0_ |= 0x00000002; break; } // case 18 default: { if (!super.parseUnknownField(input, extensionRegistry, tag)) { done = true; // was an endgroup tag } break; } // default: } // switch (tag) } // while (!done) } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.unwrapIOException(); } finally { onChanged(); } // finally return this; } private int bitField0_; private com.google.apps.script.type.HomepageExtensionPoint homepageTrigger_; private com.google.protobuf.SingleFieldBuilderV3< com.google.apps.script.type.HomepageExtensionPoint, com.google.apps.script.type.HomepageExtensionPoint.Builder, com.google.apps.script.type.HomepageExtensionPointOrBuilder> homepageTriggerBuilder_; /** * * * <pre> * If present, this overrides the configuration from * `addOns.common.homepageTrigger`. * </pre> * * <code>.google.apps.script.type.HomepageExtensionPoint homepage_trigger = 1;</code> * * @return Whether the homepageTrigger field is set. */ public boolean hasHomepageTrigger() { return ((bitField0_ & 0x00000001) != 0); } /** * * * <pre> * If present, this overrides the configuration from * `addOns.common.homepageTrigger`. * </pre> * * <code>.google.apps.script.type.HomepageExtensionPoint homepage_trigger = 1;</code> * * @return The homepageTrigger. */ public com.google.apps.script.type.HomepageExtensionPoint getHomepageTrigger() { if (homepageTriggerBuilder_ == null) { return homepageTrigger_ == null ? 
com.google.apps.script.type.HomepageExtensionPoint.getDefaultInstance() : homepageTrigger_; } else { return homepageTriggerBuilder_.getMessage(); } } /** * * * <pre> * If present, this overrides the configuration from * `addOns.common.homepageTrigger`. * </pre> * * <code>.google.apps.script.type.HomepageExtensionPoint homepage_trigger = 1;</code> */ public Builder setHomepageTrigger(com.google.apps.script.type.HomepageExtensionPoint value) { if (homepageTriggerBuilder_ == null) { if (value == null) { throw new NullPointerException(); } homepageTrigger_ = value; } else { homepageTriggerBuilder_.setMessage(value); } bitField0_ |= 0x00000001; onChanged(); return this; } /** * * * <pre> * If present, this overrides the configuration from * `addOns.common.homepageTrigger`. * </pre> * * <code>.google.apps.script.type.HomepageExtensionPoint homepage_trigger = 1;</code> */ public Builder setHomepageTrigger( com.google.apps.script.type.HomepageExtensionPoint.Builder builderForValue) { if (homepageTriggerBuilder_ == null) { homepageTrigger_ = builderForValue.build(); } else { homepageTriggerBuilder_.setMessage(builderForValue.build()); } bitField0_ |= 0x00000001; onChanged(); return this; } /** * * * <pre> * If present, this overrides the configuration from * `addOns.common.homepageTrigger`. 
* </pre> * * <code>.google.apps.script.type.HomepageExtensionPoint homepage_trigger = 1;</code> */ public Builder mergeHomepageTrigger(com.google.apps.script.type.HomepageExtensionPoint value) { if (homepageTriggerBuilder_ == null) { if (((bitField0_ & 0x00000001) != 0) && homepageTrigger_ != null && homepageTrigger_ != com.google.apps.script.type.HomepageExtensionPoint.getDefaultInstance()) { getHomepageTriggerBuilder().mergeFrom(value); } else { homepageTrigger_ = value; } } else { homepageTriggerBuilder_.mergeFrom(value); } if (homepageTrigger_ != null) { bitField0_ |= 0x00000001; onChanged(); } return this; } /** * * * <pre> * If present, this overrides the configuration from * `addOns.common.homepageTrigger`. * </pre> * * <code>.google.apps.script.type.HomepageExtensionPoint homepage_trigger = 1;</code> */ public Builder clearHomepageTrigger() { bitField0_ = (bitField0_ & ~0x00000001); homepageTrigger_ = null; if (homepageTriggerBuilder_ != null) { homepageTriggerBuilder_.dispose(); homepageTriggerBuilder_ = null; } onChanged(); return this; } /** * * * <pre> * If present, this overrides the configuration from * `addOns.common.homepageTrigger`. * </pre> * * <code>.google.apps.script.type.HomepageExtensionPoint homepage_trigger = 1;</code> */ public com.google.apps.script.type.HomepageExtensionPoint.Builder getHomepageTriggerBuilder() { bitField0_ |= 0x00000001; onChanged(); return getHomepageTriggerFieldBuilder().getBuilder(); } /** * * * <pre> * If present, this overrides the configuration from * `addOns.common.homepageTrigger`. * </pre> * * <code>.google.apps.script.type.HomepageExtensionPoint homepage_trigger = 1;</code> */ public com.google.apps.script.type.HomepageExtensionPointOrBuilder getHomepageTriggerOrBuilder() { if (homepageTriggerBuilder_ != null) { return homepageTriggerBuilder_.getMessageOrBuilder(); } else { return homepageTrigger_ == null ? 
com.google.apps.script.type.HomepageExtensionPoint.getDefaultInstance() : homepageTrigger_; } } /** * * * <pre> * If present, this overrides the configuration from * `addOns.common.homepageTrigger`. * </pre> * * <code>.google.apps.script.type.HomepageExtensionPoint homepage_trigger = 1;</code> */ private com.google.protobuf.SingleFieldBuilderV3< com.google.apps.script.type.HomepageExtensionPoint, com.google.apps.script.type.HomepageExtensionPoint.Builder, com.google.apps.script.type.HomepageExtensionPointOrBuilder> getHomepageTriggerFieldBuilder() { if (homepageTriggerBuilder_ == null) { homepageTriggerBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< com.google.apps.script.type.HomepageExtensionPoint, com.google.apps.script.type.HomepageExtensionPoint.Builder, com.google.apps.script.type.HomepageExtensionPointOrBuilder>( getHomepageTrigger(), getParentForChildren(), isClean()); homepageTrigger_ = null; } return homepageTriggerBuilder_; } private com.google.apps.script.type.drive.DriveExtensionPoint onItemsSelectedTrigger_; private com.google.protobuf.SingleFieldBuilderV3< com.google.apps.script.type.drive.DriveExtensionPoint, com.google.apps.script.type.drive.DriveExtensionPoint.Builder, com.google.apps.script.type.drive.DriveExtensionPointOrBuilder> onItemsSelectedTriggerBuilder_; /** * * * <pre> * Corresponds to behvior that should execute when items are selected * in relevant Drive view (e.g. the My Drive Doclist). * </pre> * * <code>.google.apps.script.type.drive.DriveExtensionPoint on_items_selected_trigger = 2; * </code> * * @return Whether the onItemsSelectedTrigger field is set. */ public boolean hasOnItemsSelectedTrigger() { return ((bitField0_ & 0x00000002) != 0); } /** * * * <pre> * Corresponds to behvior that should execute when items are selected * in relevant Drive view (e.g. the My Drive Doclist). 
* </pre> * * <code>.google.apps.script.type.drive.DriveExtensionPoint on_items_selected_trigger = 2; * </code> * * @return The onItemsSelectedTrigger. */ public com.google.apps.script.type.drive.DriveExtensionPoint getOnItemsSelectedTrigger() { if (onItemsSelectedTriggerBuilder_ == null) { return onItemsSelectedTrigger_ == null ? com.google.apps.script.type.drive.DriveExtensionPoint.getDefaultInstance() : onItemsSelectedTrigger_; } else { return onItemsSelectedTriggerBuilder_.getMessage(); } } /** * * * <pre> * Corresponds to behvior that should execute when items are selected * in relevant Drive view (e.g. the My Drive Doclist). * </pre> * * <code>.google.apps.script.type.drive.DriveExtensionPoint on_items_selected_trigger = 2; * </code> */ public Builder setOnItemsSelectedTrigger( com.google.apps.script.type.drive.DriveExtensionPoint value) { if (onItemsSelectedTriggerBuilder_ == null) { if (value == null) { throw new NullPointerException(); } onItemsSelectedTrigger_ = value; } else { onItemsSelectedTriggerBuilder_.setMessage(value); } bitField0_ |= 0x00000002; onChanged(); return this; } /** * * * <pre> * Corresponds to behvior that should execute when items are selected * in relevant Drive view (e.g. the My Drive Doclist). * </pre> * * <code>.google.apps.script.type.drive.DriveExtensionPoint on_items_selected_trigger = 2; * </code> */ public Builder setOnItemsSelectedTrigger( com.google.apps.script.type.drive.DriveExtensionPoint.Builder builderForValue) { if (onItemsSelectedTriggerBuilder_ == null) { onItemsSelectedTrigger_ = builderForValue.build(); } else { onItemsSelectedTriggerBuilder_.setMessage(builderForValue.build()); } bitField0_ |= 0x00000002; onChanged(); return this; } /** * * * <pre> * Corresponds to behvior that should execute when items are selected * in relevant Drive view (e.g. the My Drive Doclist). 
* </pre> * * <code>.google.apps.script.type.drive.DriveExtensionPoint on_items_selected_trigger = 2; * </code> */ public Builder mergeOnItemsSelectedTrigger( com.google.apps.script.type.drive.DriveExtensionPoint value) { if (onItemsSelectedTriggerBuilder_ == null) { if (((bitField0_ & 0x00000002) != 0) && onItemsSelectedTrigger_ != null && onItemsSelectedTrigger_ != com.google.apps.script.type.drive.DriveExtensionPoint.getDefaultInstance()) { getOnItemsSelectedTriggerBuilder().mergeFrom(value); } else { onItemsSelectedTrigger_ = value; } } else { onItemsSelectedTriggerBuilder_.mergeFrom(value); } if (onItemsSelectedTrigger_ != null) { bitField0_ |= 0x00000002; onChanged(); } return this; } /** * * * <pre> * Corresponds to behvior that should execute when items are selected * in relevant Drive view (e.g. the My Drive Doclist). * </pre> * * <code>.google.apps.script.type.drive.DriveExtensionPoint on_items_selected_trigger = 2; * </code> */ public Builder clearOnItemsSelectedTrigger() { bitField0_ = (bitField0_ & ~0x00000002); onItemsSelectedTrigger_ = null; if (onItemsSelectedTriggerBuilder_ != null) { onItemsSelectedTriggerBuilder_.dispose(); onItemsSelectedTriggerBuilder_ = null; } onChanged(); return this; } /** * * * <pre> * Corresponds to behvior that should execute when items are selected * in relevant Drive view (e.g. the My Drive Doclist). * </pre> * * <code>.google.apps.script.type.drive.DriveExtensionPoint on_items_selected_trigger = 2; * </code> */ public com.google.apps.script.type.drive.DriveExtensionPoint.Builder getOnItemsSelectedTriggerBuilder() { bitField0_ |= 0x00000002; onChanged(); return getOnItemsSelectedTriggerFieldBuilder().getBuilder(); } /** * * * <pre> * Corresponds to behvior that should execute when items are selected * in relevant Drive view (e.g. the My Drive Doclist). 
* </pre> * * <code>.google.apps.script.type.drive.DriveExtensionPoint on_items_selected_trigger = 2; * </code> */ public com.google.apps.script.type.drive.DriveExtensionPointOrBuilder getOnItemsSelectedTriggerOrBuilder() { if (onItemsSelectedTriggerBuilder_ != null) { return onItemsSelectedTriggerBuilder_.getMessageOrBuilder(); } else { return onItemsSelectedTrigger_ == null ? com.google.apps.script.type.drive.DriveExtensionPoint.getDefaultInstance() : onItemsSelectedTrigger_; } } /** * * * <pre> * Corresponds to behvior that should execute when items are selected * in relevant Drive view (e.g. the My Drive Doclist). * </pre> * * <code>.google.apps.script.type.drive.DriveExtensionPoint on_items_selected_trigger = 2; * </code> */ private com.google.protobuf.SingleFieldBuilderV3< com.google.apps.script.type.drive.DriveExtensionPoint, com.google.apps.script.type.drive.DriveExtensionPoint.Builder, com.google.apps.script.type.drive.DriveExtensionPointOrBuilder> getOnItemsSelectedTriggerFieldBuilder() { if (onItemsSelectedTriggerBuilder_ == null) { onItemsSelectedTriggerBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< com.google.apps.script.type.drive.DriveExtensionPoint, com.google.apps.script.type.drive.DriveExtensionPoint.Builder, com.google.apps.script.type.drive.DriveExtensionPointOrBuilder>( getOnItemsSelectedTrigger(), getParentForChildren(), isClean()); onItemsSelectedTrigger_ = null; } return onItemsSelectedTriggerBuilder_; } @java.lang.Override public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:google.apps.script.type.drive.DriveAddOnManifest) } // @@protoc_insertion_point(class_scope:google.apps.script.type.drive.DriveAddOnManifest) private static 
final com.google.apps.script.type.drive.DriveAddOnManifest DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new com.google.apps.script.type.drive.DriveAddOnManifest(); } public static com.google.apps.script.type.drive.DriveAddOnManifest getDefaultInstance() { return DEFAULT_INSTANCE; } private static final com.google.protobuf.Parser<DriveAddOnManifest> PARSER = new com.google.protobuf.AbstractParser<DriveAddOnManifest>() { @java.lang.Override public DriveAddOnManifest parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { Builder builder = newBuilder(); try { builder.mergeFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(builder.buildPartial()); } catch (com.google.protobuf.UninitializedMessageException e) { throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException(e) .setUnfinishedMessage(builder.buildPartial()); } return builder.buildPartial(); } }; public static com.google.protobuf.Parser<DriveAddOnManifest> parser() { return PARSER; } @java.lang.Override public com.google.protobuf.Parser<DriveAddOnManifest> getParserForType() { return PARSER; } @java.lang.Override public com.google.apps.script.type.drive.DriveAddOnManifest getDefaultInstanceForType() { return DEFAULT_INSTANCE; } }
apache/rocketmq-connect
36,401
connectors/rocketmq-replicator/src/main/java/org/apache/rocketmq/replicator/ReplicatorSourceTask.java
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.rocketmq.replicator;

import com.alibaba.fastjson.JSON;
import io.openmessaging.KeyValue;
import io.openmessaging.connector.api.component.task.source.SourceTask;
import io.openmessaging.connector.api.data.*;
import io.openmessaging.internal.DefaultKeyValue;
import org.apache.commons.lang3.StringUtils;
import org.apache.rocketmq.acl.common.AclClientRPCHook;
import org.apache.rocketmq.acl.common.SessionCredentials;
import org.apache.rocketmq.client.consumer.DefaultLitePullConsumer;
import org.apache.rocketmq.client.exception.MQBrokerException;
import org.apache.rocketmq.client.exception.MQClientException;
import org.apache.rocketmq.common.MixAll;
import org.apache.rocketmq.common.UtilAll;
import org.apache.rocketmq.common.admin.ConsumeStats;
import org.apache.rocketmq.common.admin.OffsetWrapper;
import org.apache.rocketmq.common.message.MessageConst;
import org.apache.rocketmq.common.message.MessageExt;
import org.apache.rocketmq.common.message.MessageQueue;
import org.apache.rocketmq.common.protocol.body.ClusterInfo;
import org.apache.rocketmq.common.protocol.route.BrokerData;
import org.apache.rocketmq.common.subscription.SubscriptionGroupConfig;
import org.apache.rocketmq.remoting.RPCHook;
import org.apache.rocketmq.remoting.exception.RemotingConnectException;
import org.apache.rocketmq.remoting.exception.RemotingException;
import org.apache.rocketmq.remoting.exception.RemotingSendRequestException;
import org.apache.rocketmq.remoting.exception.RemotingTimeoutException;
import org.apache.rocketmq.replicator.common.LoggerName;
import org.apache.rocketmq.replicator.config.ConsumeFromWhere;
import org.apache.rocketmq.replicator.config.FailoverStrategy;
import org.apache.rocketmq.replicator.config.ReplicatorConnectorConfig;
import org.apache.rocketmq.replicator.context.UnAckMessage;
import org.apache.rocketmq.replicator.exception.StartTaskException;
import org.apache.rocketmq.replicator.stats.TpsLimiter;
import org.apache.rocketmq.replicator.utils.ReplicatorUtils;
import org.apache.rocketmq.tools.admin.DefaultMQAdminExt;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.nio.charset.StandardCharsets;
import java.util.*;
import java.util.concurrent.*;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.concurrent.atomic.AtomicLong;
import java.util.concurrent.locks.ReadWriteLock;
import java.util.concurrent.locks.ReentrantReadWriteLock;

import static org.apache.rocketmq.replicator.utils.ReplicatorUtils.QUEUE_OFFSET;

/**
 * Source-side task of the RocketMQ replicator.
 *
 * <p>Pulls messages from the source cluster with a {@link DefaultLitePullConsumer}
 * (auto-commit disabled), converts them into {@link ConnectRecord}s, and tracks
 * per-queue un-acked offsets in {@link #queue2Offsets}. When the sink side reports
 * success via {@link #commit(ConnectRecord, Map)}, the message is removed from the
 * in-flight map and the resulting "safe" offset is staged in {@link #prepareCommitOffset};
 * a background schedule flushes staged offsets back to the source cluster.
 *
 * <p>Circular replication is prevented by stamping every record with a
 * born-source property and skipping messages whose born-source already contains
 * this task's destination.
 *
 * @author osgoo
 */
public class ReplicatorSourceTask extends SourceTask {
    private static final Logger log = LoggerFactory.getLogger(ReplicatorSourceTask.class);
    private static final Logger buglog = LoggerFactory.getLogger(LoggerName.CONNECT_BUG);
    private static final Logger workerErrorMsgLog = LoggerFactory.getLogger(LoggerName.WORKER_ERROR_MSG_ID);

    private ReplicatorConnectorConfig connectorConfig = new ReplicatorConnectorConfig();
    private DefaultMQAdminExt srcMQAdminExt;

    /** Periodically reports replication lag (see {@link #replicateLagMetric()}). */
    private ScheduledExecutorService metricsMonitorExecutorService =
        Executors.newSingleThreadScheduledExecutor(r -> new Thread(r, "Replicator_lag_metrics"));
    /** Periodically flushes staged consume offsets back to the source cluster. */
    private ScheduledExecutorService commitOffsetScheduleService =
        Executors.newSingleThreadScheduledExecutor(r -> new Thread(r, "Commit_offset_schedule"));

    /** Lag-metric report period in milliseconds. */
    private final long period = 60 * 1000;
    private DefaultLitePullConsumer pullConsumer;
    private AtomicLong noMessageCounter = new AtomicLong();
    private Random random = new Random();
    /** Sampling threshold for the "no new message" log to keep it quiet. */
    private final int printLogThreshold = 100000;
    private int tpsLimit;
    /** Number of pulled-but-not-yet-acked messages across all queues. */
    private AtomicInteger unAckCounter = new AtomicInteger();
    /** Backpressure limit: poll() stalls while more than this many messages are in flight. */
    private static final int MAX_UNACK = 5000;

    /** Per-queue map of in-flight offsets, ordered so firstKey() is the lowest un-acked offset. */
    private ConcurrentHashMap<MessageQueue, TreeMap<Long/* offset */, UnAckMessage/* can commit */>> queue2Offsets = new ConcurrentHashMap<>();
    /** Highest offset ever registered per queue; used to derive the commit offset when a queue drains. */
    private ConcurrentHashMap<MessageQueue, Long> mq2MaxOffsets = new ConcurrentHashMap<>();
    /** Per-queue lock guarding queue2Offsets/mq2MaxOffsets mutation. */
    private ConcurrentHashMap<MessageQueue, ReadWriteLock> locks = new ConcurrentHashMap<>();
    /** Queues assigned to this task (parsed from the divided-normal-queues config). */
    private List<MessageQueue> normalQueues = new ArrayList<>();
    private AtomicLong circleReplicateCounter = new AtomicLong();
    /** Highest offset per queue that is safe to commit (monotonically increased). */
    private ConcurrentHashMap<MessageQueue, AtomicLong> prepareCommitOffset = new ConcurrentHashMap<>();
    private AtomicInteger pollCounter = new AtomicInteger();
    private AtomicInteger rateCounter = new AtomicInteger();

    private static final String REPLICATOR_SRC_TOPIC_PROPERTY_KEY = "REPLICATOR-source-topic";
    // msg born timestamp on src
    private static final String REPLICATOR_BORN_SOURCE_TIMESTAMP = "REPLICATOR-BORN-SOURCE-TIMESTAMP";
    // msg born from where
    private static final String REPLICATOR_BORN_SOURCE_CLOUD_CLUSTER_REGION = "REPLICATOR-BORN-SOURCE";
    // msg born from which topic
    private static final String REPLICATOR_BORE_INSTANCEID_TOPIC = "REPLICATOR-BORN-TOPIC";
    // src message id equals MessageConst.PROPERTY_EXTEND_UNIQ_INFO
    private static final String REPLICATOR_SRC_MESSAGE_ID = "EXTEND_UNIQ_INFO";
    // src dup info equals MessageConst.DUP_INFO
    private static final String REPLICATOR_DUP_INFO = "DUP_INFO";

    // following sys reserved properties
    public static final String PROPERTY_TIMER_DELAY_SEC = "TIMER_DELAY_SEC";
    public static final String PROPERTY_TIMER_DELIVER_MS = "TIMER_DELIVER_MS";
    public static final String PROPERTY_TIMER_IN_MS = "TIMER_IN_MS";
    public static final String PROPERTY_TIMER_OUT_MS = "TIMER_OUT_MS";
    public static final String PROPERTY_TIMER_ENQUEUE_MS = "TIMER_ENQUEUE_MS";
    public static final String PROPERTY_TIMER_DEQUEUE_MS = "TIMER_DEQUEUE_MS";
    public static final String PROPERTY_TIMER_ROLL_TIMES = "TIMER_ROLL_TIMES";
    public static final String PROPERTY_TIMER_DEL_UNIQKEY = "TIMER_DEL_UNIQKEY";
    public static final String PROPERTY_TIMER_DELAY_LEVEL = "TIMER_DELAY_LEVEL";
    public static final String PROPERTY_POP_CK = "POP_CK";
    public static final String PROPERTY_POP_CK_OFFSET = "POP_CK_OFFSET";
    public static final String PROPERTY_FIRST_POP_TIME = "1ST_POP_TIME";
    public static final String PROPERTY_VTOA_TUNNEL_ID = "VTOA_TUNNEL_ID";

    /**
     * System properties that are re-published under an "MQ-SYS-" prefix instead of
     * verbatim, so the destination broker does not interpret them.
     * Built in a static initializer (not double-brace init, which would create an
     * anonymous subclass holding a permanent reference).
     */
    private static final Set<String> MQ_SYS_KEYS;

    static {
        Set<String> keys = new HashSet<>();
        keys.add(MessageConst.PROPERTY_MIN_OFFSET);
        keys.add(MessageConst.PROPERTY_TRACE_SWITCH);
        keys.add(MessageConst.PROPERTY_MAX_OFFSET);
        keys.add(MessageConst.PROPERTY_MSG_REGION);
        keys.add(MessageConst.PROPERTY_REAL_TOPIC);
        keys.add(MessageConst.PROPERTY_REAL_QUEUE_ID);
        keys.add(MessageConst.PROPERTY_PRODUCER_GROUP);
        keys.add(MessageConst.PROPERTY_UNIQ_CLIENT_MESSAGE_ID_KEYIDX);
        keys.add(REPLICATOR_DUP_INFO);
        keys.add(REPLICATOR_SRC_MESSAGE_ID);
        keys.add(MessageConst.PROPERTY_WAIT_STORE_MSG_OK);
        keys.add(MessageConst.PROPERTY_TAGS);
        keys.add(MessageConst.PROPERTY_DELAY_TIME_LEVEL);
        keys.add(MessageConst.PROPERTY_TRANSACTION_PREPARED);
        keys.add(MessageConst.PROPERTY_BUYER_ID);
        keys.add(MessageConst.PROPERTY_ORIGIN_MESSAGE_ID);
        keys.add(MessageConst.PROPERTY_TRANSFER_FLAG);
        keys.add(MessageConst.PROPERTY_CORRECTION_FLAG);
        keys.add(MessageConst.PROPERTY_MQ2_FLAG);
        keys.add(MessageConst.PROPERTY_RECONSUME_TIME);
        keys.add(MessageConst.PROPERTY_MAX_RECONSUME_TIMES);
        keys.add(MessageConst.PROPERTY_CONSUME_START_TIMESTAMP);
        keys.add(MessageConst.PROPERTY_CHECK_IMMUNITY_TIME_IN_SECONDS);
        keys.add(MessageConst.PROPERTY_TRANSACTION_PREPARED_QUEUE_OFFSET);
        keys.add(MessageConst.PROPERTY_INSTANCE_ID);
        keys.add(PROPERTY_TIMER_DELAY_SEC);
        keys.add(PROPERTY_TIMER_DELIVER_MS);
        keys.add(PROPERTY_TIMER_IN_MS);
        keys.add(PROPERTY_TIMER_OUT_MS);
        keys.add(PROPERTY_TIMER_ENQUEUE_MS);
        keys.add(PROPERTY_TIMER_DEQUEUE_MS);
        keys.add(PROPERTY_TIMER_ROLL_TIMES);
        keys.add(PROPERTY_TIMER_DEL_UNIQKEY);
        keys.add(PROPERTY_TIMER_DELAY_LEVEL);
        keys.add(PROPERTY_POP_CK);
        keys.add(PROPERTY_POP_CK_OFFSET);
        keys.add(PROPERTY_FIRST_POP_TIME);
        keys.add(PROPERTY_VTOA_TUNNEL_ID);
        MQ_SYS_KEYS = Collections.unmodifiableSet(keys);
    }

    /**
     * (Re)creates the admin client against the source name server, shutting down
     * any previous instance first. ACL credentials are attached when enabled.
     *
     * @throws MQClientException if the admin client fails to start
     */
    private void buildMqAdminClient() throws MQClientException {
        if (srcMQAdminExt != null) {
            srcMQAdminExt.shutdown();
        }
        RPCHook rpcHook = null;
        if (connectorConfig.isSrcAclEnable()) {
            if (StringUtils.isNotEmpty(connectorConfig.getSrcAccessKey()) && StringUtils.isNotEmpty(connectorConfig.getSrcSecretKey())) {
                String srcAccessKey = connectorConfig.getSrcAccessKey();
                String srcSecretKey = connectorConfig.getSrcSecretKey();
                rpcHook = new AclClientRPCHook(new SessionCredentials(srcAccessKey, srcSecretKey));
            } else {
                rpcHook = new AclClientRPCHook(new SessionCredentials());
            }
        }
        srcMQAdminExt = new DefaultMQAdminExt(rpcHook);
        srcMQAdminExt.setNamesrvAddr(connectorConfig.getSrcEndpoint());
        // random suffixes keep group/instance unique when several tasks run in one JVM
        srcMQAdminExt.setAdminExtGroup(ReplicatorConnectorConfig.ADMIN_GROUP + "-" + UUID.randomUUID());
        srcMQAdminExt.setInstanceName(connectorConfig.generateSourceString() + "-" + UUID.randomUUID());
        log.info("initAdminThread : " + Thread.currentThread().getName());
        srcMQAdminExt.start();
    }

    /**
     * Creates/updates the pull-consumer subscription group on every master broker of
     * {@code clusterName}. A failure on one broker is logged and retried after 1s of
     * backoff only in the sense that the loop continues with the next broker.
     *
     * @param clusterName           source cluster whose brokers receive the group config
     * @param subscriptionGroupName group name to create or update
     */
    private void createAndUpdatePullConsumerGroup(String clusterName,
        String subscriptionGroupName) throws InterruptedException, MQBrokerException, RemotingTimeoutException, RemotingSendRequestException, RemotingConnectException {
        SubscriptionGroupConfig subscriptionGroupConfig = new SubscriptionGroupConfig();
        subscriptionGroupConfig.setGroupName(subscriptionGroupName);
        ClusterInfo clusterInfo = srcMQAdminExt.examineBrokerClusterInfo();
        Collection<BrokerData> brokerDatas = clusterInfo.getBrokerAddrTable().values();
        Set<String> brokerNames = null;
        if (StringUtils.isNotEmpty(clusterName)) {
            brokerNames = clusterInfo.getClusterAddrTable().get(clusterName);
        }
        // collect the master (brokerId == 0) address of every broker in the cluster
        Set<String> masterSet = new HashSet<>();
        for (BrokerData brokerData : brokerDatas) {
            for (Map.Entry<Long, String> entry : brokerData.getBrokerAddrs().entrySet()) {
                if (null != brokerNames && brokerNames.contains(brokerData.getBrokerName()) && entry.getKey().equals(0L)) {
                    masterSet.add(entry.getValue());
                }
            }
        }
        for (String addr : masterSet) {
            try {
                srcMQAdminExt.createAndUpdateSubscriptionGroupConfig(addr, subscriptionGroupConfig);
                log.info("create subscription group to {} success.", addr);
            } catch (Exception e) {
                log.error("create subscription error,", e);
                Thread.sleep(1000);
            }
        }
    }

    /**
     * Lazily builds the lite pull consumer (idempotent; synchronized so only one
     * instance is ever created). Auto-commit is disabled because offsets are only
     * committed after the sink acknowledges delivery.
     */
    private synchronized void buildConsumer() {
        if (pullConsumer != null) {
            return;
        }
        String consumerGroup = connectorConfig.generateTaskIdWithIndexAsConsumerGroup();
        log.info("prepare to use " + consumerGroup + " as consumerGroup start consumer.");
        RPCHook rpcHook = null;
        if (connectorConfig.isSrcAclEnable()) {
            if (StringUtils.isNotEmpty(connectorConfig.getSrcAccessKey()) && StringUtils.isNotEmpty(connectorConfig.getSrcSecretKey())) {
                String srcAccessKey = connectorConfig.getSrcAccessKey();
                String srcSecretKey = connectorConfig.getSrcSecretKey();
                rpcHook = new AclClientRPCHook(new SessionCredentials(srcAccessKey, srcSecretKey));
            } else {
                rpcHook = new AclClientRPCHook(new SessionCredentials());
            }
        }
        pullConsumer = new DefaultLitePullConsumer(consumerGroup, rpcHook);
        String namesrvAddr = connectorConfig.getSrcEndpoint();
        pullConsumer.setNamesrvAddr(namesrvAddr);
        pullConsumer.setInstanceName(connectorConfig.generateSourceString() + "-" + UUID.randomUUID().toString());
        // offsets are committed manually after sink-side ack, never automatically
        pullConsumer.setAutoCommit(false);
        pullConsumer.setPullBatchSize(32);
    }

    /**
     * Assigns the statically divided queues to the consumer (no rebalance — queues
     * are pre-divided by the connector), applies per-topic tag filters, and starts it.
     *
     * @throws MQClientException if the consumer fails to start
     */
    private void subscribeTopicAndStartConsumer() throws MQClientException {
        ConsumeFromWhere consumeFromWhere = connectorConfig.getConsumeFromWhere();
        pullConsumer.setConsumeFromWhere(org.apache.rocketmq.common.consumer.ConsumeFromWhere.valueOf(consumeFromWhere.name()));
        log.info("litePullConsumer use " + consumeFromWhere.name());
        if (consumeFromWhere == ConsumeFromWhere.CONSUME_FROM_TIMESTAMP) {
            long consumeFromTimestamp = connectorConfig.getConsumeFromTimestamp();
            String timestamp = UtilAll.timeMillisToHumanString3(consumeFromTimestamp);
            pullConsumer.setConsumeTimestamp(timestamp);
            log.info("litePullConsumer consume start at " + timestamp);
        }
        // init normal queues
        String normalQueueStrs = connectorConfig.getDividedNormalQueues();
        List<MessageQueue> allQueues = parseMessageQueues(normalQueueStrs);
        normalQueues.addAll(allQueues);
        log.info("allQueues : " + allQueues);
        for (MessageQueue mq : allQueues) {
            log.info("mq : " + mq.getBrokerName() + mq.getQueueId() + " " + mq.hashCode() + mq.getClass());
        }
        for (MessageQueue mq : allQueues) {
            String topic = mq.getTopic();
            String tag = ReplicatorConnectorConfig.getSrcTopicTagMap(connectorConfig.getSrcInstanceId(), connectorConfig.getSrcTopicTags()).get(topic);
            pullConsumer.setSubExpressionForAssign(topic, tag);
        }
        try {
            pullConsumer.start();
            pullConsumer.assign(allQueues);
        } catch (MQClientException e) {
            log.error("litePullConsumer start error", e);
            throw e;
        }
    }

    /**
     * Deserializes the JSON array of divided queues into a mutable list.
     *
     * @param queueStrs JSON array of {@link MessageQueue} objects
     * @return mutable list of parsed queues
     */
    private List<MessageQueue> parseMessageQueues(String queueStrs) {
        log.info("prepare to parse queueStr 2 obj : " + queueStrs);
        return new ArrayList<>(JSON.parseArray(queueStrs, MessageQueue.class));
    }

    /** Starts the lag-metric and offset-commit background schedules. */
    private void execScheduleTask() {
        metricsMonitorExecutorService.scheduleAtFixedRate(() -> {
            try {
                replicateLagMetric();
            } catch (Throwable e) {
                log.error("replicate log metric error", e);
            }
        }, period, period, TimeUnit.MILLISECONDS);
        commitOffsetScheduleService.scheduleAtFixedRate(() -> {
            try {
                commitOffsetSchedule();
            } catch (Throwable e) {
                log.error("commit offset error", e);
            }
        }, connectorConfig.getCommitOffsetIntervalMs(), connectorConfig.getCommitOffsetIntervalMs(), TimeUnit.MILLISECONDS);
    }

    /** Snapshots the staged per-queue offsets and commits them synchronously. */
    private void commitOffsetSchedule() {
        Map<MessageQueue, Long> commitOffsetTable = new HashMap<>();
        prepareCommitOffset.forEach((messageQueue, offset) -> commitOffsetTable.put(messageQueue, offset.get()));
        pullConsumer.commitSync(commitOffsetTable, true);
    }

    /**
     * Computes replication lag (count and latency) over the queues assigned to this
     * task. The aggregated values and metric keys are currently computed but not
     * published anywhere visible here — presumably a metrics emitter hook is wired
     * elsewhere or planned; TODO confirm before removing.
     * On remoting/client failures the admin client is rebuilt.
     */
    private void replicateLagMetric() {
        String consumerGroup = connectorConfig.generateTaskIdWithIndexAsConsumerGroup();
        try {
            ConsumeStats consumeStats = srcMQAdminExt.examineConsumeStats(consumerGroup);
            AtomicLong normalDelayCount = new AtomicLong();
            AtomicLong normalDelayMs = new AtomicLong();
            Map<MessageQueue, OffsetWrapper> offsets = consumeStats.getOffsetTable();
            offsets.forEach((messageQueue, offsetWrapper) -> {
                long delayMs = System.currentTimeMillis() - offsetWrapper.getLastTimestamp();
                long delayCount = offsetWrapper.getBrokerOffset() - offsetWrapper.getConsumerOffset();
                if (normalQueues.contains(messageQueue)) {
                    normalDelayCount.addAndGet(delayCount);
                    normalDelayMs.set(delayMs);
                }
                // queues not assigned to this task are intentionally ignored
            });
            List<String> delayNumsKeys = new ArrayList<>();
            List<String> delayMsKeys = new ArrayList<>();
            String normalNumKey = connectorConfig.getConnectorId();
            delayNumsKeys.add(normalNumKey);
            String normalMsKey = connectorConfig.getConnectorId();
            delayMsKeys.add(normalMsKey);
        } catch (RemotingException | MQClientException e) {
            log.error("occur remoting or mqclient exception, retry build mqadminclient", e);
            try {
                buildMqAdminClient();
            } catch (MQClientException mqClientException) {
                // FIX: log the rebuild failure itself, not the outer exception
                log.error("rebuild mqadmin client error", mqClientException);
            }
        } catch (Exception e) {
            log.error(" occur unknown exception", e);
        }
    }

    /**
     * Registers a pulled message as in-flight for its queue.
     *
     * @param mq            queue the message came from
     * @param currentOffset the message's queue offset
     * @param needAck       ack count required before the offset may be committed
     * @param msg           the pulled message
     * @return {@code true} on success, {@code false} if interrupted while locking
     */
    public synchronized boolean putPulledQueueOffset(MessageQueue mq, long currentOffset, int needAck, MessageExt msg) {
        log.trace("putPulledQueueOffset " + mq + ", currentOffset : " + currentOffset + ", ackCount : " + needAck);
        TreeMap<Long, UnAckMessage> offsets = queue2Offsets.computeIfAbsent(mq, k -> new TreeMap<>());
        ReadWriteLock mqLock = locks.computeIfAbsent(mq, k -> new ReentrantReadWriteLock());
        try {
            mqLock.writeLock().lockInterruptibly();
            try {
                UnAckMessage old = offsets.put(currentOffset, new UnAckMessage(needAck, msg, currentOffset, mq));
                if (null == old) {
                    // offsets arrive in pull order, so the newest registration is the max
                    mq2MaxOffsets.put(mq, currentOffset);
                    unAckCounter.incrementAndGet();
                }
                return true;
            } finally {
                mqLock.writeLock().unlock();
            }
        } catch (InterruptedException e) {
            log.error("lock error", e);
            // FIX: restore the interrupt flag so callers can observe the interruption
            Thread.currentThread().interrupt();
            return false;
        }
    }

    /**
     * Pulls a batch from the source cluster and converts it to {@link ConnectRecord}s.
     * Applies backpressure when too many messages are un-acked and honors the TPS
     * limiter. Messages filtered out by conversion (circular replication) are acked
     * and committed immediately.
     *
     * @return records to hand to the sink, or {@code null} when nothing was produced
     */
    @Override
    public List<ConnectRecord> poll() throws InterruptedException {
        if (unAckCounter.get() > MAX_UNACK) {
            Thread.sleep(2);
            if (pollCounter.incrementAndGet() % 1000 == 0) {
                // FIX: message previously claimed a 10000 threshold; report the real limit
                log.info("poll unAckCount > " + MAX_UNACK + " sleep 2ms");
            }
            return null;
        }
        // sync wait for rate limit
        boolean overflow = TpsLimiter.isOverFlow(sourceTaskContext.getTaskName(), tpsLimit);
        if (overflow) {
            if (rateCounter.incrementAndGet() % 1000 == 0) {
                log.info("rateLimiter occur.");
            }
            return null;
        }
        try {
            List<MessageExt> messageExts = pullConsumer.poll();
            if (null != messageExts && messageExts.size() > 0) {
                List<ConnectRecord> connectRecords = new ArrayList<>(messageExts.size());
                int index = 0;
                for (MessageExt msg : messageExts) {
                    MessageQueue mq = new MessageQueue(msg.getTopic(), msg.getBrokerName(), msg.getQueueId());
                    boolean put = putPulledQueueOffset(mq, msg.getQueueOffset(), 1, msg);
                    if (!put) {
                        log.error("bug");
                        // roll back the bookkeeping of messages already registered in this batch
                        int i = 0;
                        for (MessageExt tmp : messageExts) {
                            if (i++ < index) {
                                // FIX: use each message's own queue — a lite-pull batch can
                                // span queues; the old code reused the failed message's queue
                                MessageQueue tmpMq = new MessageQueue(tmp.getTopic(), tmp.getBrokerName(), tmp.getQueueId());
                                removeMessage(tmpMq, tmp.getQueueOffset());
                            }
                        }
                        return null;
                    }
                    index++;
                    ConnectRecord connectRecord = convertToSinkDataEntry(msg);
                    try {
                        if (connectRecord != null) {
                            connectRecords.add(connectRecord);
                            // FIX: count against the same key isOverFlow() checks above
                            // (was connectorId, so the checked counter never accumulated)
                            TpsLimiter.addPv(sourceTaskContext.getTaskName(), 1);
                        }
                    } finally {
                        // conversion returned null (e.g. circular message): ack immediately
                        if (connectRecord == null) {
                            long canCommitOffset = removeMessage(mq, msg.getQueueOffset());
                            commitOffset(mq, canCommitOffset);
                        }
                    }
                }
                return connectRecords;
            } else {
                if ((noMessageCounter.incrementAndGet() + random.nextInt(10)) % printLogThreshold == 0) {
                    log.info("no new message");
                }
            }
        } catch (Exception e) {
            log.error("pull message error,", e);
        }
        return null;
    }

    /**
     * Maps a source topic to the destination topic (with destination namespace).
     *
     * @return the destination topic, or {@code null} for retry/DLQ topics
     */
    private String swapTopic(String topic) {
        if (topic.startsWith("%RETRY%") || topic.startsWith("%DLQ%")) {
            log.error("topic : " + topic + " is retry or dlq.");
            return null;
        }
        String targetTopic = connectorConfig.getDestTopic();
        // isBlank covers both empty and whitespace-only
        if (StringUtils.isBlank(targetTopic)) {
            return ReplicatorUtils.buildTopicWithNamespace(topic, connectorConfig.getDestInstanceId());
        }
        return ReplicatorUtils.buildTopicWithNamespace(targetTopic, connectorConfig.getDestInstanceId());
    }

    /**
     * Converts a pulled message into a {@link ConnectRecord}, carrying over user
     * properties, prefixing system properties, and stamping replication metadata.
     *
     * @return the record, or {@code null} when the message must be skipped
     *         (already replicated from this task's destination)
     */
    private ConnectRecord convertToSinkDataEntry(MessageExt message) {
        String topic = message.getTopic();
        Map<String, String> properties = message.getProperties();
        log.debug("srcProperties : " + properties);
        Long timestamp;
        ConnectRecord sinkDataEntry = null;
        String connectTimestamp = properties.get(ReplicatorConnectorConfig.CONNECT_TIMESTAMP);
        timestamp = StringUtils.isNotEmpty(connectTimestamp) ? Long.parseLong(connectTimestamp) : System.currentTimeMillis();
        Schema schema = SchemaBuilder.string().build();
        byte[] body = message.getBody();
        String destTopic = swapTopic(topic);
        if (destTopic == null) {
            if (!connectorConfig.getFailoverStrategy().equals(FailoverStrategy.DISMISS)) {
                throw new RuntimeException("cannot find dest topic.");
            } else {
                // NOTE(review): under DISMISS the record still proceeds with a null dest
                // topic extension — confirm the sink drops it rather than failing
                log.error("swap topic got null, topic : " + topic);
            }
        }
        RecordPartition recordPartition = ReplicatorUtils.convertToRecordPartition(topic, message.getBrokerName(), message.getQueueId());
        RecordOffset recordOffset = ReplicatorUtils.convertToRecordOffset(message.getQueueOffset());
        String bodyStr = new String(body, StandardCharsets.UTF_8);
        sinkDataEntry = new ConnectRecord(recordPartition, recordOffset, timestamp, schema, bodyStr);
        KeyValue keyValue = new DefaultKeyValue();
        if (org.apache.commons.collections.MapUtils.isNotEmpty(properties)) {
            for (Map.Entry<String, String> entry : properties.entrySet()) {
                if (MQ_SYS_KEYS.contains(entry.getKey())) {
                    keyValue.put("MQ-SYS-" + entry.getKey(), entry.getValue());
                } else if (entry.getKey().startsWith("connect-ext-")) {
                    keyValue.put(entry.getKey().replaceAll("connect-ext-", ""), entry.getValue());
                } else {
                    keyValue.put(entry.getKey(), entry.getValue());
                }
            }
        }
        // check bornSource have destinationStr + ","
        String bornSource = keyValue.getString(REPLICATOR_BORN_SOURCE_CLOUD_CLUSTER_REGION);
        // skip msg born from destination (would otherwise replicate in a circle)
        if (bornSource != null && bornSource.contains(connectorConfig.generateDestinationString() + ",")) {
            if (circleReplicateCounter.incrementAndGet() % 100 == 0) {
                log.warn("skip " + circleReplicateCounter.get() + " message have replicated from " + connectorConfig.generateDestinationString() + ", bornSource : " + bornSource + ", message : " + message);
            }
            return null;
        }
        // save all source in born source, format is srcCloud "_" srcCluster "_" srcRegion ",";
        if (StringUtils.isEmpty(bornSource)) {
            bornSource = "";
        }
        keyValue.put(REPLICATOR_BORN_SOURCE_CLOUD_CLUSTER_REGION, bornSource + connectorConfig.generateSourceString() + ",");
        String bornTopic = keyValue.getString(REPLICATOR_BORE_INSTANCEID_TOPIC);
        // save born topic if empty; format is srcInstanceId "%" srcTopicTags
        if (StringUtils.isEmpty(bornTopic)) {
            keyValue.put(REPLICATOR_BORE_INSTANCEID_TOPIC, connectorConfig.generateFullSourceTopicTags());
        }
        // put src born timestamp
        keyValue.put(REPLICATOR_BORN_SOURCE_TIMESTAMP, message.getBornTimestamp());
        // put src topic
        keyValue.put(REPLICATOR_SRC_TOPIC_PROPERTY_KEY, topic);
        // save tags
        if (StringUtils.isNotBlank(message.getTags())) {
            keyValue.put(MessageConst.PROPERTY_TAGS, message.getTags());
        }
        // save keys
        if (StringUtils.isNotBlank(message.getKeys())) {
            keyValue.put(MessageConst.PROPERTY_KEYS, message.getKeys());
        }
        // save src messageid
        keyValue.put(REPLICATOR_SRC_MESSAGE_ID, message.getMsgId());
        log.debug("addExtension : " + keyValue.keySet());
        sinkDataEntry.addExtension(keyValue);
        sinkDataEntry.addExtension(ReplicatorUtils.TOPIC, destTopic);
        return sinkDataEntry;
    }

    /**
     * Removes an acked offset from the queue's in-flight map and returns the
     * highest offset that is now safe to commit for that queue.
     *
     * @param mq           queue being acked
     * @param removeOffset the acked message's queue offset
     * @return the commit-safe offset, or -1 when nothing can be committed
     */
    public long removeMessage(MessageQueue mq, long removeOffset) {
        TreeMap<Long, UnAckMessage> offsets = queue2Offsets.get(mq);
        if (offsets == null) {
            // warn log, maybe just rebalance
            log.error("queue2Offset get mq wrong, mq : " + mq);
            return -1;
        }
        ReadWriteLock mqLock = locks.get(mq);
        if (mqLock == null) {
            log.error("bug");
            return -1;
        }
        long finalMaxCommitOffset = -1;
        try {
            mqLock.writeLock().lockInterruptibly();
            try {
                if (!offsets.isEmpty()) {
                    Long maxOffset = mq2MaxOffsets.get(mq);
                    if (maxOffset == null) {
                        log.error("bug");
                        return -1;
                    }
                    // queue fully drained by this removal => everything up to max is safe
                    finalMaxCommitOffset = maxOffset + 1;
                    UnAckMessage prev = offsets.remove(removeOffset);
                    if (prev != null) {
                        unAckCounter.decrementAndGet();
                    }
                    // still-pending messages cap the commit at the lowest un-acked offset
                    if (!offsets.isEmpty()) {
                        finalMaxCommitOffset = offsets.firstKey();
                    }
                }
            } finally {
                mqLock.writeLock().unlock();
            }
        } catch (Throwable t) {
            log.error("removeMessage exception", t);
        }
        log.info("markQueueCommitted remove mq : " + mq + " offset : " + removeOffset + ", commit offset : " + finalMaxCommitOffset);
        return finalMaxCommitOffset;
    }

    /**
     * Stages {@code canCommitOffset} for the queue; only ever moves forward
     * (compare-and-increase), so late acks cannot rewind the commit point.
     */
    public void commitOffset(MessageQueue mq, long canCommitOffset) {
        if (canCommitOffset == -1) {
            return;
        }
        AtomicLong commitOffset = prepareCommitOffset.get(mq);
        if (commitOffset == null) {
            commitOffset = new AtomicLong(canCommitOffset);
            AtomicLong old = prepareCommitOffset.putIfAbsent(mq, new AtomicLong(canCommitOffset));
            if (old != null) {
                commitOffset = old;
            }
        }
        MixAll.compareAndIncreaseOnly(commitOffset, canCommitOffset);
    }

    /**
     * Sink-side delivery callback. A {@code null} metadata means the send failed;
     * the failure is recorded, and the offset bookkeeping proceeds either way
     * (matching the original behavior — the message is not retried here).
     */
    @Override
    public void commit(ConnectRecord record, Map<String, String> metadata) {
        if (metadata == null) {
            // send failed: DISMISS and non-DISMISS strategies currently both just log the failure
            saveFailedMessage(record, "failed");
        }
        try {
            // send success, record offset
            Map<String, ?> map = record.getPosition().getPartition().getPartition();
            String brokerName = (String) map.get("brokerName");
            String topic = (String) map.get("topic");
            int queueId = Integer.parseInt((String) map.get("queueId"));
            MessageQueue mq = new MessageQueue(topic, brokerName, queueId);
            Map<String, ?> offsetMap = record.getPosition().getOffset().getOffset();
            long offset = Long.parseLong((String) offsetMap.get(QUEUE_OFFSET));
            long canCommitOffset = removeMessage(mq, offset);
            commitOffset(mq, canCommitOffset);
        } catch (Exception e) {
            buglog.error("[Bug] commit parse record error", e);
        }
    }

    /** Records a message that could not be delivered, for offline inspection. */
    private void saveFailedMessage(Object msg, String errType) {
        workerErrorMsgLog.error("putMessage error " + errType + ", msg : " + msg);
    }

    @Override
    public void validate(KeyValue config) {
    }

    /**
     * Builds the connector config from task config, creates the admin client and
     * pull consumer, assigns queues, and starts background schedules.
     *
     * @throws StartTaskException wrapping any startup failure after resources are cleaned up
     */
    @Override
    public void start(KeyValue config) {
        log.info("ReplicatorSourceTask init " + config);
        log.info("sourceTaskContextConfigs : " + sourceTaskContext.configs());
        // build connectConfig
        // FIX: "+ 1" belongs inside length() to skip the separator after the connector
        // name; the old code appended the literal string "1" to the task id instead
        connectorConfig.setTaskId(sourceTaskContext.getTaskName().substring(sourceTaskContext.getConnectorName().length() + 1));
        connectorConfig.setConnectorId(sourceTaskContext.getConnectorName());
        connectorConfig.setSrcCloud(config.getString(ReplicatorConnectorConfig.SRC_CLOUD));
        connectorConfig.setSrcRegion(config.getString(ReplicatorConnectorConfig.SRC_REGION));
        connectorConfig.setSrcCluster(config.getString(ReplicatorConnectorConfig.SRC_CLUSTER));
        connectorConfig.setSrcInstanceId(config.getString(ReplicatorConnectorConfig.SRC_INSTANCEID));
        connectorConfig.setSrcEndpoint(config.getString(ReplicatorConnectorConfig.SRC_ENDPOINT));
        connectorConfig.setSrcTopicTags(config.getString(ReplicatorConnectorConfig.SRC_TOPICTAGS));
        connectorConfig.setDestCloud(config.getString(ReplicatorConnectorConfig.DEST_CLOUD));
        connectorConfig.setDestRegion(config.getString(ReplicatorConnectorConfig.DEST_REGION));
        connectorConfig.setDestCluster(config.getString(ReplicatorConnectorConfig.DEST_CLUSTER));
        connectorConfig.setDestInstanceId(config.getString(ReplicatorConnectorConfig.DEST_INSTANCEID));
        connectorConfig.setDestEndpoint(config.getString(ReplicatorConnectorConfig.DEST_ENDPOINT));
        connectorConfig.setDestTopic(config.getString(ReplicatorConnectorConfig.DEST_TOPIC));
        connectorConfig.setDestAclEnable(Boolean.parseBoolean(config.getString(ReplicatorConnectorConfig.DEST_ACL_ENABLE, "true")));
        connectorConfig.setSrcAclEnable(Boolean.parseBoolean(config.getString(ReplicatorConnectorConfig.SRC_ACL_ENABLE, "true")));
        connectorConfig.setAutoCreateInnerConsumergroup(Boolean.parseBoolean(config.getString(ReplicatorConnectorConfig.AUTO_CREATE_INNER_CONSUMERGROUP, "false")));
        connectorConfig.setSyncTps(config.getInt(ReplicatorConnectorConfig.SYNC_TPS));
        connectorConfig.setDividedNormalQueues(config.getString(ReplicatorConnectorConfig.DIVIDED_NORMAL_QUEUES));
        connectorConfig.setSrcAccessKey(config.getString(ReplicatorConnectorConfig.SRC_ACCESS_KEY));
        connectorConfig.setSrcSecretKey(config.getString(ReplicatorConnectorConfig.SRC_SECRET_KEY));
        connectorConfig.setCommitOffsetIntervalMs(config.getLong(ReplicatorConnectorConfig.COMMIT_OFFSET_INTERVALS_MS, 10 * 1000));
        connectorConfig.setConsumeFromWhere(config.getString(ReplicatorConnectorConfig.CONSUME_FROM_WHERE, ConsumeFromWhere.CONSUME_FROM_LAST_OFFSET.name()));
        if (connectorConfig.getConsumeFromWhere() == ConsumeFromWhere.CONSUME_FROM_TIMESTAMP) {
            connectorConfig.setConsumeFromTimestamp(Long.parseLong(config.getString(ReplicatorConnectorConfig.CONSUME_FROM_TIMESTAMP)));
        }
        log.info("ReplicatorSourceTask connectorConfig : " + connectorConfig);
        try {
            log.info("prepare init ....");
            // get pull consumer group & create group
            String srcClusterName = connectorConfig.getSrcCluster();
            String pullConsumerGroup = connectorConfig.generateTaskIdWithIndexAsConsumerGroup();
            buildMqAdminClient();
            if (connectorConfig.isAutoCreateInnerConsumergroup()) {
                createAndUpdatePullConsumerGroup(srcClusterName, pullConsumerGroup);
            }
            log.info("createAndUpdatePullConsumerGroup " + pullConsumerGroup + " finished.");
            // init pullConsumer
            buildConsumer();
            log.info("buildConsumer finished.");
            // init limiter
            tpsLimit = connectorConfig.getSyncTps();
            log.info("RateLimiter init finished.");
            // subscribe topic & start consumer
            subscribeTopicAndStartConsumer();
            // init sync delay metrics monitor and offset-commit schedules
            execScheduleTask();
            // FIX: second message previously repeated "RateLimiter init finished."
            log.info("execScheduleTask finished.");
            log.info("QueueOffsetManager init finished.");
        } catch (Exception e) {
            log.error("start ReplicatorSourceTask error, please check connectorConfig.", e);
            cleanResource();
            throw new StartTaskException("Start replicator source task error, errMsg : " + e.getMessage(), e);
        }
    }

    /**
     * Best-effort shutdown of every resource this task created.
     * FIX: previously leaked the commit-offset scheduler and the admin client.
     */
    private void cleanResource() {
        try {
            if (pullConsumer != null) {
                pullConsumer.shutdown();
            }
            if (srcMQAdminExt != null) {
                srcMQAdminExt.shutdown();
            }
            if (metricsMonitorExecutorService != null) {
                metricsMonitorExecutorService.shutdown();
            }
            if (commitOffsetScheduleService != null) {
                commitOffsetScheduleService.shutdown();
            }
        } catch (Exception e) {
            log.error("clean resource error,", e);
        }
    }

    @Override
    public void stop() {
        cleanResource();
    }
}
apache/hadoop
36,327
hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/INodeDirectory.java
/** * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.hadoop.hdfs.server.namenode; import java.io.FileNotFoundException; import java.io.PrintWriter; import java.util.ArrayList; import java.util.Collections; import java.util.Iterator; import java.util.List; import java.util.Map; import org.apache.hadoop.fs.PathIsNotDirectoryException; import org.apache.hadoop.fs.permission.FsAction; import org.apache.hadoop.fs.permission.PermissionStatus; import org.apache.hadoop.fs.StorageType; import org.apache.hadoop.fs.XAttr; import org.apache.hadoop.hdfs.DFSUtil; import org.apache.hadoop.hdfs.protocol.SnapshotException; import org.apache.hadoop.hdfs.server.blockmanagement.BlockStoragePolicySuite; import org.apache.hadoop.hdfs.server.namenode.INodeReference.WithCount; import org.apache.hadoop.hdfs.server.namenode.visitor.NamespaceVisitor; import org.apache.hadoop.hdfs.server.namenode.snapshot.DirectorySnapshottableFeature; import org.apache.hadoop.hdfs.server.namenode.snapshot.DirectoryWithSnapshotFeature; import org.apache.hadoop.hdfs.server.namenode.snapshot.DirectoryWithSnapshotFeature.DirectoryDiffList; import org.apache.hadoop.hdfs.server.namenode.snapshot.Snapshot; import org.apache.hadoop.hdfs.server.namenode.snapshot.SnapshotManager; 
import org.apache.hadoop.hdfs.util.ReadOnlyList; import org.apache.hadoop.classification.VisibleForTesting; import org.apache.hadoop.util.Preconditions; import org.apache.hadoop.security.AccessControlException; import static org.apache.hadoop.hdfs.protocol.HdfsConstants.BLOCK_STORAGE_POLICY_ID_UNSPECIFIED; /** * Directory INode class. */ public class INodeDirectory extends INodeWithAdditionalFields implements INodeDirectoryAttributes { /** Cast INode to INodeDirectory. */ public static INodeDirectory valueOf(INode inode, Object path ) throws FileNotFoundException, PathIsNotDirectoryException { if (inode == null) { throw new FileNotFoundException("Directory does not exist: " + DFSUtil.path2String(path)); } if (!inode.isDirectory()) { throw new PathIsNotDirectoryException(DFSUtil.path2String(path)); } return inode.asDirectory(); } // Profiling shows that most of the file lists are between 1 and 4 elements. // Thus allocate the corresponding ArrayLists with a small initial capacity. public static final int DEFAULT_FILES_PER_DIRECTORY = 2; static final byte[] ROOT_NAME = DFSUtil.string2Bytes(""); private List<INode> children = null; /** constructor */ public INodeDirectory(long id, byte[] name, PermissionStatus permissions, long mtime) { super(id, name, permissions, mtime, 0L); } /** * Copy constructor * @param other The INodeDirectory to be copied * @param adopt Indicate whether or not need to set the parent field of child * INodes to the new node * @param featuresToCopy any number of features to copy to the new node. * The method will do a reference copy, not a deep copy. */ public INodeDirectory(INodeDirectory other, boolean adopt, Feature... 
featuresToCopy) { super(other); this.children = other.children; if (adopt && this.children != null) { for (INode child : children) { child.setParent(this); } } this.features = featuresToCopy; AclFeature aclFeature = getFeature(AclFeature.class); if (aclFeature != null) { // for the de-duplication of AclFeature removeFeature(aclFeature); addFeature(AclStorage.addAclFeature(aclFeature)); } } /** @return true unconditionally. */ @Override public final boolean isDirectory() { return true; } /** @return this object. */ @Override public final INodeDirectory asDirectory() { return this; } @Override public byte getLocalStoragePolicyID() { XAttrFeature f = getXAttrFeature(); XAttr xattr = f == null ? null : f.getXAttr( BlockStoragePolicySuite.getStoragePolicyXAttrPrefixedName()); if (xattr != null) { return (xattr.getValue())[0]; } return BLOCK_STORAGE_POLICY_ID_UNSPECIFIED; } @Override public byte getStoragePolicyID() { byte id = getLocalStoragePolicyID(); if (id != BLOCK_STORAGE_POLICY_ID_UNSPECIFIED) { return id; } // if it is unspecified, check its parent return getParent() != null ? 
getParent().getStoragePolicyID() : BLOCK_STORAGE_POLICY_ID_UNSPECIFIED; } void setQuota(BlockStoragePolicySuite bsps, long nsQuota, long ssQuota, StorageType type) { DirectoryWithQuotaFeature quota = getDirectoryWithQuotaFeature(); if (quota != null) { // already has quota; so set the quota to the new values if (type != null) { quota.setQuota(ssQuota, type); } else { quota.setQuota(nsQuota, ssQuota); } if (!isQuotaSet() && !isRoot()) { removeFeature(quota); } } else { final QuotaCounts c = computeQuotaUsage(bsps); DirectoryWithQuotaFeature.Builder builder = new DirectoryWithQuotaFeature.Builder().nameSpaceQuota(nsQuota); if (type != null) { builder.typeQuota(type, ssQuota); } else { builder.storageSpaceQuota(ssQuota); } addDirectoryWithQuotaFeature(builder.build()).setSpaceConsumed(c); } } @Override public QuotaCounts getQuotaCounts() { final DirectoryWithQuotaFeature q = getDirectoryWithQuotaFeature(); return q != null? q.getQuota(): super.getQuotaCounts(); } @Override public void addSpaceConsumed(QuotaCounts counts) { super.addSpaceConsumed(counts); final DirectoryWithQuotaFeature q = getDirectoryWithQuotaFeature(); if (q != null && isQuotaSet()) { q.addSpaceConsumed2Cache(counts); } } /** * If the directory contains a {@link DirectoryWithQuotaFeature}, return it; * otherwise, return null. */ public final DirectoryWithQuotaFeature getDirectoryWithQuotaFeature() { return getFeature(DirectoryWithQuotaFeature.class); } /** Is this directory with quota? */ final boolean isWithQuota() { return getDirectoryWithQuotaFeature() != null; } DirectoryWithQuotaFeature addDirectoryWithQuotaFeature( DirectoryWithQuotaFeature q) { Preconditions.checkState(!isWithQuota(), "Directory is already with quota"); addFeature(q); return q; } int searchChildren(byte[] name) { return children == null? 
-1: Collections.binarySearch(children, name); } public DirectoryWithSnapshotFeature addSnapshotFeature( DirectoryDiffList diffs) { Preconditions.checkState(!isWithSnapshot(), "Directory is already with snapshot"); DirectoryWithSnapshotFeature sf = new DirectoryWithSnapshotFeature(diffs); addFeature(sf); return sf; } /** * If feature list contains a {@link DirectoryWithSnapshotFeature}, return it; * otherwise, return null. */ public final DirectoryWithSnapshotFeature getDirectoryWithSnapshotFeature() { return getFeature(DirectoryWithSnapshotFeature.class); } /** Is this file has the snapshot feature? */ public final boolean isWithSnapshot() { return getDirectoryWithSnapshotFeature() != null; } public DirectoryDiffList getDiffs() { DirectoryWithSnapshotFeature sf = getDirectoryWithSnapshotFeature(); return sf != null ? sf.getDiffs() : null; } @Override public INodeDirectoryAttributes getSnapshotINode(int snapshotId) { DirectoryWithSnapshotFeature sf = getDirectoryWithSnapshotFeature(); return sf == null ? this : sf.getDiffs().getSnapshotINode(snapshotId, this); } @Override public String toDetailString() { DirectoryWithSnapshotFeature sf = this.getDirectoryWithSnapshotFeature(); return super.toDetailString() + (sf == null ? "" : ", " + sf.getDiffs()); } public DirectorySnapshottableFeature getDirectorySnapshottableFeature() { return getFeature(DirectorySnapshottableFeature.class); } public boolean isSnapshottable() { return getDirectorySnapshottableFeature() != null; } /** * Check if this directory is a descendant directory * of a snapshot root directory. 
* @param snapshotRootDir the snapshot root directory * @return true if this directory is a descendant of snapshot root */ public boolean isDescendantOfSnapshotRoot(INodeDirectory snapshotRootDir) { Preconditions.checkArgument(snapshotRootDir.isSnapshottable()); INodeDirectory dir = this; while(dir != null) { if (dir.equals(snapshotRootDir)) { return true; } dir = dir.getParent(); } return false; } public Snapshot getSnapshot(byte[] snapshotName) { return getDirectorySnapshottableFeature().getSnapshot(snapshotName); } public void setSnapshotQuota(int snapshotQuota) { getDirectorySnapshottableFeature().setSnapshotQuota(snapshotQuota); } /** * Add a snapshot. * @param name Name of the snapshot. * @param mtime The snapshot creation time set by Time.now(). */ public Snapshot addSnapshot(SnapshotManager snapshotManager, String name, final LeaseManager leaseManager, long mtime) throws SnapshotException { return getDirectorySnapshottableFeature().addSnapshot(this, snapshotManager, name, leaseManager, mtime); } /** * Delete a snapshot. * @param snapshotName Name of the snapshot. * @param mtime The snapshot deletion time set by Time.now(). */ public Snapshot removeSnapshot(ReclaimContext reclaimContext, String snapshotName, long mtime, SnapshotManager snapshotManager) throws SnapshotException { return getDirectorySnapshottableFeature().removeSnapshot( reclaimContext, this, snapshotName, mtime, snapshotManager); } /** * Rename a snapshot. * @param path The directory path where the snapshot was taken. * @param oldName Old name of the snapshot * @param newName New name the snapshot will be renamed to * @param mtime The snapshot modification time set by Time.now(). 
*/ public void renameSnapshot(String path, String oldName, String newName, long mtime) throws SnapshotException { getDirectorySnapshottableFeature().renameSnapshot(path, oldName, newName, mtime); } /** add DirectorySnapshottableFeature */ public void addSnapshottableFeature() { Preconditions.checkState(!isSnapshottable(), "this is already snapshottable, this=%s", this); DirectoryWithSnapshotFeature s = this.getDirectoryWithSnapshotFeature(); final DirectorySnapshottableFeature snapshottable = new DirectorySnapshottableFeature(s); if (s != null) { this.removeFeature(s); } this.addFeature(snapshottable); } /** remove DirectorySnapshottableFeature */ public void removeSnapshottableFeature() { DirectorySnapshottableFeature s = getDirectorySnapshottableFeature(); Preconditions.checkState(s != null, "The dir does not have snapshottable feature: this=%s", this); this.removeFeature(s); if (s.getDiffs().asList().size() > 0) { // add a DirectoryWithSnapshotFeature back DirectoryWithSnapshotFeature sf = new DirectoryWithSnapshotFeature( s.getDiffs()); addFeature(sf); } } /** * Replace the given child with a new child. Note that we no longer need to * replace an normal INodeDirectory or INodeFile into an * INodeDirectoryWithSnapshot or INodeFileUnderConstruction. The only cases * for child replacement is for reference nodes. 
*/ public void replaceChild(INode oldChild, final INode newChild, final INodeMap inodeMap) { Preconditions.checkNotNull(children); final int i = searchChildren(newChild.getLocalNameBytes()); Preconditions.checkState(i >= 0); Preconditions.checkState(oldChild == children.get(i) || oldChild == children.get(i).asReference().getReferredINode() .asReference().getReferredINode()); oldChild = children.get(i); if (oldChild.isReference() && newChild.isReference()) { // both are reference nodes, e.g., DstReference -> WithName final INodeReference.WithCount withCount = (WithCount) oldChild.asReference().getReferredINode(); withCount.removeReference(oldChild.asReference()); } children.set(i, newChild); // replace the instance in the created list of the diff list DirectoryWithSnapshotFeature sf = this.getDirectoryWithSnapshotFeature(); if (sf != null) { sf.getDiffs().replaceCreatedChild(oldChild, newChild); } // update the inodeMap if (inodeMap != null) { inodeMap.put(newChild); } } INodeReference.WithName replaceChild4ReferenceWithName(INode oldChild, int latestSnapshotId) { Preconditions.checkArgument(latestSnapshotId != Snapshot.CURRENT_STATE_ID); if (oldChild instanceof INodeReference.WithName) { return (INodeReference.WithName)oldChild; } final INodeReference.WithCount withCount; if (oldChild.isReference()) { Preconditions.checkState(oldChild instanceof INodeReference.DstReference); withCount = (INodeReference.WithCount) oldChild.asReference() .getReferredINode(); } else { withCount = new INodeReference.WithCount(null, oldChild); } final INodeReference.WithName ref = new INodeReference.WithName(this, withCount, oldChild.getLocalNameBytes(), latestSnapshotId); replaceChild(oldChild, ref, null); return ref; } @Override public void recordModification(int latestSnapshotId) { if (isInLatestSnapshot(latestSnapshotId) && !shouldRecordInSrcSnapshot(latestSnapshotId)) { // add snapshot feature if necessary DirectoryWithSnapshotFeature sf = getDirectoryWithSnapshotFeature(); if (sf 
== null) { sf = addSnapshotFeature(null); } // record self in the diff list if necessary sf.getDiffs().saveSelf2Snapshot(latestSnapshotId, this, null); } } /** * Save the child to the latest snapshot. * * @return the child inode, which may be replaced. */ public INode saveChild2Snapshot(final INode child, final int latestSnapshotId, final INode snapshotCopy) { if (latestSnapshotId == Snapshot.CURRENT_STATE_ID) { return child; } // add snapshot feature if necessary DirectoryWithSnapshotFeature sf = getDirectoryWithSnapshotFeature(); if (sf == null) { sf = this.addSnapshotFeature(null); } return sf.saveChild2Snapshot(this, child, latestSnapshotId, snapshotCopy); } /** * @param name the name of the child * @param snapshotId * if it is not {@link Snapshot#CURRENT_STATE_ID}, get the result * from the corresponding snapshot; otherwise, get the result from * the current directory. * @return the child inode. */ public INode getChild(byte[] name, int snapshotId) { DirectoryWithSnapshotFeature sf; if (snapshotId == Snapshot.CURRENT_STATE_ID || (sf = getDirectoryWithSnapshotFeature()) == null) { ReadOnlyList<INode> c = getCurrentChildrenList(); final int i = ReadOnlyList.Util.binarySearch(c, name); return i < 0 ? null : c.get(i); } return sf.getChild(this, name, snapshotId); } /** * Search for the given INode in the children list and the deleted lists of * snapshots. * @return {@link Snapshot#CURRENT_STATE_ID} if the inode is in the children * list; {@link Snapshot#NO_SNAPSHOT_ID} if the inode is neither in the * children list nor in any snapshot; otherwise the snapshot id of the * corresponding snapshot diff list. */ public int searchChild(INode inode) { INode child = getChild(inode.getLocalNameBytes(), Snapshot.CURRENT_STATE_ID); if (child != inode) { // inode is not in parent's children list, thus inode must be in // snapshot. 
identify the snapshot id and later add it into the path DirectoryDiffList diffs = getDiffs(); if (diffs == null) { return Snapshot.NO_SNAPSHOT_ID; } return diffs.findSnapshotDeleted(inode); } else { return Snapshot.CURRENT_STATE_ID; } } /** * @param snapshotId * if it is not {@link Snapshot#CURRENT_STATE_ID}, get the result * from the corresponding snapshot; otherwise, get the result from * the current directory. * @return the current children list if the specified snapshot is null; * otherwise, return the children list corresponding to the snapshot. * Note that the returned list is never null. */ public ReadOnlyList<INode> getChildrenList(final int snapshotId) { DirectoryWithSnapshotFeature sf; if (snapshotId == Snapshot.CURRENT_STATE_ID || (sf = this.getDirectoryWithSnapshotFeature()) == null) { return getCurrentChildrenList(); } return sf.getChildrenList(this, snapshotId); } private ReadOnlyList<INode> getCurrentChildrenList() { return children == null ? ReadOnlyList.Util.<INode> emptyList() : ReadOnlyList.Util.asReadOnlyList(children); } /** * Given a child's name, return the index of the next child * * @param name a child's name * @return the index of the next child */ static int nextChild(ReadOnlyList<INode> children, byte[] name) { if (name.length == 0) { // empty name return 0; } int nextPos = ReadOnlyList.Util.binarySearch(children, name) + 1; if (nextPos >= 0) { return nextPos; } return -nextPos; } /** * Remove the specified child from this directory. */ public boolean removeChild(INode child, int latestSnapshotId) { if (isInLatestSnapshot(latestSnapshotId)) { // create snapshot feature if necessary DirectoryWithSnapshotFeature sf = this.getDirectoryWithSnapshotFeature(); if (sf == null) { sf = this.addSnapshotFeature(null); } return sf.removeChild(this, child, latestSnapshotId); } return removeChild(child); } /** * Remove the specified child from this directory. * The basic remove method which actually calls children.remove(..). 
* * @param child the child inode to be removed * * @return true if the child is removed; false if the child is not found. */ public boolean removeChild(final INode child) { final int i = searchChildren(child.getLocalNameBytes()); if (i < 0) { return false; } final INode removed = children.remove(i); Preconditions.checkState(removed.equals(child)); return true; } /** * Add a child inode to the directory. * * @param node INode to insert * @param setModTime set modification time for the parent node * not needed when replaying the addition and * the parent already has the proper mod time * @return false if the child with this name already exists; * otherwise, return true; */ public boolean addChild(INode node, final boolean setModTime, final int latestSnapshotId) { final int low = searchChildren(node.getLocalNameBytes()); if (low >= 0) { return false; } if (isInLatestSnapshot(latestSnapshotId)) { // create snapshot feature if necessary DirectoryWithSnapshotFeature sf = this.getDirectoryWithSnapshotFeature(); if (sf == null) { sf = this.addSnapshotFeature(null); } return sf.addChild(this, node, setModTime, latestSnapshotId); } addChild(node, low); if (setModTime) { // update modification time of the parent directory updateModificationTime(node.getModificationTime(), latestSnapshotId); } return true; } public boolean addChild(INode node) { final int low = searchChildren(node.getLocalNameBytes()); if (low >= 0) { return false; } addChild(node, low); return true; } /** * During image loading, the search is unnecessary since the insert position * should always be at the end of the map given the sequence they are * serialized on disk. */ public boolean addChildAtLoading(INode node) { int pos; if (!node.isReference()) { pos = (children == null) ? (-1) : (-children.size() - 1); addChild(node, pos); return true; } else { return addChild(node); } } /** * Add the node to the children list at the given insertion point. * The basic add method which actually calls children.add(..). 
*/ private void addChild(final INode node, final int insertionPoint) { if (children == null) { children = new ArrayList<>(DEFAULT_FILES_PER_DIRECTORY); } node.setParent(this); children.add(-insertionPoint - 1, node); if (node.getGroupName() == null) { node.setGroup(getGroupName()); } } @Override public QuotaCounts computeQuotaUsage(BlockStoragePolicySuite bsps, byte blockStoragePolicyId, boolean useCache, int lastSnapshotId) { final DirectoryWithSnapshotFeature sf = getDirectoryWithSnapshotFeature(); QuotaCounts counts = new QuotaCounts.Builder().build(); // we are computing the quota usage for a specific snapshot here, i.e., the // computation only includes files/directories that exist at the time of the // given snapshot if (sf != null && lastSnapshotId != Snapshot.CURRENT_STATE_ID && !(useCache && isQuotaSet())) { ReadOnlyList<INode> childrenList = getChildrenList(lastSnapshotId); for (INode child : childrenList) { final byte childPolicyId = child.getStoragePolicyIDForQuota( blockStoragePolicyId); counts.add(child.computeQuotaUsage(bsps, childPolicyId, useCache, lastSnapshotId)); } counts.addNameSpace(1); return counts; } // compute the quota usage in the scope of the current directory tree final DirectoryWithQuotaFeature q = getDirectoryWithQuotaFeature(); if (useCache && q != null && q.isQuotaSet()) { // use the cached quota return q.AddCurrentSpaceUsage(counts); } else { useCache = q != null && !q.isQuotaSet() ? 
false : useCache; return computeDirectoryQuotaUsage(bsps, blockStoragePolicyId, counts, useCache, lastSnapshotId); } } private QuotaCounts computeDirectoryQuotaUsage(BlockStoragePolicySuite bsps, byte blockStoragePolicyId, QuotaCounts counts, boolean useCache, int lastSnapshotId) { if (children != null) { for (INode child : children) { final byte childPolicyId = child.getStoragePolicyIDForQuota( blockStoragePolicyId); counts.add(child.computeQuotaUsage(bsps, childPolicyId, useCache, lastSnapshotId)); } } return computeQuotaUsage4CurrentDirectory(bsps, blockStoragePolicyId, counts); } /** Add quota usage for this inode excluding children. */ public QuotaCounts computeQuotaUsage4CurrentDirectory( BlockStoragePolicySuite bsps, byte storagePolicyId, QuotaCounts counts) { counts.addNameSpace(1); // include the diff list DirectoryWithSnapshotFeature sf = getDirectoryWithSnapshotFeature(); if (sf != null) { counts.add(sf.computeQuotaUsage4CurrentDirectory(bsps, storagePolicyId)); } return counts; } @Override public ContentSummaryComputationContext computeContentSummary(int snapshotId, ContentSummaryComputationContext summary) throws AccessControlException { final DirectoryWithSnapshotFeature sf = getDirectoryWithSnapshotFeature(); if (sf != null && snapshotId == Snapshot.CURRENT_STATE_ID) { final ContentCounts counts = new ContentCounts.Builder().build(); // if the getContentSummary call is against a non-snapshot path, the // computation should include all the deleted files/directories sf.computeContentSummary4Snapshot(summary.getBlockStoragePolicySuite(), counts); summary.getCounts().addContents(counts); // Also add ContentSummary to snapshotCounts (So we can extract it // later from the ContentSummary of all). 
summary.getSnapshotCounts().addContents(counts); } final DirectoryWithQuotaFeature q = getDirectoryWithQuotaFeature(); if (q != null && snapshotId == Snapshot.CURRENT_STATE_ID) { return q.computeContentSummary(this, summary); } else { return computeDirectoryContentSummary(summary, snapshotId); } } protected ContentSummaryComputationContext computeDirectoryContentSummary( ContentSummaryComputationContext summary, int snapshotId) throws AccessControlException{ // throws exception if failing the permission check summary.checkPermission(this, snapshotId, FsAction.READ_EXECUTE); ReadOnlyList<INode> childrenList = getChildrenList(snapshotId); // Explicit traversing is done to enable repositioning after relinquishing // and reacquiring locks. for (int i = 0; i < childrenList.size(); i++) { INode child = childrenList.get(i); byte[] childName = child.getLocalNameBytes(); long lastYieldCount = summary.getYieldCount(); child.computeContentSummary(snapshotId, summary); // Check whether the computation was paused in the subtree. // The counts may be off, but traversing the rest of children // should be made safe. if (lastYieldCount == summary.getYieldCount()) { continue; } // The locks were released and reacquired. Check parent first. if (!isRoot() && getParent() == null) { // Stop further counting and return whatever we have so far. break; } // Obtain the children list again since it may have been modified. childrenList = getChildrenList(snapshotId); // Reposition in case the children list is changed. Decrement by 1 // since it will be incremented when loops. i = nextChild(childrenList, childName) - 1; } // Increment the directory count for this directory. summary.getCounts().addContent(Content.DIRECTORY, 1); // Relinquish and reacquire locks if necessary. summary.yield(); return summary; } /** * This method is usually called by the undo section of rename. 
* * Before calling this function, in the rename operation, we replace the * original src node (of the rename operation) with a reference node (WithName * instance) in both the children list and a created list, delete the * reference node from the children list, and add it to the corresponding * deleted list. * * To undo the above operations, we have the following steps in particular: * * <pre> * 1) remove the WithName node from the deleted list (if it exists) * 2) replace the WithName node in the created list with srcChild * 3) add srcChild back as a child of srcParent. Note that we already add * the node into the created list of a snapshot diff in step 2, we do not need * to add srcChild to the created list of the latest snapshot. * </pre> * * We do not need to update quota usage because the old child is in the * deleted list before. * * @param oldChild * The reference node to be removed/replaced * @param newChild * The node to be added back */ public void undoRename4ScrParent(final INodeReference oldChild, final INode newChild) { DirectoryWithSnapshotFeature sf = getDirectoryWithSnapshotFeature(); assert sf != null : "Directory does not have snapshot feature"; sf.getDiffs().removeDeletedChild(oldChild); sf.getDiffs().replaceCreatedChild(oldChild, newChild); addChild(newChild, true, Snapshot.CURRENT_STATE_ID); } /** * Undo the rename operation for the dst tree, i.e., if the rename operation * (with OVERWRITE option) removes a file/dir from the dst tree, add it back * and delete possible record in the deleted list. */ public void undoRename4DstParent(final BlockStoragePolicySuite bsps, final INode deletedChild, int latestSnapshotId) { DirectoryWithSnapshotFeature sf = getDirectoryWithSnapshotFeature(); assert sf != null : "Directory does not have snapshot feature"; boolean removeDeletedChild = sf.getDiffs().removeDeletedChild(deletedChild); int sid = removeDeletedChild ? 
Snapshot.CURRENT_STATE_ID : latestSnapshotId; final boolean added = addChild(deletedChild, true, sid); // update quota usage if adding is successfully and the old child has not // been stored in deleted list before if (added && !removeDeletedChild) { final QuotaCounts counts = deletedChild.computeQuotaUsage(bsps); addSpaceConsumed(counts); } } /** Set the children list to null. */ public void clearChildren() { this.children = null; } @Override public void clear() { super.clear(); clearChildren(); } /** Call cleanSubtree(..) recursively down the subtree. */ public void cleanSubtreeRecursively( ReclaimContext reclaimContext, final int snapshot, int prior, final Map<INode, INode> excludedNodes) { // in case of deletion snapshot, since this call happens after we modify // the diff list, the snapshot to be deleted has been combined or renamed // to its latest previous snapshot. (besides, we also need to consider nodes // created after prior but before snapshot. this will be done in // DirectoryWithSnapshotFeature) int s = snapshot != Snapshot.CURRENT_STATE_ID && prior != Snapshot.NO_SNAPSHOT_ID ? 
prior : snapshot; for (INode child : getChildrenList(s)) { if (snapshot == Snapshot.CURRENT_STATE_ID || excludedNodes == null || !excludedNodes.containsKey(child)) { child.cleanSubtree(reclaimContext, snapshot, prior); } } } @Override public void destroyAndCollectBlocks(ReclaimContext reclaimContext) { reclaimContext.quotaDelta().add( new QuotaCounts.Builder().nameSpace(1).build()); final DirectoryWithSnapshotFeature sf = getDirectoryWithSnapshotFeature(); if (sf != null) { sf.clear(reclaimContext, this); } for (INode child : getChildrenList(Snapshot.CURRENT_STATE_ID)) { child.destroyAndCollectBlocks(reclaimContext); } if (getAclFeature() != null) { AclStorage.removeAclFeature(getAclFeature()); } clear(); reclaimContext.removedINodes.add(this); } @Override public void cleanSubtree(ReclaimContext reclaimContext, final int snapshotId, int priorSnapshotId) { DirectoryWithSnapshotFeature sf = getDirectoryWithSnapshotFeature(); // there is snapshot data if (sf != null) { sf.cleanDirectory(reclaimContext, this, snapshotId, priorSnapshotId); // If the inode has empty diff list and sf is not a // DirectorySnapshottableFeature, remove the feature to save heap. 
if (sf.getDiffs().isEmpty() && !(sf instanceof DirectorySnapshottableFeature) && getDirectoryWithSnapshotFeature() != null) { this.removeFeature(sf); } } else { // there is no snapshot data if (priorSnapshotId == Snapshot.NO_SNAPSHOT_ID && snapshotId == Snapshot.CURRENT_STATE_ID) { // destroy the whole subtree and collect blocks that should be deleted destroyAndCollectBlocks(reclaimContext); } else { // make a copy the quota delta QuotaCounts old = reclaimContext.quotaDelta().getCountsCopy(); // process recursively down the subtree cleanSubtreeRecursively(reclaimContext, snapshotId, priorSnapshotId, null); QuotaCounts current = reclaimContext.quotaDelta().getCountsCopy(); current.subtract(old); if (isQuotaSet()) { reclaimContext.quotaDelta().addQuotaDirUpdate(this, current); } } } } /** * Compare the metadata with another INodeDirectory */ @Override public boolean metadataEquals(INodeDirectoryAttributes other) { return other != null && getQuotaCounts().equals(other.getQuotaCounts()) && getPermissionLong() == other.getPermissionLong() && getAclFeature() == other.getAclFeature() && getXAttrFeature() == other.getXAttrFeature(); } /* * The following code is to dump the tree recursively for testing. 
* * \- foo (INodeDirectory@33dd2717) * \- sub1 (INodeDirectory@442172) * +- file1 (INodeFile@78392d4) * +- file2 (INodeFile@78392d5) * +- sub11 (INodeDirectory@8400cff) * \- file3 (INodeFile@78392d6) * \- z_file4 (INodeFile@45848712) */ static final String DUMPTREE_EXCEPT_LAST_ITEM = "+-"; static final String DUMPTREE_LAST_ITEM = "\\-"; @VisibleForTesting @Override public void dumpTreeRecursively(PrintWriter out, StringBuilder prefix, final int snapshot) { super.dumpTreeRecursively(out, prefix, snapshot); out.print(", childrenSize=" + getChildrenList(snapshot).size()); final DirectoryWithQuotaFeature q = getDirectoryWithQuotaFeature(); if (q != null) { out.print(", " + q); } if (this instanceof Snapshot.Root) { out.print(", snapshotId=" + snapshot); } out.println(); if (prefix.length() >= 2) { prefix.setLength(prefix.length() - 2); prefix.append(" "); } final DirectoryWithSnapshotFeature snapshotFeature = getDirectoryWithSnapshotFeature(); if (snapshotFeature != null) { out.print(prefix); out.print(snapshotFeature); } out.println(); dumpTreeRecursively(out, prefix, new Iterable<SnapshotAndINode>() { final Iterator<INode> i = getChildrenList(snapshot).iterator(); @Override public Iterator<SnapshotAndINode> iterator() { return new Iterator<SnapshotAndINode>() { @Override public boolean hasNext() { return i.hasNext(); } @Override public SnapshotAndINode next() { return new SnapshotAndINode(snapshot, i.next()); } @Override public void remove() { throw new UnsupportedOperationException(); } }; } }); final DirectorySnapshottableFeature s = getDirectorySnapshottableFeature(); if (s != null) { s.dumpTreeRecursively(this, out, prefix, snapshot); } } /** * Dump the given subtrees. * @param prefix The prefix string that each line should print. * @param subs The subtrees. 
*/ @VisibleForTesting public static void dumpTreeRecursively(PrintWriter out, StringBuilder prefix, Iterable<SnapshotAndINode> subs) { if (subs != null) { for(final Iterator<SnapshotAndINode> i = subs.iterator(); i.hasNext();) { final SnapshotAndINode pair = i.next(); prefix.append(i.hasNext()? DUMPTREE_EXCEPT_LAST_ITEM: DUMPTREE_LAST_ITEM); pair.inode.dumpTreeRecursively(out, prefix, pair.snapshotId); prefix.setLength(prefix.length() - 2); } } } /** A pair of Snapshot and INode objects. */ public static class SnapshotAndINode { public final int snapshotId; public final INode inode; public SnapshotAndINode(int snapshot, INode inode) { this.snapshotId = snapshot; this.inode = inode; } } @Override public void accept(NamespaceVisitor visitor, int snapshot) { visitor.visitDirectoryRecursively(this, snapshot); } public final int getChildrenNum(final int snapshotId) { return getChildrenList(snapshotId).size(); } }
apache/zookeeper
36,411
zookeeper-server/src/main/java/org/apache/zookeeper/server/NettyServerCnxnFactory.java
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.zookeeper.server; import io.netty.bootstrap.ServerBootstrap; import io.netty.buffer.ByteBuf; import io.netty.buffer.ByteBufAllocator; import io.netty.channel.Channel; import io.netty.channel.ChannelDuplexHandler; import io.netty.channel.ChannelFuture; import io.netty.channel.ChannelHandler; import io.netty.channel.ChannelHandler.Sharable; import io.netty.channel.ChannelHandlerContext; import io.netty.channel.ChannelInitializer; import io.netty.channel.ChannelOption; import io.netty.channel.ChannelPipeline; import io.netty.channel.ChannelPromise; import io.netty.channel.EventLoopGroup; import io.netty.channel.group.ChannelGroup; import io.netty.channel.group.ChannelGroupFuture; import io.netty.channel.group.DefaultChannelGroup; import io.netty.channel.socket.SocketChannel; import io.netty.handler.ssl.OptionalSslHandler; import io.netty.handler.ssl.SslContext; import io.netty.handler.ssl.SslHandler; import io.netty.util.AttributeKey; import io.netty.util.ReferenceCountUtil; import io.netty.util.concurrent.DefaultEventExecutor; import io.netty.util.concurrent.Future; import io.netty.util.concurrent.GenericFutureListener; import java.io.IOException; import java.net.InetAddress; 
import java.net.InetSocketAddress;
import java.net.SocketAddress;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.Set;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.concurrent.atomic.AtomicReference;
import javax.net.ssl.SSLEngine;
import javax.net.ssl.SSLException;
import javax.net.ssl.SSLPeerUnverifiedException;
import javax.net.ssl.SSLSession;
import org.apache.zookeeper.KeeperException;
import org.apache.zookeeper.common.ClientX509Util;
import org.apache.zookeeper.common.NettyUtils;
import org.apache.zookeeper.common.X509Exception;
import org.apache.zookeeper.common.X509Exception.SSLContextException;
import org.apache.zookeeper.common.ZKConfig;
import org.apache.zookeeper.server.NettyServerCnxn.HandshakeState;
import org.apache.zookeeper.server.auth.ProviderRegistry;
import org.apache.zookeeper.server.auth.X509AuthenticationProvider;
import org.apache.zookeeper.server.quorum.QuorumPeerConfig;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/**
 * A Netty-based {@link ServerCnxnFactory}: accepts client connections via a
 * {@code ServerBootstrap}, wraps each accepted channel in a
 * {@link NettyServerCnxn}, and optionally layers TLS (or dual TLS/plaintext
 * "port unification") onto the channel pipeline.
 */
public class NettyServerCnxnFactory extends ServerCnxnFactory {

    private static final Logger LOG = LoggerFactory.getLogger(NettyServerCnxnFactory.class);

    /**
     * Allow client-server sockets to accept both SSL and plaintext connections
     */
    public static final String PORT_UNIFICATION_KEY = "zookeeper.client.portUnification";

    // System property: when true, secure connections are dropped before the TLS
    // handshake if the ZooKeeper server is not running (see channelActive).
    public static final String EARLY_DROP_SECURE_CONNECTION_HANDSHAKES = "zookeeper.netty.server.earlyDropSecureConnectionHandshakes";

    // Set once in the constructor from PORT_UNIFICATION_KEY (may be forced off
    // if SSL auth configuration fails).
    private final boolean shouldUsePortUnification;

    /**
     * The first byte in TLS protocol is the content type of the subsequent record.
     * Handshakes use value 22 (0x16) so the first byte offered on any TCP connection
     * attempting to establish a TLS connection will be this value.
     * https://tools.ietf.org/html/rfc8446#page-79
     */
    private static final byte TLS_HANDSHAKE_RECORD_TYPE = 0x16;

    // Number of TLS handshakes currently in flight; incremented in
    // channelActive when throttling is enabled, decremented when a handshake
    // finishes (updateHandshakeCountIfStarted).
    private final AtomicInteger outstandingHandshake = new AtomicInteger();

    // System property: maximum number of concurrent in-flight TLS handshakes
    // (<= 0 disables throttling).
    public static final String OUTSTANDING_HANDSHAKE_LIMIT = "zookeeper.netty.server.outstandingHandshake.limit";

    private int outstandingHandshakeLimit;

    private boolean handshakeThrottlingEnabled;

    /**
     * Sets the concurrent-handshake limit and recomputes whether throttling is
     * active. Throttling only applies when the factory serves secure (or
     * port-unified) connections and the limit is positive.
     *
     * @param limit maximum outstanding handshakes; values &lt;= 0 disable throttling
     */
    public void setOutstandingHandshakeLimit(int limit) {
        outstandingHandshakeLimit = limit;
        handshakeThrottlingEnabled = (secure || shouldUsePortUnification) && outstandingHandshakeLimit > 0;
        LOG.info("handshakeThrottlingEnabled = {}, {} = {}", handshakeThrottlingEnabled, OUTSTANDING_HANDSHAKE_LIMIT, outstandingHandshakeLimit);
    }

    private final ServerBootstrap bootstrap;
    // Listening channel; null until start() binds. Replaced by reconfigure().
    private Channel parentChannel;
    // Every live accepted channel; closed en masse on shutdown().
    private final ChannelGroup allChannels = new DefaultChannelGroup("zkServerCnxns", new DefaultEventExecutor());
    /**
     * Per-client-IP connection count, used to enforce maxClientCnxns.
     */
    private final Map<InetAddress, AtomicInteger> ipMap = new ConcurrentHashMap<>();
    private InetSocketAddress localAddress;
    private int maxClientCnxns = 60;
    int listenBacklog = -1;
    private final ClientX509Util x509Util;

    public static final String NETTY_ADVANCED_FLOW_CONTROL = "zookeeper.netty.advancedFlowControl.enabled";
    private boolean advancedFlowControlEnabled = false;

    // Channel attribute carrying the NettyServerCnxn associated with a channel;
    // cleared (getAndSet(null)) on channelInactive/exceptionCaught.
    private static final AttributeKey<NettyServerCnxn> CONNECTION_ATTRIBUTE = AttributeKey.valueOf("NettyServerCnxn");

    // Test hook: when non-null, used as the allocator for new bootstraps.
    private static final AtomicReference<ByteBufAllocator> TEST_ALLOCATOR = new AtomicReference<>(null);

    public static final String CLIENT_CERT_RELOAD_KEY = "zookeeper.client.certReload";

    /**
     * A handler that detects whether the client would like to use
     * TLS or not and responds in kind. The first bytes are examined
     * for the static TLS headers to make the determination and
     * placed back in the stream with the correct ChannelHandler
     * instantiated.
     */
    class DualModeSslHandler extends OptionalSslHandler {

        DualModeSslHandler(SslContext sslContext) {
            super(sslContext);
        }

        @Override
        protected void decode(ChannelHandlerContext context, ByteBuf in, List<Object> out) throws Exception {
            if (in.readableBytes() >= 5) {
                // Enough bytes buffered for OptionalSslHandler's own detection.
                super.decode(context, in, out);
            } else if (in.readableBytes() > 0) {
                // It requires 5 bytes to detect a proper ssl connection. In the
                // case that the server receives fewer, check if we can fail to plaintext.
                // This will occur for any of the four-letter-word commands.
                if (TLS_HANDSHAKE_RECORD_TYPE != in.getByte(0)) {
                    LOG.debug("first byte {} does not match TLS handshake, failing to plaintext", in.getByte(0));
                    handleNonSsl(context);
                }
            }
        }

        /**
         * pulled directly from OptionalSslHandler to allow for access
         * @param context the handler context whose pipeline is rewritten
         */
        private void handleNonSsl(ChannelHandlerContext context) {
            ChannelHandler handler = this.newNonSslHandler(context);
            if (handler != null) {
                context.pipeline().replace(this, this.newNonSslHandlerName(), handler);
            } else {
                context.pipeline().remove(this);
            }
        }

        @Override
        protected SslHandler newSslHandler(ChannelHandlerContext context, SslContext sslContext) {
            NettyServerCnxn cnxn = Objects.requireNonNull(context.channel().attr(CONNECTION_ATTRIBUTE).get());
            LOG.debug("creating ssl handler for session {}", cnxn.getSessionId());
            SslHandler handler = super.newSslHandler(context, sslContext);
            Future<Channel> handshakeFuture = handler.handshakeFuture();
            // Certificate auth (and cnxn registration) happens once the
            // handshake completes.
            handshakeFuture.addListener(new CertificateVerifier(handler, cnxn));
            return handler;
        }

        @Override
        protected ChannelHandler newNonSslHandler(ChannelHandlerContext context) {
            NettyServerCnxn cnxn = Objects.requireNonNull(context.channel().attr(CONNECTION_ATTRIBUTE).get());
            LOG.debug("creating plaintext handler for session {}", cnxn.getSessionId());
            // Mark handshake finished if it's an insecure cnxn
            updateHandshakeCountIfStarted(cnxn);
            allChannels.add(context.channel());
            addCnxn(cnxn);
            return super.newNonSslHandler(context);
        }
    }

    private void
updateHandshakeCountIfStarted(NettyServerCnxn cnxn) {
        // Release this cnxn's slot in the handshake throttle exactly once:
        // only when the state transitions STARTED -> FINISHED.
        if (cnxn != null && cnxn.getHandshakeState() == HandshakeState.STARTED) {
            cnxn.setHandshakeState(HandshakeState.FINISHED);
            outstandingHandshake.addAndGet(-1);
        }
    }

    /**
     * This is an inner class since we need to extend ChannelDuplexHandler, but
     * NettyServerCnxnFactory already extends ServerCnxnFactory. By making it inner
     * this class gets access to the member variables and methods.
     */
    @Sharable
    class CnxnChannelHandler extends ChannelDuplexHandler {

        @Override
        public void channelActive(ChannelHandlerContext ctx) throws Exception {
            if (LOG.isTraceEnabled()) {
                LOG.trace("Channel active {}", ctx.channel());
            }

            final Channel channel = ctx.channel();
            // Global connection cap.
            if (limitTotalNumberOfCnxns()) {
                ServerMetrics.getMetrics().CONNECTION_REJECTED.add(1);
                channel.close();
                return;
            }
            // Per-IP connection cap.
            InetAddress addr = ((InetSocketAddress) channel.remoteAddress()).getAddress();
            if (maxClientCnxns > 0 && getClientCnxnCount(addr) >= maxClientCnxns) {
                ServerMetrics.getMetrics().CONNECTION_REJECTED.add(1);
                LOG.warn("Too many connections from {} - max is {}", addr, maxClientCnxns);
                channel.close();
                return;
            }

            NettyServerCnxn cnxn = new NettyServerCnxn(channel, zkServer, NettyServerCnxnFactory.this);
            ctx.channel().attr(CONNECTION_ATTRIBUTE).set(cnxn);

            // Check the zkServer assigned to the cnxn is still running,
            // close it before starting the heavy TLS handshake
            if (secure && !cnxn.isZKServerRunning()) {
                boolean earlyDropSecureConnectionHandshakes = Boolean.getBoolean(EARLY_DROP_SECURE_CONNECTION_HANDSHAKES);
                if (earlyDropSecureConnectionHandshakes) {
                    LOG.info("Zookeeper server is not running, close the connection to {} before starting the TLS handshake", cnxn.getChannel().remoteAddress());
                    ServerMetrics.getMetrics().CNXN_CLOSED_WITHOUT_ZK_SERVER_RUNNING.add(1);
                    channel.close();
                    return;
                }
            }

            if (handshakeThrottlingEnabled) {
                // Favor checking and throttling here, even in dual mode which
                // accepts both secure and insecure connections, since
                // it's more efficient than throttling when we know it's
                // a secure connection in DualModeSslHandler.
                //
                // From benchmark, this reduced around 15% reconnect time.
                int outstandingHandshakesNum = outstandingHandshake.addAndGet(1);
                if (outstandingHandshakesNum > outstandingHandshakeLimit) {
                    // Over the limit: undo the increment and reject the channel.
                    outstandingHandshake.addAndGet(-1);
                    channel.close();
                    ServerMetrics.getMetrics().TLS_HANDSHAKE_EXCEEDED.add(1);
                } else {
                    cnxn.setHandshakeState(HandshakeState.STARTED);
                }
            }

            if (secure) {
                SslHandler sslHandler = ctx.pipeline().get(SslHandler.class);
                Future<Channel> handshakeFuture = sslHandler.handshakeFuture();
                handshakeFuture.addListener(new CertificateVerifier(sslHandler, cnxn));
            } else if (!shouldUsePortUnification) {
                // Plain connections are registered immediately; in port-unified
                // mode registration is deferred until DualModeSslHandler decides.
                allChannels.add(ctx.channel());
                addCnxn(cnxn);
            }

            // Track non-mTLS connections (no SslHandler in the pipeline) for
            // server stats, split into remote vs. loopback counts.
            if (ctx.channel().pipeline().get(SslHandler.class) == null) {
                if (zkServer != null) {
                    SocketAddress remoteAddress = cnxn.getChannel().remoteAddress();
                    if (remoteAddress != null && !((InetSocketAddress) remoteAddress).getAddress().isLoopbackAddress()) {
                        LOG.trace("NettyChannelHandler channelActive: remote={} local={}", remoteAddress, cnxn.getChannel().localAddress());
                        zkServer.serverStats().incrementNonMTLSRemoteConnCount();
                    } else {
                        zkServer.serverStats().incrementNonMTLSLocalConnCount();
                    }
                } else {
                    LOG.trace("Opened non-TLS connection from {} but zkServer is not running", cnxn.getChannel().remoteAddress());
                }
            }
        }

        @Override
        public void channelInactive(ChannelHandlerContext ctx) throws Exception {
            if (LOG.isTraceEnabled()) {
                LOG.trace("Channel inactive {}", ctx.channel());
            }
            allChannels.remove(ctx.channel());
            // getAndSet(null) guarantees the cnxn is cleaned up exactly once
            // even if exceptionCaught also fires.
            NettyServerCnxn cnxn = ctx.channel().attr(CONNECTION_ATTRIBUTE).getAndSet(null);
            if (cnxn != null) {
                if (LOG.isTraceEnabled()) {
                    LOG.trace("Channel inactive caused close {}", cnxn);
                }
                updateHandshakeCountIfStarted(cnxn);
                cnxn.close(ServerCnxn.DisconnectReason.CHANNEL_DISCONNECTED);
            }
        }

        @Override
        public void exceptionCaught(ChannelHandlerContext ctx, Throwable cause) throws Exception {
            LOG.warn("Exception caught", cause);
            NettyServerCnxn cnxn = ctx.channel().attr(CONNECTION_ATTRIBUTE).getAndSet(null);
            if (cnxn != null) {
                LOG.debug("Closing {}", cnxn);
                updateHandshakeCountIfStarted(cnxn);
                cnxn.close(ServerCnxn.DisconnectReason.CHANNEL_CLOSED_EXCEPTION);
            }
        }

        @Override
        public void userEventTriggered(ChannelHandlerContext ctx, Object evt) throws Exception {
            try {
                if (evt == NettyServerCnxn.ReadEvent.ENABLE) {
                    LOG.debug("Received ReadEvent.ENABLE");
                    NettyServerCnxn cnxn = ctx.channel().attr(CONNECTION_ATTRIBUTE).get();
                    // TODO: Not sure if cnxn can be null here. It becomes null if channelInactive()
                    // or exceptionCaught() trigger, but it's unclear to me if userEventTriggered() can run
                    // after either of those. Check for null just to be safe ...
                    if (cnxn != null) {
                        if (cnxn.getQueuedReadableBytes() > 0) {
                            cnxn.processQueuedBuffer();
                            if (advancedFlowControlEnabled && cnxn.getQueuedReadableBytes() == 0) {
                                // trigger a read if we have consumed all backlog
                                ctx.read();
                                LOG.debug("Issued a read after queuedBuffer drained");
                            }
                        }
                    }
                    if (!advancedFlowControlEnabled) {
                        ctx.channel().config().setAutoRead(true);
                    }
                } else if (evt == NettyServerCnxn.ReadEvent.DISABLE) {
                    LOG.debug("Received ReadEvent.DISABLE");
                    ctx.channel().config().setAutoRead(false);
                }
            } finally {
                ReferenceCountUtil.release(evt);
            }
        }

        @Override
        public void channelRead(ChannelHandlerContext ctx, Object msg) throws Exception {
            try {
                if (LOG.isTraceEnabled()) {
                    LOG.trace("message received called {}", msg);
                }
                try {
                    LOG.debug("New message {} from {}", msg, ctx.channel());
                    NettyServerCnxn cnxn = ctx.channel().attr(CONNECTION_ATTRIBUTE).get();
                    if (cnxn == null) {
                        LOG.error("channelRead() on a closed or closing NettyServerCnxn");
                    } else {
                        cnxn.processMessage((ByteBuf) msg);
                    }
                } catch (Exception ex) {
                    LOG.error("Unexpected exception in receive", ex);
                    throw ex;
                }
            } finally {
                // The cnxn consumed the buffer's contents; release it here.
                ReferenceCountUtil.release(msg);
            }
        }

        @Override
        public void channelReadComplete(ChannelHandlerContext ctx) throws Exception {
            if (advancedFlowControlEnabled) {
                NettyServerCnxn cnxn = ctx.channel().attr(CONNECTION_ATTRIBUTE).get();
                if (cnxn != null && cnxn.getQueuedReadableBytes() == 0 && cnxn.readIssuedAfterReadComplete == 0) {
                    ctx.read();
                    LOG.debug("Issued a read since we do not have anything to consume after channelReadComplete");
                }
            }
            ctx.fireChannelReadComplete();
        }

        // Use a single listener instance to reduce GC
        // Note: this listener is only added when LOG.isTraceEnabled() is true,
        // so it should not do any work other than trace logging.
        private final GenericFutureListener<Future<Void>> onWriteCompletedTracer = (f) -> {
            if (LOG.isTraceEnabled()) {
                LOG.trace("write success: {}", f.isSuccess());
            }
        };

        @Override
        public void write(ChannelHandlerContext ctx, Object msg, ChannelPromise promise) throws Exception {
            if (LOG.isTraceEnabled()) {
                promise.addListener(onWriteCompletedTracer);
            }
            super.write(ctx, msg, promise);
        }
    }

    /**
     * Listener attached to an SslHandler's handshake future. On success it
     * optionally verifies the client certificate via the configured X509 auth
     * provider and registers the connection; on any failure it closes the cnxn.
     */
    final class CertificateVerifier implements GenericFutureListener<Future<Channel>> {

        private final SslHandler sslHandler;
        private final NettyServerCnxn cnxn;

        CertificateVerifier(SslHandler sslHandler, NettyServerCnxn cnxn) {
            this.sslHandler = sslHandler;
            this.cnxn = cnxn;
        }

        /**
         * Only allow the connection to stay open if certificate passes auth
         */
        public void operationComplete(Future<Channel> future) {
            // Handshake is over either way; release the throttle slot.
            updateHandshakeCountIfStarted(cnxn);
            if (future.isSuccess()) {
                LOG.debug("Successful handshake with session 0x{}", Long.toHexString(cnxn.getSessionId()));
                SSLEngine eng = sslHandler.engine();
                // Don't try to verify certificate if we didn't ask client to present one
                if (eng.getNeedClientAuth() || eng.getWantClientAuth()) {
                    SSLSession session = eng.getSession();
                    try {
                        cnxn.setClientCertificateChain(session.getPeerCertificates());
                    } catch (SSLPeerUnverifiedException e) {
                        if (eng.getNeedClientAuth()) {
                            // Certificate was requested but not present
                            LOG.error("Error getting peer certificates", e);
                            cnxn.close();
                            return;
                        } else {
                            // Certificate was requested but was optional
                            // TODO: what auth info should we set on the connection?
                            final Channel futureChannel = future.getNow();
                            allChannels.add(Objects.requireNonNull(futureChannel));
                            addCnxn(cnxn);
                            return;
                        }
                    } catch (Exception e) {
                        LOG.error("Error getting peer certificates", e);
                        cnxn.close();
                        return;
                    }

                    String authProviderProp = System.getProperty(x509Util.getSslAuthProviderProperty(), "x509");

                    X509AuthenticationProvider authProvider = (X509AuthenticationProvider) ProviderRegistry.getProvider(authProviderProp);

                    if (authProvider == null) {
                        LOG.error("X509 Auth provider not found: {}", authProviderProp);
                        cnxn.close(ServerCnxn.DisconnectReason.AUTH_PROVIDER_NOT_FOUND);
                        return;
                    }

                    KeeperException.Code code = authProvider.handleAuthentication(cnxn, null);
                    if (KeeperException.Code.OK != code) {
                        zkServer.serverStats().incrementAuthFailedCount();
                        LOG.error("Authentication failed for session 0x{}", Long.toHexString(cnxn.getSessionId()));
                        cnxn.close(ServerCnxn.DisconnectReason.SASL_AUTH_FAILURE);
                        return;
                    }
                }

                // Auth passed (or was not required): register the channel/cnxn.
                final Channel futureChannel = future.getNow();
                allChannels.add(Objects.requireNonNull(futureChannel));
                addCnxn(cnxn);
            } else {
                zkServer.serverStats().incrementAuthFailedCount();
                LOG.error("Unsuccessful handshake with session 0x{}", Long.toHexString(cnxn.getSessionId()));
                ServerMetrics.getMetrics().UNSUCCESSFUL_HANDSHAKE.add(1);
                cnxn.close(ServerCnxn.DisconnectReason.FAILED_HANDSHAKE);
            }
        }
    }

    /**
     * Counts read() calls issued between channelReadComplete() events so the
     * advanced-flow-control path can tell whether another read is needed.
     */
    @Sharable
    static class ReadIssuedTrackingHandler extends ChannelDuplexHandler {

        @Override
        public void read(ChannelHandlerContext ctx) throws Exception {
            NettyServerCnxn cnxn = ctx.channel().attr(CONNECTION_ATTRIBUTE).get();
            if (cnxn != null) {
                cnxn.readIssuedAfterReadComplete++;
            }
            ctx.read();
        }

        @Override
        public void channelReadComplete(ChannelHandlerContext ctx) throws Exception {
            NettyServerCnxn cnxn = ctx.channel().attr(CONNECTION_ATTRIBUTE).get();
            if (cnxn != null) {
                cnxn.readIssuedAfterReadComplete = 0;
            }
            ctx.fireChannelReadComplete();
        }
    }

    // Shared (@Sharable) handler instances used by every child pipeline.
    CnxnChannelHandler channelHandler = new CnxnChannelHandler();
    ReadIssuedTrackingHandler readIssuedTrackingHandler = new
ReadIssuedTrackingHandler();

    // Applies the test allocator (if one was installed via setTestAllocator)
    // to both the parent and child channel options.
    private ServerBootstrap configureBootstrapAllocator(ServerBootstrap bootstrap) {
        ByteBufAllocator testAllocator = TEST_ALLOCATOR.get();
        if (testAllocator != null) {
            return bootstrap.option(ChannelOption.ALLOCATOR, testAllocator)
                            .childOption(ChannelOption.ALLOCATOR, testAllocator);
        } else {
            return bootstrap;
        }
    }

    /**
     * Builds the (unbound) ServerBootstrap: reads the cert-reload,
     * port-unification, flow-control and handshake-limit system properties,
     * then wires the child pipeline (optional SSL + the shared cnxn handler).
     * Binding happens later in {@link #start()}.
     */
    NettyServerCnxnFactory() {
        x509Util = new ClientX509Util();

        boolean useClientReload = Boolean.getBoolean(CLIENT_CERT_RELOAD_KEY);
        LOG.info("{}={}", CLIENT_CERT_RELOAD_KEY, useClientReload);
        if (useClientReload) {
            try {
                x509Util.enableCertFileReloading();
            } catch (IOException e) {
                // Best-effort feature: log and continue without reloading.
                LOG.error("unable to set up client certificate reload filewatcher", e);
                useClientReload = false;
            }
        }

        boolean usePortUnification = Boolean.getBoolean(PORT_UNIFICATION_KEY);
        LOG.info("{}={}", PORT_UNIFICATION_KEY, usePortUnification);
        if (usePortUnification) {
            try {
                QuorumPeerConfig.configureSSLAuth();
            } catch (QuorumPeerConfig.ConfigException e) {
                LOG.error("unable to set up SslAuthProvider, turning off client port unification", e);
                usePortUnification = false;
            }
        }
        this.shouldUsePortUnification = usePortUnification;

        this.advancedFlowControlEnabled = Boolean.getBoolean(NETTY_ADVANCED_FLOW_CONTROL);
        LOG.info("{} = {}", NETTY_ADVANCED_FLOW_CONTROL, this.advancedFlowControlEnabled);

        setOutstandingHandshakeLimit(Integer.getInteger(OUTSTANDING_HANDSHAKE_LIMIT, -1));

        EventLoopGroup bossGroup = NettyUtils.newNioOrEpollEventLoopGroup(NettyUtils.getClientReachableLocalInetAddressCount());
        EventLoopGroup workerGroup = NettyUtils.newNioOrEpollEventLoopGroup();
        ServerBootstrap bootstrap = new ServerBootstrap().group(bossGroup, workerGroup)
                                                         .channel(NettyUtils.nioOrEpollServerSocketChannel())
                                                         // parent channel options
                                                         .option(ChannelOption.SO_REUSEADDR, true)
                                                         // child channels options
                                                         .childOption(ChannelOption.TCP_NODELAY, true)
                                                         .childOption(ChannelOption.SO_LINGER, -1)
                                                         .childHandler(new ChannelInitializer<SocketChannel>() {
                                                             @Override
                                                             protected void initChannel(SocketChannel ch) throws Exception {
                                                                 ChannelPipeline pipeline = ch.pipeline();
                                                                 if (advancedFlowControlEnabled) {
                                                                     pipeline.addLast(readIssuedTrackingHandler);
                                                                 }
                                                                 if (secure) {
                                                                     initSSL(pipeline, false);
                                                                 } else if (shouldUsePortUnification) {
                                                                     initSSL(pipeline, true);
                                                                 }
                                                                 pipeline.addLast("servercnxnfactory", channelHandler);
                                                             }
                                                         });
        this.bootstrap = configureBootstrapAllocator(bootstrap);
        this.bootstrap.validate();
    }

    /**
     * Adds the "ssl" handler to the given pipeline, using the key/trust
     * managers of the configured auth provider when one is set.
     *
     * @param p                the child channel pipeline to extend
     * @param supportPlaintext if true, install the dual-mode handler that also
     *                         accepts plaintext connections
     * @throws X509Exception if no SSLContext can be created for the provider
     * @throws SSLException  on SSL handler construction failure
     */
    private synchronized void initSSL(ChannelPipeline p, boolean supportPlaintext) throws X509Exception, SSLException {
        String authProviderProp = System.getProperty(x509Util.getSslAuthProviderProperty());
        SslContext nettySslContext;
        if (authProviderProp == null) {
            nettySslContext = x509Util.createNettySslContextForServer(new ZKConfig());
        } else {
            X509AuthenticationProvider authProvider = (X509AuthenticationProvider) ProviderRegistry.getProvider(
                    System.getProperty(x509Util.getSslAuthProviderProperty(), "x509"));

            if (authProvider == null) {
                LOG.error("Auth provider not found: {}", authProviderProp);
                throw new SSLContextException("Could not create SSLContext with specified auth provider: " + authProviderProp);
            }

            nettySslContext = x509Util.createNettySslContextForServer(
                    new ZKConfig(), authProvider.getKeyManager(), authProvider.getTrustManager());
        }

        if (supportPlaintext) {
            p.addLast("ssl", new DualModeSslHandler(nettySslContext));
            LOG.debug("dual mode SSL handler added for channel: {}", p.channel());
        } else {
            p.addLast("ssl", nettySslContext.newHandler(p.channel().alloc()));
            LOG.debug("SSL handler added for channel: {}", p.channel());
        }
    }

    @Override
    public void closeAll(ServerCnxn.DisconnectReason reason) {
        LOG.debug("closeAll()");

        // clear all the connections on which we are selecting
        int length = cnxns.size();
        for (ServerCnxn cnxn : cnxns) {
            try {
                // This will remove the cnxn from cnxns
                cnxn.close(reason);
            } catch (Exception e) {
                LOG.warn("Ignoring exception closing cnxn sessionid 0x{}", Long.toHexString(cnxn.getSessionId()), e);
            }
        }
        LOG.debug("allChannels size: {} cnxns size: {}", allChannels.size(), length);
    }

    @Override
    public void configure(InetSocketAddress addr, int maxClientCnxns, int backlog, boolean secure) throws IOException {
        configureSaslLogin();
        initMaxCnxns();
        localAddress = addr;
        this.maxClientCnxns = maxClientCnxns;
        this.secure = secure;
        this.listenBacklog = backlog;
        LOG.info("configure {} secure: {} on addr {}", this, secure, addr);
    }

    /** {@inheritDoc} */
    public int getMaxClientCnxnsPerHost() {
        return maxClientCnxns;
    }

    /** {@inheritDoc} */
    public void setMaxClientCnxnsPerHost(int max) {
        maxClientCnxns = max;
    }

    /** {@inheritDoc} */
    public int getSocketListenBacklog() {
        return listenBacklog;
    }

    @Override
    public int getLocalPort() {
        return localAddress.getPort();
    }

    private boolean killed; // use synchronized(this) to access

    /** Blocks the caller until {@link #shutdown()} sets {@code killed}. */
    @Override
    public void join() throws InterruptedException {
        synchronized (this) {
            while (!killed) {
                wait();
            }
        }
    }

    /**
     * Stops the factory: closes the listening channel (then its boss group),
     * closes all connections and channels (then the worker group), shuts down
     * the ZooKeeper server if attached, and finally wakes any join()ers.
     * Idempotent: a second call returns immediately.
     */
    @Override
    public void shutdown() {
        synchronized (this) {
            if (killed) {
                LOG.info("already shutdown {}", localAddress);
                return;
            }
        }
        LOG.info("shutdown called {}", localAddress);

        x509Util.close();

        if (login != null) {
            login.shutdown();
        }

        final EventLoopGroup bossGroup = bootstrap.config().group();
        final EventLoopGroup workerGroup = bootstrap.config().childGroup();
        // null if factory never started
        if (parentChannel != null) {
            ChannelFuture parentCloseFuture = parentChannel.close();
            // Each event loop group is only shut down after the channels it
            // services have finished closing.
            if (bossGroup != null) {
                parentCloseFuture.addListener(future -> {
                    bossGroup.shutdownGracefully();
                });
            }
            closeAll(ServerCnxn.DisconnectReason.SERVER_SHUTDOWN);
            ChannelGroupFuture allChannelsCloseFuture = allChannels.close();
            if (workerGroup != null) {
                allChannelsCloseFuture.addListener(future -> {
                    workerGroup.shutdownGracefully();
                });
            }
        } else {
            if (bossGroup != null) {
                bossGroup.shutdownGracefully();
            }
            if (workerGroup != null) {
                workerGroup.shutdownGracefully();
            }
        }

        if (zkServer != null) {
            zkServer.shutdown();
        }
        synchronized (this) {
            killed = true;
            notifyAll();
        }
    }

    @Override
    public void start() {
        if (listenBacklog != -1) {
            bootstrap.option(ChannelOption.SO_BACKLOG, listenBacklog);
        }
        LOG.info("binding to port {}", localAddress);
        parentChannel = bootstrap.bind(localAddress).syncUninterruptibly().channel();
        // Port changes after bind() if the original port was 0, update
        // localAddress to get the real port.
        localAddress = (InetSocketAddress) parentChannel.localAddress();
        LOG.info("bound to port {}", getLocalPort());
    }

    /**
     * Rebinds the listening socket to a new address, closing the old parent
     * channel afterwards. A no-op when the new address equals the current one
     * (including the wildcard-address-same-port case).
     *
     * @param addr the new address to listen on
     */
    public void reconfigure(InetSocketAddress addr) {
        LOG.info("binding to port {}, {}", addr, localAddress);
        if (addr != null && localAddress != null) {
            if (addr.equals(localAddress) || (addr.getAddress().isAnyLocalAddress()
                                              && localAddress.getAddress().isAnyLocalAddress()
                                              && addr.getPort() == localAddress.getPort())) {
                LOG.info("address is the same, skip rebinding");
                return;
            }
        }

        Channel oldChannel = parentChannel;
        try {
            parentChannel = bootstrap.bind(addr).syncUninterruptibly().channel();
            // Port changes after bind() if the original port was 0, update
            // localAddress to get the real port.
localAddress = (InetSocketAddress) parentChannel.localAddress();
            LOG.info("bound to port {}", getLocalPort());
        } catch (Exception e) {
            LOG.error("Error while reconfiguring", e);
        } finally {
            // The previous listening channel is always closed, even if the
            // new bind failed.
            oldChannel.close();
        }
    }

    @Override
    public void startup(ZooKeeperServer zks, boolean startServer) throws IOException, InterruptedException {
        start();
        setZooKeeperServer(zks);
        if (startServer) {
            zks.startdata();
            zks.startup();
        }
    }

    @Override
    public Iterable<ServerCnxn> getConnections() {
        return cnxns;
    }

    @Override
    public InetSocketAddress getLocalAddress() {
        return localAddress;
    }

    // Registers a connection: adds it to cnxns and bumps the per-IP count.
    private void addCnxn(final NettyServerCnxn cnxn) {
        cnxns.add(cnxn);
        InetAddress addr = ((InetSocketAddress) cnxn.getChannel().remoteAddress()).getAddress();
        ipMap.compute(addr, (a, cnxnCount) -> {
            if (cnxnCount == null) {
                cnxnCount = new AtomicInteger();
            }
            cnxnCount.incrementAndGet();
            return cnxnCount;
        });
    }

    // Decrements the per-IP count, removing the map entry when it hits zero.
    void removeCnxnFromIpMap(NettyServerCnxn cnxn, InetAddress remoteAddress) {
        ipMap.compute(remoteAddress, (addr, cnxnCount) -> {
            if (cnxnCount == null) {
                LOG.error("Unexpected remote address {} when removing cnxn {}", remoteAddress, cnxn);
                return null;
            }
            final int newValue = cnxnCount.decrementAndGet();
            return newValue == 0 ? null : cnxnCount;
        });
    }

    private int getClientCnxnCount(final InetAddress addr) {
        final AtomicInteger count = ipMap.get(addr);
        return count == null ? 0 : count.get();
    }

    @Override
    public void resetAllConnectionStats() {
        // No need to synchronize since cnxns is backed by a ConcurrentHashMap
        for (ServerCnxn c : cnxns) {
            c.resetStats();
        }
    }

    @Override
    public Iterable<Map<String, Object>> getAllConnectionInfo(boolean brief) {
        Set<Map<String, Object>> info = new HashSet<>();
        // No need to synchronize since cnxns is backed by a ConcurrentHashMap
        for (ServerCnxn c : cnxns) {
            info.add(c.getConnectionInfo(brief));
        }
        return info;
    }

    /**
     * Sets the test ByteBufAllocator. This allocator will be used by all
     * future instances of this class.
     * It is not recommended to use this method outside of testing.
     * @param allocator the ByteBufAllocator to use for all netty buffer
     *                  allocations.
     */
    static void setTestAllocator(ByteBufAllocator allocator) {
        TEST_ALLOCATOR.set(allocator);
    }

    /**
     * Clears the test ByteBufAllocator. The default allocator will be used
     * by all future instances of this class.
     * It is not recommended to use this method outside of testing.
     */
    static void clearTestAllocator() {
        TEST_ALLOCATOR.set(null);
    }

    // VisibleForTest
    public void setAdvancedFlowControlEnabled(boolean advancedFlowControlEnabled) {
        this.advancedFlowControlEnabled = advancedFlowControlEnabled;
    }

    // VisibleForTest
    public void setSecure(boolean secure) {
        this.secure = secure;
    }

    // VisibleForTest
    public Channel getParentChannel() {
        return parentChannel;
    }

    public int getOutstandingHandshakeNum() {
        return outstandingHandshake.get();
    }

}
googleapis/google-cloud-java
36,060
java-run/proto-google-cloud-run-v2/src/main/java/com/google/cloud/run/v2/TrafficTargetStatus.java
/* * Copyright 2025 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ // Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/cloud/run/v2/traffic_target.proto // Protobuf Java Version: 3.25.8 package com.google.cloud.run.v2; /** * * * <pre> * Represents the observed state of a single `TrafficTarget` entry. * </pre> * * Protobuf type {@code google.cloud.run.v2.TrafficTargetStatus} */ public final class TrafficTargetStatus extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.cloud.run.v2.TrafficTargetStatus) TrafficTargetStatusOrBuilder { private static final long serialVersionUID = 0L; // Use TrafficTargetStatus.newBuilder() to construct. 
private TrafficTargetStatus(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private TrafficTargetStatus() { type_ = 0; revision_ = ""; tag_ = ""; uri_ = ""; } @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance(UnusedPrivateParameter unused) { return new TrafficTargetStatus(); } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.run.v2.TrafficTargetProto .internal_static_google_cloud_run_v2_TrafficTargetStatus_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.run.v2.TrafficTargetProto .internal_static_google_cloud_run_v2_TrafficTargetStatus_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.run.v2.TrafficTargetStatus.class, com.google.cloud.run.v2.TrafficTargetStatus.Builder.class); } public static final int TYPE_FIELD_NUMBER = 1; private int type_ = 0; /** * * * <pre> * The allocation type for this traffic target. * </pre> * * <code>.google.cloud.run.v2.TrafficTargetAllocationType type = 1;</code> * * @return The enum numeric value on the wire for type. */ @java.lang.Override public int getTypeValue() { return type_; } /** * * * <pre> * The allocation type for this traffic target. * </pre> * * <code>.google.cloud.run.v2.TrafficTargetAllocationType type = 1;</code> * * @return The type. */ @java.lang.Override public com.google.cloud.run.v2.TrafficTargetAllocationType getType() { com.google.cloud.run.v2.TrafficTargetAllocationType result = com.google.cloud.run.v2.TrafficTargetAllocationType.forNumber(type_); return result == null ? com.google.cloud.run.v2.TrafficTargetAllocationType.UNRECOGNIZED : result; } public static final int REVISION_FIELD_NUMBER = 2; @SuppressWarnings("serial") private volatile java.lang.Object revision_ = ""; /** * * * <pre> * Revision to which this traffic is sent. 
* </pre> * * <code>string revision = 2 [(.google.api.resource_reference) = { ... }</code> * * @return The revision. */ @java.lang.Override public java.lang.String getRevision() { java.lang.Object ref = revision_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); revision_ = s; return s; } } /** * * * <pre> * Revision to which this traffic is sent. * </pre> * * <code>string revision = 2 [(.google.api.resource_reference) = { ... }</code> * * @return The bytes for revision. */ @java.lang.Override public com.google.protobuf.ByteString getRevisionBytes() { java.lang.Object ref = revision_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); revision_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } public static final int PERCENT_FIELD_NUMBER = 3; private int percent_ = 0; /** * * * <pre> * Specifies percent of the traffic to this Revision. * </pre> * * <code>int32 percent = 3;</code> * * @return The percent. */ @java.lang.Override public int getPercent() { return percent_; } public static final int TAG_FIELD_NUMBER = 4; @SuppressWarnings("serial") private volatile java.lang.Object tag_ = ""; /** * * * <pre> * Indicates the string used in the URI to exclusively reference this target. * </pre> * * <code>string tag = 4;</code> * * @return The tag. */ @java.lang.Override public java.lang.String getTag() { java.lang.Object ref = tag_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); tag_ = s; return s; } } /** * * * <pre> * Indicates the string used in the URI to exclusively reference this target. * </pre> * * <code>string tag = 4;</code> * * @return The bytes for tag. 
*/ @java.lang.Override public com.google.protobuf.ByteString getTagBytes() { java.lang.Object ref = tag_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); tag_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } public static final int URI_FIELD_NUMBER = 5; @SuppressWarnings("serial") private volatile java.lang.Object uri_ = ""; /** * * * <pre> * Displays the target URI. * </pre> * * <code>string uri = 5;</code> * * @return The uri. */ @java.lang.Override public java.lang.String getUri() { java.lang.Object ref = uri_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); uri_ = s; return s; } } /** * * * <pre> * Displays the target URI. * </pre> * * <code>string uri = 5;</code> * * @return The bytes for uri. */ @java.lang.Override public com.google.protobuf.ByteString getUriBytes() { java.lang.Object ref = uri_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); uri_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { if (type_ != com.google.cloud.run.v2.TrafficTargetAllocationType .TRAFFIC_TARGET_ALLOCATION_TYPE_UNSPECIFIED .getNumber()) { output.writeEnum(1, type_); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(revision_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 2, revision_); } if (percent_ != 0) { 
output.writeInt32(3, percent_); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(tag_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 4, tag_); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(uri_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 5, uri_); } getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (type_ != com.google.cloud.run.v2.TrafficTargetAllocationType .TRAFFIC_TARGET_ALLOCATION_TYPE_UNSPECIFIED .getNumber()) { size += com.google.protobuf.CodedOutputStream.computeEnumSize(1, type_); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(revision_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, revision_); } if (percent_ != 0) { size += com.google.protobuf.CodedOutputStream.computeInt32Size(3, percent_); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(tag_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(4, tag_); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(uri_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(5, uri_); } size += getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof com.google.cloud.run.v2.TrafficTargetStatus)) { return super.equals(obj); } com.google.cloud.run.v2.TrafficTargetStatus other = (com.google.cloud.run.v2.TrafficTargetStatus) obj; if (type_ != other.type_) return false; if (!getRevision().equals(other.getRevision())) return false; if (getPercent() != other.getPercent()) return false; if (!getTag().equals(other.getTag())) return false; if (!getUri().equals(other.getUri())) return false; if (!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @java.lang.Override public int hashCode() { if 
(memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); hash = (37 * hash) + TYPE_FIELD_NUMBER; hash = (53 * hash) + type_; hash = (37 * hash) + REVISION_FIELD_NUMBER; hash = (53 * hash) + getRevision().hashCode(); hash = (37 * hash) + PERCENT_FIELD_NUMBER; hash = (53 * hash) + getPercent(); hash = (37 * hash) + TAG_FIELD_NUMBER; hash = (53 * hash) + getTag().hashCode(); hash = (37 * hash) + URI_FIELD_NUMBER; hash = (53 * hash) + getUri().hashCode(); hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static com.google.cloud.run.v2.TrafficTargetStatus parseFrom(java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.run.v2.TrafficTargetStatus parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.run.v2.TrafficTargetStatus parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.run.v2.TrafficTargetStatus parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.run.v2.TrafficTargetStatus parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.run.v2.TrafficTargetStatus parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static 
com.google.cloud.run.v2.TrafficTargetStatus parseFrom(java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.run.v2.TrafficTargetStatus parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.run.v2.TrafficTargetStatus parseDelimitedFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); } public static com.google.cloud.run.v2.TrafficTargetStatus parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.run.v2.TrafficTargetStatus parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.run.v2.TrafficTargetStatus parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder(com.google.cloud.run.v2.TrafficTargetStatus prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? 
new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * * * <pre> * Represents the observed state of a single `TrafficTarget` entry. * </pre> * * Protobuf type {@code google.cloud.run.v2.TrafficTargetStatus} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:google.cloud.run.v2.TrafficTargetStatus) com.google.cloud.run.v2.TrafficTargetStatusOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.run.v2.TrafficTargetProto .internal_static_google_cloud_run_v2_TrafficTargetStatus_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.run.v2.TrafficTargetProto .internal_static_google_cloud_run_v2_TrafficTargetStatus_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.run.v2.TrafficTargetStatus.class, com.google.cloud.run.v2.TrafficTargetStatus.Builder.class); } // Construct using com.google.cloud.run.v2.TrafficTargetStatus.newBuilder() private Builder() {} private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); } @java.lang.Override public Builder clear() { super.clear(); bitField0_ = 0; type_ = 0; revision_ = ""; percent_ = 0; tag_ = ""; uri_ = ""; return this; } @java.lang.Override public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return com.google.cloud.run.v2.TrafficTargetProto .internal_static_google_cloud_run_v2_TrafficTargetStatus_descriptor; } @java.lang.Override public com.google.cloud.run.v2.TrafficTargetStatus getDefaultInstanceForType() { return com.google.cloud.run.v2.TrafficTargetStatus.getDefaultInstance(); } @java.lang.Override 
public com.google.cloud.run.v2.TrafficTargetStatus build() { com.google.cloud.run.v2.TrafficTargetStatus result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public com.google.cloud.run.v2.TrafficTargetStatus buildPartial() { com.google.cloud.run.v2.TrafficTargetStatus result = new com.google.cloud.run.v2.TrafficTargetStatus(this); if (bitField0_ != 0) { buildPartial0(result); } onBuilt(); return result; } private void buildPartial0(com.google.cloud.run.v2.TrafficTargetStatus result) { int from_bitField0_ = bitField0_; if (((from_bitField0_ & 0x00000001) != 0)) { result.type_ = type_; } if (((from_bitField0_ & 0x00000002) != 0)) { result.revision_ = revision_; } if (((from_bitField0_ & 0x00000004) != 0)) { result.percent_ = percent_; } if (((from_bitField0_ & 0x00000008) != 0)) { result.tag_ = tag_; } if (((from_bitField0_ & 0x00000010) != 0)) { result.uri_ = uri_; } } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof 
com.google.cloud.run.v2.TrafficTargetStatus) { return mergeFrom((com.google.cloud.run.v2.TrafficTargetStatus) other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(com.google.cloud.run.v2.TrafficTargetStatus other) { if (other == com.google.cloud.run.v2.TrafficTargetStatus.getDefaultInstance()) return this; if (other.type_ != 0) { setTypeValue(other.getTypeValue()); } if (!other.getRevision().isEmpty()) { revision_ = other.revision_; bitField0_ |= 0x00000002; onChanged(); } if (other.getPercent() != 0) { setPercent(other.getPercent()); } if (!other.getTag().isEmpty()) { tag_ = other.tag_; bitField0_ |= 0x00000008; onChanged(); } if (!other.getUri().isEmpty()) { uri_ = other.uri_; bitField0_ |= 0x00000010; onChanged(); } this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 8: { type_ = input.readEnum(); bitField0_ |= 0x00000001; break; } // case 8 case 18: { revision_ = input.readStringRequireUtf8(); bitField0_ |= 0x00000002; break; } // case 18 case 24: { percent_ = input.readInt32(); bitField0_ |= 0x00000004; break; } // case 24 case 34: { tag_ = input.readStringRequireUtf8(); bitField0_ |= 0x00000008; break; } // case 34 case 42: { uri_ = input.readStringRequireUtf8(); bitField0_ |= 0x00000010; break; } // case 42 default: { if (!super.parseUnknownField(input, extensionRegistry, tag)) { done = true; // was an endgroup tag } break; } // default: } // switch (tag) } // while (!done) } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw 
e.unwrapIOException(); } finally { onChanged(); } // finally return this; } private int bitField0_; private int type_ = 0; /** * * * <pre> * The allocation type for this traffic target. * </pre> * * <code>.google.cloud.run.v2.TrafficTargetAllocationType type = 1;</code> * * @return The enum numeric value on the wire for type. */ @java.lang.Override public int getTypeValue() { return type_; } /** * * * <pre> * The allocation type for this traffic target. * </pre> * * <code>.google.cloud.run.v2.TrafficTargetAllocationType type = 1;</code> * * @param value The enum numeric value on the wire for type to set. * @return This builder for chaining. */ public Builder setTypeValue(int value) { type_ = value; bitField0_ |= 0x00000001; onChanged(); return this; } /** * * * <pre> * The allocation type for this traffic target. * </pre> * * <code>.google.cloud.run.v2.TrafficTargetAllocationType type = 1;</code> * * @return The type. */ @java.lang.Override public com.google.cloud.run.v2.TrafficTargetAllocationType getType() { com.google.cloud.run.v2.TrafficTargetAllocationType result = com.google.cloud.run.v2.TrafficTargetAllocationType.forNumber(type_); return result == null ? com.google.cloud.run.v2.TrafficTargetAllocationType.UNRECOGNIZED : result; } /** * * * <pre> * The allocation type for this traffic target. * </pre> * * <code>.google.cloud.run.v2.TrafficTargetAllocationType type = 1;</code> * * @param value The type to set. * @return This builder for chaining. */ public Builder setType(com.google.cloud.run.v2.TrafficTargetAllocationType value) { if (value == null) { throw new NullPointerException(); } bitField0_ |= 0x00000001; type_ = value.getNumber(); onChanged(); return this; } /** * * * <pre> * The allocation type for this traffic target. * </pre> * * <code>.google.cloud.run.v2.TrafficTargetAllocationType type = 1;</code> * * @return This builder for chaining. 
*/ public Builder clearType() { bitField0_ = (bitField0_ & ~0x00000001); type_ = 0; onChanged(); return this; } private java.lang.Object revision_ = ""; /** * * * <pre> * Revision to which this traffic is sent. * </pre> * * <code>string revision = 2 [(.google.api.resource_reference) = { ... }</code> * * @return The revision. */ public java.lang.String getRevision() { java.lang.Object ref = revision_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); revision_ = s; return s; } else { return (java.lang.String) ref; } } /** * * * <pre> * Revision to which this traffic is sent. * </pre> * * <code>string revision = 2 [(.google.api.resource_reference) = { ... }</code> * * @return The bytes for revision. */ public com.google.protobuf.ByteString getRevisionBytes() { java.lang.Object ref = revision_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); revision_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * * * <pre> * Revision to which this traffic is sent. * </pre> * * <code>string revision = 2 [(.google.api.resource_reference) = { ... }</code> * * @param value The revision to set. * @return This builder for chaining. */ public Builder setRevision(java.lang.String value) { if (value == null) { throw new NullPointerException(); } revision_ = value; bitField0_ |= 0x00000002; onChanged(); return this; } /** * * * <pre> * Revision to which this traffic is sent. * </pre> * * <code>string revision = 2 [(.google.api.resource_reference) = { ... }</code> * * @return This builder for chaining. */ public Builder clearRevision() { revision_ = getDefaultInstance().getRevision(); bitField0_ = (bitField0_ & ~0x00000002); onChanged(); return this; } /** * * * <pre> * Revision to which this traffic is sent. 
* </pre> * * <code>string revision = 2 [(.google.api.resource_reference) = { ... }</code> * * @param value The bytes for revision to set. * @return This builder for chaining. */ public Builder setRevisionBytes(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); revision_ = value; bitField0_ |= 0x00000002; onChanged(); return this; } private int percent_; /** * * * <pre> * Specifies percent of the traffic to this Revision. * </pre> * * <code>int32 percent = 3;</code> * * @return The percent. */ @java.lang.Override public int getPercent() { return percent_; } /** * * * <pre> * Specifies percent of the traffic to this Revision. * </pre> * * <code>int32 percent = 3;</code> * * @param value The percent to set. * @return This builder for chaining. */ public Builder setPercent(int value) { percent_ = value; bitField0_ |= 0x00000004; onChanged(); return this; } /** * * * <pre> * Specifies percent of the traffic to this Revision. * </pre> * * <code>int32 percent = 3;</code> * * @return This builder for chaining. */ public Builder clearPercent() { bitField0_ = (bitField0_ & ~0x00000004); percent_ = 0; onChanged(); return this; } private java.lang.Object tag_ = ""; /** * * * <pre> * Indicates the string used in the URI to exclusively reference this target. * </pre> * * <code>string tag = 4;</code> * * @return The tag. */ public java.lang.String getTag() { java.lang.Object ref = tag_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); tag_ = s; return s; } else { return (java.lang.String) ref; } } /** * * * <pre> * Indicates the string used in the URI to exclusively reference this target. * </pre> * * <code>string tag = 4;</code> * * @return The bytes for tag. 
*/ public com.google.protobuf.ByteString getTagBytes() { java.lang.Object ref = tag_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); tag_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * * * <pre> * Indicates the string used in the URI to exclusively reference this target. * </pre> * * <code>string tag = 4;</code> * * @param value The tag to set. * @return This builder for chaining. */ public Builder setTag(java.lang.String value) { if (value == null) { throw new NullPointerException(); } tag_ = value; bitField0_ |= 0x00000008; onChanged(); return this; } /** * * * <pre> * Indicates the string used in the URI to exclusively reference this target. * </pre> * * <code>string tag = 4;</code> * * @return This builder for chaining. */ public Builder clearTag() { tag_ = getDefaultInstance().getTag(); bitField0_ = (bitField0_ & ~0x00000008); onChanged(); return this; } /** * * * <pre> * Indicates the string used in the URI to exclusively reference this target. * </pre> * * <code>string tag = 4;</code> * * @param value The bytes for tag to set. * @return This builder for chaining. */ public Builder setTagBytes(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); tag_ = value; bitField0_ |= 0x00000008; onChanged(); return this; } private java.lang.Object uri_ = ""; /** * * * <pre> * Displays the target URI. * </pre> * * <code>string uri = 5;</code> * * @return The uri. */ public java.lang.String getUri() { java.lang.Object ref = uri_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); uri_ = s; return s; } else { return (java.lang.String) ref; } } /** * * * <pre> * Displays the target URI. * </pre> * * <code>string uri = 5;</code> * * @return The bytes for uri. 
*/ public com.google.protobuf.ByteString getUriBytes() { java.lang.Object ref = uri_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); uri_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * * * <pre> * Displays the target URI. * </pre> * * <code>string uri = 5;</code> * * @param value The uri to set. * @return This builder for chaining. */ public Builder setUri(java.lang.String value) { if (value == null) { throw new NullPointerException(); } uri_ = value; bitField0_ |= 0x00000010; onChanged(); return this; } /** * * * <pre> * Displays the target URI. * </pre> * * <code>string uri = 5;</code> * * @return This builder for chaining. */ public Builder clearUri() { uri_ = getDefaultInstance().getUri(); bitField0_ = (bitField0_ & ~0x00000010); onChanged(); return this; } /** * * * <pre> * Displays the target URI. * </pre> * * <code>string uri = 5;</code> * * @param value The bytes for uri to set. * @return This builder for chaining. 
*/ public Builder setUriBytes(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); uri_ = value; bitField0_ |= 0x00000010; onChanged(); return this; } @java.lang.Override public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:google.cloud.run.v2.TrafficTargetStatus) } // @@protoc_insertion_point(class_scope:google.cloud.run.v2.TrafficTargetStatus) private static final com.google.cloud.run.v2.TrafficTargetStatus DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new com.google.cloud.run.v2.TrafficTargetStatus(); } public static com.google.cloud.run.v2.TrafficTargetStatus getDefaultInstance() { return DEFAULT_INSTANCE; } private static final com.google.protobuf.Parser<TrafficTargetStatus> PARSER = new com.google.protobuf.AbstractParser<TrafficTargetStatus>() { @java.lang.Override public TrafficTargetStatus parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { Builder builder = newBuilder(); try { builder.mergeFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(builder.buildPartial()); } catch (com.google.protobuf.UninitializedMessageException e) { throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException(e) .setUnfinishedMessage(builder.buildPartial()); } return builder.buildPartial(); } }; public static com.google.protobuf.Parser<TrafficTargetStatus> parser() { return PARSER; } 
@java.lang.Override public com.google.protobuf.Parser<TrafficTargetStatus> getParserForType() { return PARSER; } @java.lang.Override public com.google.cloud.run.v2.TrafficTargetStatus getDefaultInstanceForType() { return DEFAULT_INSTANCE; } }
apache/gravitino
36,187
server/src/test/java/org/apache/gravitino/server/web/rest/TestStatisticOperations.java
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.apache.gravitino.server.web.rest; import static javax.ws.rs.client.Entity.entity; import static org.apache.gravitino.Configs.TREE_LOCK_CLEAN_INTERVAL; import static org.apache.gravitino.Configs.TREE_LOCK_MAX_NODE_IN_MEMORY; import static org.apache.gravitino.Configs.TREE_LOCK_MIN_NODE_IN_MEMORY; import static org.mockito.ArgumentMatchers.any; import static org.mockito.Mockito.doThrow; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.when; import com.google.common.collect.Lists; import com.google.common.collect.Maps; import java.io.IOException; import java.time.Instant; import java.util.List; import java.util.Map; import java.util.Optional; import javax.servlet.http.HttpServletRequest; import javax.ws.rs.core.Application; import javax.ws.rs.core.MediaType; import javax.ws.rs.core.Response; import org.apache.commons.lang3.reflect.FieldUtils; import org.apache.gravitino.Config; import org.apache.gravitino.GravitinoEnv; import org.apache.gravitino.MetadataObject; import org.apache.gravitino.MetadataObjects; import org.apache.gravitino.catalog.TableDispatcher; import org.apache.gravitino.dto.requests.PartitionStatisticsDropRequest; import 
org.apache.gravitino.dto.requests.PartitionStatisticsUpdateRequest; import org.apache.gravitino.dto.requests.StatisticsDropRequest; import org.apache.gravitino.dto.requests.StatisticsUpdateRequest; import org.apache.gravitino.dto.responses.BaseResponse; import org.apache.gravitino.dto.responses.DropResponse; import org.apache.gravitino.dto.responses.ErrorConstants; import org.apache.gravitino.dto.responses.ErrorResponse; import org.apache.gravitino.dto.responses.PartitionStatisticsListResponse; import org.apache.gravitino.dto.responses.StatisticListResponse; import org.apache.gravitino.dto.stats.PartitionStatisticsDropDTO; import org.apache.gravitino.dto.stats.PartitionStatisticsUpdateDTO; import org.apache.gravitino.dto.stats.StatisticDTO; import org.apache.gravitino.dto.util.DTOConverters; import org.apache.gravitino.exceptions.IllegalStatisticNameException; import org.apache.gravitino.exceptions.NoSuchMetadataObjectException; import org.apache.gravitino.exceptions.UnmodifiableStatisticException; import org.apache.gravitino.lock.LockManager; import org.apache.gravitino.meta.AuditInfo; import org.apache.gravitino.rest.RESTUtils; import org.apache.gravitino.stats.PartitionRange; import org.apache.gravitino.stats.PartitionStatistics; import org.apache.gravitino.stats.Statistic; import org.apache.gravitino.stats.StatisticManager; import org.apache.gravitino.stats.StatisticValue; import org.apache.gravitino.stats.StatisticValues; import org.glassfish.jersey.internal.inject.AbstractBinder; import org.glassfish.jersey.server.ResourceConfig; import org.glassfish.jersey.test.JerseyTest; import org.glassfish.jersey.test.TestProperties; import org.junit.jupiter.api.Assertions; import org.junit.jupiter.api.BeforeAll; import org.junit.jupiter.api.Test; import org.mockito.Mockito; public class TestStatisticOperations extends JerseyTest { private static class MockServletRequestFactory extends ServletRequestFactoryBase { @Override public HttpServletRequest get() { 
HttpServletRequest request = mock(HttpServletRequest.class); when(request.getRemoteUser()).thenReturn(null); return request; } } private static TableDispatcher tableDispatcher = mock(TableDispatcher.class); private StatisticManager manager = mock(StatisticManager.class); private final String metalake = "metalake1"; private final String catalog = "catalog1"; private final String schema = "schema1"; private final String table = "table1"; @BeforeAll public static void setup() throws IllegalAccessException { Config config = mock(Config.class); Mockito.doReturn(100000L).when(config).get(TREE_LOCK_MAX_NODE_IN_MEMORY); Mockito.doReturn(1000L).when(config).get(TREE_LOCK_MIN_NODE_IN_MEMORY); Mockito.doReturn(36000L).when(config).get(TREE_LOCK_CLEAN_INTERVAL); FieldUtils.writeField(GravitinoEnv.getInstance(), "lockManager", new LockManager(config), true); FieldUtils.writeField(GravitinoEnv.getInstance(), "tableDispatcher", tableDispatcher, true); } @Override protected Application configure() { try { forceSet( TestProperties.CONTAINER_PORT, String.valueOf(RESTUtils.findAvailablePort(2000, 3000))); } catch (IOException e) { throw new RuntimeException(e); } ResourceConfig resourceConfig = new ResourceConfig(); resourceConfig.register(StatisticOperations.class); resourceConfig.register( new AbstractBinder() { @Override protected void configure() { bind(manager).to(StatisticManager.class).ranked(2); bindFactory(MockServletRequestFactory.class).to(HttpServletRequest.class); } }); return resourceConfig; } @Test public void testListTableStatistics() { AuditInfo auditInfo = AuditInfo.builder() .withCreateTime(Instant.now()) .withCreator("test") .withLastModifiedTime(Instant.now()) .withLastModifier("test") .build(); Statistic stat1 = new StatisticManager.CustomStatistic( "test", StatisticValues.stringValue("test"), auditInfo); Statistic stat2 = new StatisticManager.CustomStatistic("test2", StatisticValues.longValue(1L), auditInfo); when(manager.listStatistics(any(), 
any())).thenReturn(Lists.newArrayList(stat1, stat2)); when(tableDispatcher.tableExists(any())).thenReturn(true); MetadataObject tableObject = MetadataObjects.parse( String.format("%s.%s.%s", catalog, schema, table), MetadataObject.Type.TABLE); Response resp = target( "/metalakes/" + metalake + "/objects/" + tableObject.type() + "/" + tableObject.fullName() + "/statistics") .request(MediaType.APPLICATION_JSON_TYPE) .accept("application/vnd.gravitino.v1+json") .get(); Assertions.assertEquals(Response.Status.OK.getStatusCode(), resp.getStatus()); Assertions.assertEquals(MediaType.APPLICATION_JSON_TYPE, resp.getMediaType()); StatisticListResponse listResp = resp.readEntity(StatisticListResponse.class); listResp.validate(); Assertions.assertEquals(0, listResp.getCode()); StatisticDTO[] statisticDTOS = listResp.getStatistics(); Assertions.assertEquals(2, statisticDTOS.length); Assertions.assertEquals(stat1.name(), statisticDTOS[0].name()); Assertions.assertEquals(DTOConverters.toDTO(auditInfo), statisticDTOS[0].auditInfo()); Assertions.assertEquals(stat1.value().get(), statisticDTOS[0].value().get()); Assertions.assertEquals(stat2.name(), statisticDTOS[1].name()); Assertions.assertEquals(stat2.value().get(), statisticDTOS[1].value().get()); Assertions.assertEquals(DTOConverters.toDTO(auditInfo), statisticDTOS[1].auditInfo()); // Test throw NoSuchMetadataObjectException when(tableDispatcher.tableExists(any())).thenReturn(false); Response resp1 = target( "/metalakes/" + metalake + "/objects/" + tableObject.type() + "/" + tableObject.fullName() + "/statistics") .request(MediaType.APPLICATION_JSON_TYPE) .accept("application/vnd.gravitino.v1+json") .get(); Assertions.assertEquals(Response.Status.NOT_FOUND.getStatusCode(), resp1.getStatus()); Assertions.assertEquals(MediaType.APPLICATION_JSON_TYPE, resp1.getMediaType()); ErrorResponse errorResp = resp1.readEntity(ErrorResponse.class); Assertions.assertEquals(ErrorConstants.NOT_FOUND_CODE, errorResp.getCode()); 
Assertions.assertEquals( NoSuchMetadataObjectException.class.getSimpleName(), errorResp.getType()); // Test throw RuntimeException when(tableDispatcher.tableExists(any())).thenReturn(true); doThrow(new RuntimeException("mock error")).when(manager).listStatistics(any(), any()); Response resp2 = target( "/metalakes/" + metalake + "/objects/" + tableObject.type() + "/" + tableObject.fullName() + "/statistics") .request(MediaType.APPLICATION_JSON_TYPE) .accept("application/vnd.gravitino.v1+json") .get(); Assertions.assertEquals( Response.Status.INTERNAL_SERVER_ERROR.getStatusCode(), resp2.getStatus()); Assertions.assertEquals(MediaType.APPLICATION_JSON_TYPE, resp2.getMediaType()); ErrorResponse errorResp2 = resp2.readEntity(ErrorResponse.class); Assertions.assertEquals(ErrorConstants.INTERNAL_ERROR_CODE, errorResp2.getCode()); Assertions.assertEquals(RuntimeException.class.getSimpleName(), errorResp2.getType()); } @Test public void testUpdateTableStatistics() { Map<String, StatisticValue<?>> statsMap = Maps.newHashMap(); statsMap.put(Statistic.CUSTOM_PREFIX + "test1", StatisticValues.stringValue("test")); statsMap.put(Statistic.CUSTOM_PREFIX + "test2", StatisticValues.longValue(1L)); StatisticsUpdateRequest req = new StatisticsUpdateRequest(statsMap); MetadataObject tableObject = MetadataObjects.parse( String.format("%s.%s.%s", catalog, schema, table), MetadataObject.Type.TABLE); when(tableDispatcher.tableExists(any())).thenReturn(true); Response resp = target( "/metalakes/" + metalake + "/objects/" + tableObject.type() + "/" + tableObject.fullName() + "/statistics") .request(MediaType.APPLICATION_JSON_TYPE) .accept("application/vnd.gravitino.v1+json") .put(entity(req, MediaType.APPLICATION_JSON_TYPE)); Assertions.assertEquals(Response.Status.OK.getStatusCode(), resp.getStatus()); Assertions.assertEquals(MediaType.APPLICATION_JSON_TYPE, resp.getMediaType()); BaseResponse updateResp = resp.readEntity(BaseResponse.class); Assertions.assertEquals(0, updateResp.getCode()); 
// Test throw NoSuchMetadataObjectException when(tableDispatcher.tableExists(any())).thenReturn(false); Response resp1 = target( "/metalakes/" + metalake + "/objects/" + tableObject.type() + "/" + tableObject.fullName() + "/statistics") .request(MediaType.APPLICATION_JSON_TYPE) .accept("application/vnd.gravitino.v1+json") .put(entity(req, MediaType.APPLICATION_JSON_TYPE)); Assertions.assertEquals(Response.Status.NOT_FOUND.getStatusCode(), resp1.getStatus()); Assertions.assertEquals(MediaType.APPLICATION_JSON_TYPE, resp1.getMediaType()); ErrorResponse errorResp = resp1.readEntity(ErrorResponse.class); Assertions.assertEquals(ErrorConstants.NOT_FOUND_CODE, errorResp.getCode()); Assertions.assertEquals( NoSuchMetadataObjectException.class.getSimpleName(), errorResp.getType()); // Test throw RuntimeException when(tableDispatcher.tableExists(any())).thenReturn(true); doThrow(new RuntimeException("mock error")).when(manager).updateStatistics(any(), any(), any()); Response resp2 = target( "/metalakes/" + metalake + "/objects/" + tableObject.type() + "/" + tableObject.fullName() + "/statistics") .request(MediaType.APPLICATION_JSON_TYPE) .accept("application/vnd.gravitino.v1+json") .put(entity(req, MediaType.APPLICATION_JSON_TYPE)); Assertions.assertEquals( Response.Status.INTERNAL_SERVER_ERROR.getStatusCode(), resp2.getStatus()); Assertions.assertEquals(MediaType.APPLICATION_JSON_TYPE, resp2.getMediaType()); ErrorResponse errorResp2 = resp2.readEntity(ErrorResponse.class); Assertions.assertEquals(ErrorConstants.INTERNAL_ERROR_CODE, errorResp2.getCode()); Assertions.assertEquals(RuntimeException.class.getSimpleName(), errorResp2.getType()); // Test throw IllegalStatisticNameException statsMap.put("test1", StatisticValues.longValue(1L)); req = new StatisticsUpdateRequest(statsMap); Response resp3 = target( "/metalakes/" + metalake + "/objects/" + tableObject.type() + "/" + tableObject.fullName() + "/statistics") .request(MediaType.APPLICATION_JSON_TYPE) 
.accept("application/vnd.gravitino.v1+json") .put(entity(req, MediaType.APPLICATION_JSON_TYPE)); Assertions.assertEquals(Response.Status.BAD_REQUEST.getStatusCode(), resp3.getStatus()); Assertions.assertEquals(MediaType.APPLICATION_JSON_TYPE, resp3.getMediaType()); ErrorResponse errorResp3 = resp3.readEntity(ErrorResponse.class); Assertions.assertEquals(ErrorConstants.ILLEGAL_ARGUMENTS_CODE, errorResp3.getCode()); Assertions.assertEquals( IllegalStatisticNameException.class.getSimpleName(), errorResp3.getType()); // Test throw UnmodifiableStatisticException doThrow(new UnmodifiableStatisticException("mock error")) .when(manager) .updateStatistics(any(), any(), any()); statsMap.clear(); statsMap.put(Statistic.CUSTOM_PREFIX + "test1", StatisticValues.stringValue("test")); req = new StatisticsUpdateRequest(statsMap); Response resp4 = target( "/metalakes/" + metalake + "/objects/" + tableObject.type() + "/" + tableObject.fullName() + "/statistics") .request(MediaType.APPLICATION_JSON_TYPE) .accept("application/vnd.gravitino.v1+json") .put(entity(req, MediaType.APPLICATION_JSON_TYPE)); Assertions.assertEquals(Response.Status.METHOD_NOT_ALLOWED.getStatusCode(), resp4.getStatus()); Assertions.assertEquals(MediaType.APPLICATION_JSON_TYPE, resp4.getMediaType()); ErrorResponse errorResp4 = resp4.readEntity(ErrorResponse.class); Assertions.assertEquals(ErrorConstants.UNSUPPORTED_OPERATION_CODE, errorResp4.getCode()); Assertions.assertEquals( UnmodifiableStatisticException.class.getSimpleName(), errorResp4.getType()); } @Test public void testDropTableStatistics() { StatisticsDropRequest req = new StatisticsDropRequest(new String[] {"test1", "test2"}); when(manager.dropStatistics(any(), any(), any())).thenReturn(true); when(tableDispatcher.tableExists(any())).thenReturn(true); MetadataObject tableObject = MetadataObjects.parse( String.format("%s.%s.%s", catalog, schema, table), MetadataObject.Type.TABLE); Response resp = target( "/metalakes/" + metalake + "/objects/" + 
tableObject.type() + "/" + tableObject.fullName() + "/statistics") .request(MediaType.APPLICATION_JSON_TYPE) .accept("application/vnd.gravitino.v1+json") .post(entity(req, MediaType.APPLICATION_JSON_TYPE)); Assertions.assertEquals(Response.Status.OK.getStatusCode(), resp.getStatus()); Assertions.assertEquals(MediaType.APPLICATION_JSON_TYPE, resp.getMediaType()); DropResponse dropResp = resp.readEntity(DropResponse.class); Assertions.assertEquals(0, dropResp.getCode()); Assertions.assertTrue(dropResp.dropped()); // Test throw NoSuchMetadataObjectException when(tableDispatcher.tableExists(any())).thenReturn(false); Response resp1 = target( "/metalakes/" + metalake + "/objects/" + tableObject.type() + "/" + tableObject.fullName() + "/statistics") .request(MediaType.APPLICATION_JSON_TYPE) .accept("application/vnd.gravitino.v1+json") .post(entity(req, MediaType.APPLICATION_JSON_TYPE)); Assertions.assertEquals(Response.Status.NOT_FOUND.getStatusCode(), resp1.getStatus()); Assertions.assertEquals(MediaType.APPLICATION_JSON_TYPE, resp1.getMediaType()); ErrorResponse errorResp = resp1.readEntity(ErrorResponse.class); Assertions.assertEquals(ErrorConstants.NOT_FOUND_CODE, errorResp.getCode()); Assertions.assertEquals( NoSuchMetadataObjectException.class.getSimpleName(), errorResp.getType()); // Test throw RuntimeException when(tableDispatcher.tableExists(any())).thenReturn(true); doThrow(new RuntimeException("mock error")).when(manager).dropStatistics(any(), any(), any()); Response resp2 = target( "/metalakes/" + metalake + "/objects/" + tableObject.type() + "/" + tableObject.fullName() + "/statistics") .request(MediaType.APPLICATION_JSON_TYPE) .accept("application/vnd.gravitino.v1+json") .post(entity(req, MediaType.APPLICATION_JSON_TYPE)); Assertions.assertEquals( Response.Status.INTERNAL_SERVER_ERROR.getStatusCode(), resp2.getStatus()); Assertions.assertEquals(MediaType.APPLICATION_JSON_TYPE, resp2.getMediaType()); ErrorResponse errorResp2 = 
resp2.readEntity(ErrorResponse.class); Assertions.assertEquals(ErrorConstants.INTERNAL_ERROR_CODE, errorResp2.getCode()); Assertions.assertEquals(RuntimeException.class.getSimpleName(), errorResp2.getType()); // Test throw UnmodifiableStatisticException doThrow(new UnmodifiableStatisticException("mock error")) .when(manager) .dropStatistics(any(), any(), any()); Response resp3 = target( "/metalakes/" + metalake + "/objects/" + tableObject.type() + "/" + tableObject.fullName() + "/statistics") .request(MediaType.APPLICATION_JSON_TYPE) .accept("application/vnd.gravitino.v1+json") .post(entity(req, MediaType.APPLICATION_JSON_TYPE)); Assertions.assertEquals(Response.Status.METHOD_NOT_ALLOWED.getStatusCode(), resp3.getStatus()); Assertions.assertEquals(MediaType.APPLICATION_JSON_TYPE, resp3.getMediaType()); ErrorResponse errorResp3 = resp3.readEntity(ErrorResponse.class); Assertions.assertEquals(ErrorConstants.UNSUPPORTED_OPERATION_CODE, errorResp3.getCode()); Assertions.assertEquals( UnmodifiableStatisticException.class.getSimpleName(), errorResp3.getType()); } @Test public void testListPartitionStatistics() { AuditInfo auditInfo = AuditInfo.builder() .withCreateTime(Instant.now()) .withCreator("test") .withLastModifiedTime(Instant.now()) .withLastModifier("test") .build(); StatisticDTO stat1 = StatisticDTO.builder() .withName("test1") .withValue(Optional.of(StatisticValues.stringValue("test"))) .withReserved(true) .withModifiable(false) .withAudit(DTOConverters.toDTO(auditInfo)) .build(); StatisticDTO stat2 = StatisticDTO.builder() .withName("test1") .withValue(Optional.of(StatisticValues.longValue(1L))) .withReserved(true) .withModifiable(false) .withAudit(DTOConverters.toDTO(auditInfo)) .build(); PartitionStatistics partitionStatistics = new StatisticManager.CustomPartitionStatistic("partition1", new Statistic[] {stat1, stat2}); MetadataObject tableObject = MetadataObjects.parse( String.format("%s.%s.%s", catalog, schema, table), MetadataObject.Type.TABLE); 
when(manager.listPartitionStatistics(any(), any(), any())) .thenReturn(Lists.newArrayList(partitionStatistics)); when(tableDispatcher.tableExists(any())).thenReturn(true); Response resp = target( "/metalakes/" + metalake + "/objects/" + tableObject.type() + "/" + tableObject.fullName() + "/statistics/partitions") .queryParam("from", "p0") .request(MediaType.APPLICATION_JSON_TYPE) .accept("application/vnd.gravitino.v1+json") .get(); Assertions.assertEquals(Response.Status.OK.getStatusCode(), resp.getStatus()); Assertions.assertEquals(MediaType.APPLICATION_JSON_TYPE, resp.getMediaType()); PartitionStatisticsListResponse listResp = resp.readEntity(PartitionStatisticsListResponse.class); Assertions.assertEquals(0, listResp.getCode()); Statistic[] statisticDTOS = listResp.getPartitionStatistics()[0].statistics(); Assertions.assertEquals(2, statisticDTOS.length); Assertions.assertEquals(stat1.name(), statisticDTOS[0].name()); Assertions.assertEquals(stat1.value().get(), statisticDTOS[0].value().get()); Assertions.assertEquals(stat2.name(), statisticDTOS[1].name()); Assertions.assertEquals(stat2.value().get(), statisticDTOS[1].value().get()); // Test throw NoSuchMetadataObjectException when(tableDispatcher.tableExists(any())).thenReturn(false); Response resp1 = target( "/metalakes/" + metalake + "/objects/" + tableObject.type() + "/" + tableObject.fullName() + "/statistics/partitions") .queryParam("from", "p0") .request(MediaType.APPLICATION_JSON_TYPE) .accept("application/vnd.gravitino.v1+json") .get(); Assertions.assertEquals(Response.Status.NOT_FOUND.getStatusCode(), resp1.getStatus()); Assertions.assertEquals(MediaType.APPLICATION_JSON_TYPE, resp1.getMediaType()); ErrorResponse errorResp = resp1.readEntity(ErrorResponse.class); Assertions.assertEquals(ErrorConstants.NOT_FOUND_CODE, errorResp.getCode()); Assertions.assertEquals( NoSuchMetadataObjectException.class.getSimpleName(), errorResp.getType()); // Test throw RuntimeException 
when(tableDispatcher.tableExists(any())).thenReturn(true); doThrow(new RuntimeException("mock error")) .when(manager) .listPartitionStatistics(any(), any(), any()); Response resp2 = target( "/metalakes/" + metalake + "/objects/" + tableObject.type() + "/" + tableObject.fullName() + "/statistics/partitions") .queryParam("from", "p0") .request(MediaType.APPLICATION_JSON_TYPE) .accept("application/vnd.gravitino.v1+json") .get(); Assertions.assertEquals( Response.Status.INTERNAL_SERVER_ERROR.getStatusCode(), resp2.getStatus()); Assertions.assertEquals(MediaType.APPLICATION_JSON_TYPE, resp2.getMediaType()); ErrorResponse errorResp2 = resp2.readEntity(ErrorResponse.class); Assertions.assertEquals(ErrorConstants.INTERNAL_ERROR_CODE, errorResp2.getCode()); Assertions.assertEquals(RuntimeException.class.getSimpleName(), errorResp2.getType()); } @Test public void testUpdatePartitionStatistics() { Map<String, StatisticValue<?>> statsMap = Maps.newHashMap(); statsMap.put(Statistic.CUSTOM_PREFIX + "test1", StatisticValues.stringValue("test")); statsMap.put(Statistic.CUSTOM_PREFIX + "test2", StatisticValues.longValue(1L)); List<PartitionStatisticsUpdateDTO> partitionStatsList = Lists.newArrayList(); partitionStatsList.add(PartitionStatisticsUpdateDTO.of("partition1", statsMap)); MetadataObject tableObject = MetadataObjects.parse( String.format("%s.%s.%s", catalog, schema, table), MetadataObject.Type.TABLE); PartitionStatisticsUpdateRequest req = new PartitionStatisticsUpdateRequest(partitionStatsList); when(tableDispatcher.tableExists(any())).thenReturn(true); Response resp = target( "/metalakes/" + metalake + "/objects/" + tableObject.type() + "/" + tableObject.fullName() + "/statistics/partitions") .request(MediaType.APPLICATION_JSON_TYPE) .accept("application/vnd.gravitino.v1+json") .put(entity(req, MediaType.APPLICATION_JSON_TYPE)); Assertions.assertEquals(Response.Status.OK.getStatusCode(), resp.getStatus()); Assertions.assertEquals(MediaType.APPLICATION_JSON_TYPE, 
resp.getMediaType()); Assertions.assertEquals(Response.Status.OK.getStatusCode(), resp.getStatus()); Assertions.assertEquals(MediaType.APPLICATION_JSON_TYPE, resp.getMediaType()); BaseResponse updateResp = resp.readEntity(BaseResponse.class); Assertions.assertEquals(0, updateResp.getCode()); // Test throw NoSuchMetadataObjectException when(tableDispatcher.tableExists(any())).thenReturn(false); Response resp1 = target( "/metalakes/" + metalake + "/objects/" + tableObject.type() + "/" + tableObject.fullName() + "/statistics/partitions") .request(MediaType.APPLICATION_JSON_TYPE) .accept("application/vnd.gravitino.v1+json") .put(entity(req, MediaType.APPLICATION_JSON_TYPE)); Assertions.assertEquals(Response.Status.NOT_FOUND.getStatusCode(), resp1.getStatus()); Assertions.assertEquals(MediaType.APPLICATION_JSON_TYPE, resp1.getMediaType()); ErrorResponse errorResp = resp1.readEntity(ErrorResponse.class); Assertions.assertEquals(ErrorConstants.NOT_FOUND_CODE, errorResp.getCode()); Assertions.assertEquals( NoSuchMetadataObjectException.class.getSimpleName(), errorResp.getType()); when(tableDispatcher.tableExists(any())).thenReturn(true); // Test throw RuntimeException doThrow(new RuntimeException("mock error")) .when(manager) .updatePartitionStatistics(any(), any(), any()); Response resp2 = target( "/metalakes/" + metalake + "/objects/" + tableObject.type() + "/" + tableObject.fullName() + "/statistics/partitions") .request(MediaType.APPLICATION_JSON_TYPE) .accept("application/vnd.gravitino.v1+json") .put(entity(req, MediaType.APPLICATION_JSON_TYPE)); Assertions.assertEquals( Response.Status.INTERNAL_SERVER_ERROR.getStatusCode(), resp2.getStatus()); Assertions.assertEquals(MediaType.APPLICATION_JSON_TYPE, resp2.getMediaType()); ErrorResponse errorResp2 = resp2.readEntity(ErrorResponse.class); Assertions.assertEquals(ErrorConstants.INTERNAL_ERROR_CODE, errorResp2.getCode()); Assertions.assertEquals(RuntimeException.class.getSimpleName(), errorResp2.getType()); // Test throw 
IllegalStatisticNameException statsMap.put("test1", StatisticValues.longValue(1L)); req = new PartitionStatisticsUpdateRequest(partitionStatsList); Response resp3 = target( "/metalakes/" + metalake + "/objects/" + tableObject.type() + "/" + tableObject.fullName() + "/statistics/partitions") .request(MediaType.APPLICATION_JSON_TYPE) .accept("application/vnd.gravitino.v1+json") .put(entity(req, MediaType.APPLICATION_JSON_TYPE)); Assertions.assertEquals(Response.Status.BAD_REQUEST.getStatusCode(), resp3.getStatus()); Assertions.assertEquals(MediaType.APPLICATION_JSON_TYPE, resp3.getMediaType()); ErrorResponse errorResp3 = resp3.readEntity(ErrorResponse.class); Assertions.assertEquals(ErrorConstants.ILLEGAL_ARGUMENTS_CODE, errorResp3.getCode()); Assertions.assertEquals( IllegalStatisticNameException.class.getSimpleName(), errorResp3.getType()); // Test throw UnmodifiableStatisticException statsMap.clear(); statsMap.put(Statistic.CUSTOM_PREFIX + "test1", StatisticValues.longValue(1L)); doThrow(new UnmodifiableStatisticException("mock error")) .when(manager) .updatePartitionStatistics(any(), any(), any()); req = new PartitionStatisticsUpdateRequest(partitionStatsList); Response resp4 = target( "/metalakes/" + metalake + "/objects/" + tableObject.type() + "/" + tableObject.fullName() + "/statistics/partitions") .request(MediaType.APPLICATION_JSON_TYPE) .accept("application/vnd.gravitino.v1+json") .put(entity(req, MediaType.APPLICATION_JSON_TYPE)); Assertions.assertEquals(Response.Status.METHOD_NOT_ALLOWED.getStatusCode(), resp4.getStatus()); Assertions.assertEquals(MediaType.APPLICATION_JSON_TYPE, resp4.getMediaType()); ErrorResponse errorResp4 = resp4.readEntity(ErrorResponse.class); Assertions.assertEquals(ErrorConstants.UNSUPPORTED_OPERATION_CODE, errorResp4.getCode()); Assertions.assertEquals( UnmodifiableStatisticException.class.getSimpleName(), errorResp4.getType()); } @Test public void testDropPartitionStatistics() { List<PartitionStatisticsDropDTO> 
partitionStatistics = Lists.newArrayList(); partitionStatistics.add( PartitionStatisticsDropDTO.of("partition1", Lists.newArrayList("stat1", "stat2"))); PartitionStatisticsDropRequest req = new PartitionStatisticsDropRequest(partitionStatistics); when(manager.dropPartitionStatistics(any(), any(), any())).thenReturn(true); when(tableDispatcher.tableExists(any())).thenReturn(true); MetadataObject tableObject = MetadataObjects.parse( String.format("%s.%s.%s", catalog, schema, table), MetadataObject.Type.TABLE); Response resp = target( "/metalakes/" + metalake + "/objects/" + tableObject.type() + "/" + tableObject.fullName() + "/statistics/partitions") .request(MediaType.APPLICATION_JSON_TYPE) .accept("application/vnd.gravitino.v1+json") .post(entity(req, MediaType.APPLICATION_JSON_TYPE)); Assertions.assertEquals(Response.Status.OK.getStatusCode(), resp.getStatus()); Assertions.assertEquals(MediaType.APPLICATION_JSON_TYPE, resp.getMediaType()); DropResponse dropResp = resp.readEntity(DropResponse.class); Assertions.assertEquals(0, dropResp.getCode()); Assertions.assertTrue(dropResp.dropped()); // Test throw NoSuchMetadataObjectExcep when(tableDispatcher.tableExists(any())).thenReturn(false); Response resp1 = target( "/metalakes/" + metalake + "/objects/" + tableObject.type() + "/" + tableObject.fullName() + "/statistics/partitions") .request(MediaType.APPLICATION_JSON_TYPE) .accept("application/vnd.gravitino.v1+json") .post(entity(req, MediaType.APPLICATION_JSON_TYPE)); Assertions.assertEquals(Response.Status.NOT_FOUND.getStatusCode(), resp1.getStatus()); Assertions.assertEquals(MediaType.APPLICATION_JSON_TYPE, resp1.getMediaType()); ErrorResponse errorResp = resp1.readEntity(ErrorResponse.class); Assertions.assertEquals(ErrorConstants.NOT_FOUND_CODE, errorResp.getCode()); Assertions.assertEquals( NoSuchMetadataObjectException.class.getSimpleName(), errorResp.getType()); // Test throw RuntimeException when(tableDispatcher.tableExists(any())).thenReturn(true); 
doThrow(new RuntimeException("mock error")) .when(manager) .dropPartitionStatistics(any(), any(), any()); Response resp2 = target( "/metalakes/" + metalake + "/objects/" + tableObject.type() + "/" + tableObject.fullName() + "/statistics/partitions") .request(MediaType.APPLICATION_JSON_TYPE) .accept("application/vnd.gravitino.v1+json") .post(entity(req, MediaType.APPLICATION_JSON_TYPE)); Assertions.assertEquals( Response.Status.INTERNAL_SERVER_ERROR.getStatusCode(), resp2.getStatus()); Assertions.assertEquals(MediaType.APPLICATION_JSON_TYPE, resp2.getMediaType()); ErrorResponse errorResp2 = resp2.readEntity(ErrorResponse.class); Assertions.assertEquals(ErrorConstants.INTERNAL_ERROR_CODE, errorResp2.getCode()); Assertions.assertEquals(RuntimeException.class.getSimpleName(), errorResp2.getType()); // Test throw UnmodifiableStatisticException doThrow(new UnmodifiableStatisticException("mock error")) .when(manager) .dropPartitionStatistics(any(), any(), any()); Response resp3 = target( "/metalakes/" + metalake + "/objects/" + tableObject.type() + "/" + tableObject.fullName() + "/statistics/partitions") .request(MediaType.APPLICATION_JSON_TYPE) .accept("application/vnd.gravitino.v1+json") .post(entity(req, MediaType.APPLICATION_JSON_TYPE)); Assertions.assertEquals(Response.Status.METHOD_NOT_ALLOWED.getStatusCode(), resp3.getStatus()); Assertions.assertEquals(MediaType.APPLICATION_JSON_TYPE, resp3.getMediaType()); ErrorResponse errorResp3 = resp3.readEntity(ErrorResponse.class); Assertions.assertEquals(ErrorConstants.UNSUPPORTED_OPERATION_CODE, errorResp3.getCode()); Assertions.assertEquals( UnmodifiableStatisticException.class.getSimpleName(), errorResp3.getType()); } @Test public void testGetBoundType() { Assertions.assertEquals( PartitionRange.BoundType.CLOSED, StatisticOperations.getFromBoundType(true)); Assertions.assertEquals( PartitionRange.BoundType.OPEN, StatisticOperations.getFromBoundType(false)); } }
googleapis/google-cloud-java
36,240
java-discoveryengine/proto-google-cloud-discoveryengine-v1alpha/src/main/java/com/google/cloud/discoveryengine/v1alpha/ListChunksRequest.java
/* * Copyright 2025 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ // Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/cloud/discoveryengine/v1alpha/chunk_service.proto // Protobuf Java Version: 3.25.8 package com.google.cloud.discoveryengine.v1alpha; /** * * * <pre> * Request message for * [ChunkService.ListChunks][google.cloud.discoveryengine.v1alpha.ChunkService.ListChunks] * method. * </pre> * * Protobuf type {@code google.cloud.discoveryengine.v1alpha.ListChunksRequest} */ public final class ListChunksRequest extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.cloud.discoveryengine.v1alpha.ListChunksRequest) ListChunksRequestOrBuilder { private static final long serialVersionUID = 0L; // Use ListChunksRequest.newBuilder() to construct. 
private ListChunksRequest(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private ListChunksRequest() { parent_ = ""; pageToken_ = ""; } @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance(UnusedPrivateParameter unused) { return new ListChunksRequest(); } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.discoveryengine.v1alpha.ChunkServiceProto .internal_static_google_cloud_discoveryengine_v1alpha_ListChunksRequest_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.discoveryengine.v1alpha.ChunkServiceProto .internal_static_google_cloud_discoveryengine_v1alpha_ListChunksRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.discoveryengine.v1alpha.ListChunksRequest.class, com.google.cloud.discoveryengine.v1alpha.ListChunksRequest.Builder.class); } public static final int PARENT_FIELD_NUMBER = 1; @SuppressWarnings("serial") private volatile java.lang.Object parent_ = ""; /** * * * <pre> * Required. The parent document resource name, such as * `projects/{project}/locations/{location}/collections/{collection}/dataStores/{data_store}/branches/{branch}/documents/{document}`. * * If the caller does not have permission to list * [Chunk][google.cloud.discoveryengine.v1alpha.Chunk]s under this document, * regardless of whether or not this document exists, a `PERMISSION_DENIED` * error is returned. * </pre> * * <code> * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @return The parent. 
*/ @java.lang.Override public java.lang.String getParent() { java.lang.Object ref = parent_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); parent_ = s; return s; } } /** * * * <pre> * Required. The parent document resource name, such as * `projects/{project}/locations/{location}/collections/{collection}/dataStores/{data_store}/branches/{branch}/documents/{document}`. * * If the caller does not have permission to list * [Chunk][google.cloud.discoveryengine.v1alpha.Chunk]s under this document, * regardless of whether or not this document exists, a `PERMISSION_DENIED` * error is returned. * </pre> * * <code> * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @return The bytes for parent. */ @java.lang.Override public com.google.protobuf.ByteString getParentBytes() { java.lang.Object ref = parent_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); parent_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } public static final int PAGE_SIZE_FIELD_NUMBER = 2; private int pageSize_ = 0; /** * * * <pre> * Maximum number of [Chunk][google.cloud.discoveryengine.v1alpha.Chunk]s to * return. If unspecified, defaults to 100. The maximum allowed value is 1000. * Values above 1000 will be coerced to 1000. * * If this field is negative, an `INVALID_ARGUMENT` error is returned. * </pre> * * <code>int32 page_size = 2;</code> * * @return The pageSize. 
*/ @java.lang.Override public int getPageSize() { return pageSize_; } public static final int PAGE_TOKEN_FIELD_NUMBER = 3; @SuppressWarnings("serial") private volatile java.lang.Object pageToken_ = ""; /** * * * <pre> * A page token * [ListChunksResponse.next_page_token][google.cloud.discoveryengine.v1alpha.ListChunksResponse.next_page_token], * received from a previous * [ChunkService.ListChunks][google.cloud.discoveryengine.v1alpha.ChunkService.ListChunks] * call. Provide this to retrieve the subsequent page. * * When paginating, all other parameters provided to * [ChunkService.ListChunks][google.cloud.discoveryengine.v1alpha.ChunkService.ListChunks] * must match the call that provided the page token. Otherwise, an * `INVALID_ARGUMENT` error is returned. * </pre> * * <code>string page_token = 3;</code> * * @return The pageToken. */ @java.lang.Override public java.lang.String getPageToken() { java.lang.Object ref = pageToken_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); pageToken_ = s; return s; } } /** * * * <pre> * A page token * [ListChunksResponse.next_page_token][google.cloud.discoveryengine.v1alpha.ListChunksResponse.next_page_token], * received from a previous * [ChunkService.ListChunks][google.cloud.discoveryengine.v1alpha.ChunkService.ListChunks] * call. Provide this to retrieve the subsequent page. * * When paginating, all other parameters provided to * [ChunkService.ListChunks][google.cloud.discoveryengine.v1alpha.ChunkService.ListChunks] * must match the call that provided the page token. Otherwise, an * `INVALID_ARGUMENT` error is returned. * </pre> * * <code>string page_token = 3;</code> * * @return The bytes for pageToken. 
*/ @java.lang.Override public com.google.protobuf.ByteString getPageTokenBytes() { java.lang.Object ref = pageToken_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); pageToken_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(parent_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 1, parent_); } if (pageSize_ != 0) { output.writeInt32(2, pageSize_); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(pageToken_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 3, pageToken_); } getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(parent_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, parent_); } if (pageSize_ != 0) { size += com.google.protobuf.CodedOutputStream.computeInt32Size(2, pageSize_); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(pageToken_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(3, pageToken_); } size += getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof com.google.cloud.discoveryengine.v1alpha.ListChunksRequest)) { return super.equals(obj); } com.google.cloud.discoveryengine.v1alpha.ListChunksRequest 
other = (com.google.cloud.discoveryengine.v1alpha.ListChunksRequest) obj; if (!getParent().equals(other.getParent())) return false; if (getPageSize() != other.getPageSize()) return false; if (!getPageToken().equals(other.getPageToken())) return false; if (!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); hash = (37 * hash) + PARENT_FIELD_NUMBER; hash = (53 * hash) + getParent().hashCode(); hash = (37 * hash) + PAGE_SIZE_FIELD_NUMBER; hash = (53 * hash) + getPageSize(); hash = (37 * hash) + PAGE_TOKEN_FIELD_NUMBER; hash = (53 * hash) + getPageToken().hashCode(); hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static com.google.cloud.discoveryengine.v1alpha.ListChunksRequest parseFrom( java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.discoveryengine.v1alpha.ListChunksRequest parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.discoveryengine.v1alpha.ListChunksRequest parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.discoveryengine.v1alpha.ListChunksRequest parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.discoveryengine.v1alpha.ListChunksRequest parseFrom(byte[] data) throws 
com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.discoveryengine.v1alpha.ListChunksRequest parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.discoveryengine.v1alpha.ListChunksRequest parseFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.discoveryengine.v1alpha.ListChunksRequest parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.discoveryengine.v1alpha.ListChunksRequest parseDelimitedFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); } public static com.google.cloud.discoveryengine.v1alpha.ListChunksRequest parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.discoveryengine.v1alpha.ListChunksRequest parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.discoveryengine.v1alpha.ListChunksRequest parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, 
extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder( com.google.cloud.discoveryengine.v1alpha.ListChunksRequest prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * * * <pre> * Request message for * [ChunkService.ListChunks][google.cloud.discoveryengine.v1alpha.ChunkService.ListChunks] * method. * </pre> * * Protobuf type {@code google.cloud.discoveryengine.v1alpha.ListChunksRequest} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:google.cloud.discoveryengine.v1alpha.ListChunksRequest) com.google.cloud.discoveryengine.v1alpha.ListChunksRequestOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.discoveryengine.v1alpha.ChunkServiceProto .internal_static_google_cloud_discoveryengine_v1alpha_ListChunksRequest_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.discoveryengine.v1alpha.ChunkServiceProto .internal_static_google_cloud_discoveryengine_v1alpha_ListChunksRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.discoveryengine.v1alpha.ListChunksRequest.class, com.google.cloud.discoveryengine.v1alpha.ListChunksRequest.Builder.class); } // Construct using com.google.cloud.discoveryengine.v1alpha.ListChunksRequest.newBuilder() private Builder() {} private 
Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); } @java.lang.Override public Builder clear() { super.clear(); bitField0_ = 0; parent_ = ""; pageSize_ = 0; pageToken_ = ""; return this; } @java.lang.Override public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return com.google.cloud.discoveryengine.v1alpha.ChunkServiceProto .internal_static_google_cloud_discoveryengine_v1alpha_ListChunksRequest_descriptor; } @java.lang.Override public com.google.cloud.discoveryengine.v1alpha.ListChunksRequest getDefaultInstanceForType() { return com.google.cloud.discoveryengine.v1alpha.ListChunksRequest.getDefaultInstance(); } @java.lang.Override public com.google.cloud.discoveryengine.v1alpha.ListChunksRequest build() { com.google.cloud.discoveryengine.v1alpha.ListChunksRequest result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public com.google.cloud.discoveryengine.v1alpha.ListChunksRequest buildPartial() { com.google.cloud.discoveryengine.v1alpha.ListChunksRequest result = new com.google.cloud.discoveryengine.v1alpha.ListChunksRequest(this); if (bitField0_ != 0) { buildPartial0(result); } onBuilt(); return result; } private void buildPartial0(com.google.cloud.discoveryengine.v1alpha.ListChunksRequest result) { int from_bitField0_ = bitField0_; if (((from_bitField0_ & 0x00000001) != 0)) { result.parent_ = parent_; } if (((from_bitField0_ & 0x00000002) != 0)) { result.pageSize_ = pageSize_; } if (((from_bitField0_ & 0x00000004) != 0)) { result.pageToken_ = pageToken_; } } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) { return 
super.clearField(field); } @java.lang.Override public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof com.google.cloud.discoveryengine.v1alpha.ListChunksRequest) { return mergeFrom((com.google.cloud.discoveryengine.v1alpha.ListChunksRequest) other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(com.google.cloud.discoveryengine.v1alpha.ListChunksRequest other) { if (other == com.google.cloud.discoveryengine.v1alpha.ListChunksRequest.getDefaultInstance()) return this; if (!other.getParent().isEmpty()) { parent_ = other.parent_; bitField0_ |= 0x00000001; onChanged(); } if (other.getPageSize() != 0) { setPageSize(other.getPageSize()); } if (!other.getPageToken().isEmpty()) { pageToken_ = other.pageToken_; bitField0_ |= 0x00000004; onChanged(); } this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { parent_ = input.readStringRequireUtf8(); bitField0_ |= 0x00000001; break; } // case 10 case 16: { pageSize_ = input.readInt32(); 
bitField0_ |= 0x00000002; break; } // case 16 case 26: { pageToken_ = input.readStringRequireUtf8(); bitField0_ |= 0x00000004; break; } // case 26 default: { if (!super.parseUnknownField(input, extensionRegistry, tag)) { done = true; // was an endgroup tag } break; } // default: } // switch (tag) } // while (!done) } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.unwrapIOException(); } finally { onChanged(); } // finally return this; } private int bitField0_; private java.lang.Object parent_ = ""; /** * * * <pre> * Required. The parent document resource name, such as * `projects/{project}/locations/{location}/collections/{collection}/dataStores/{data_store}/branches/{branch}/documents/{document}`. * * If the caller does not have permission to list * [Chunk][google.cloud.discoveryengine.v1alpha.Chunk]s under this document, * regardless of whether or not this document exists, a `PERMISSION_DENIED` * error is returned. * </pre> * * <code> * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @return The parent. */ public java.lang.String getParent() { java.lang.Object ref = parent_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); parent_ = s; return s; } else { return (java.lang.String) ref; } } /** * * * <pre> * Required. The parent document resource name, such as * `projects/{project}/locations/{location}/collections/{collection}/dataStores/{data_store}/branches/{branch}/documents/{document}`. * * If the caller does not have permission to list * [Chunk][google.cloud.discoveryengine.v1alpha.Chunk]s under this document, * regardless of whether or not this document exists, a `PERMISSION_DENIED` * error is returned. * </pre> * * <code> * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... 
} * </code> * * @return The bytes for parent. */ public com.google.protobuf.ByteString getParentBytes() { java.lang.Object ref = parent_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); parent_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * * * <pre> * Required. The parent document resource name, such as * `projects/{project}/locations/{location}/collections/{collection}/dataStores/{data_store}/branches/{branch}/documents/{document}`. * * If the caller does not have permission to list * [Chunk][google.cloud.discoveryengine.v1alpha.Chunk]s under this document, * regardless of whether or not this document exists, a `PERMISSION_DENIED` * error is returned. * </pre> * * <code> * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @param value The parent to set. * @return This builder for chaining. */ public Builder setParent(java.lang.String value) { if (value == null) { throw new NullPointerException(); } parent_ = value; bitField0_ |= 0x00000001; onChanged(); return this; } /** * * * <pre> * Required. The parent document resource name, such as * `projects/{project}/locations/{location}/collections/{collection}/dataStores/{data_store}/branches/{branch}/documents/{document}`. * * If the caller does not have permission to list * [Chunk][google.cloud.discoveryengine.v1alpha.Chunk]s under this document, * regardless of whether or not this document exists, a `PERMISSION_DENIED` * error is returned. * </pre> * * <code> * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @return This builder for chaining. */ public Builder clearParent() { parent_ = getDefaultInstance().getParent(); bitField0_ = (bitField0_ & ~0x00000001); onChanged(); return this; } /** * * * <pre> * Required. 
The parent document resource name, such as * `projects/{project}/locations/{location}/collections/{collection}/dataStores/{data_store}/branches/{branch}/documents/{document}`. * * If the caller does not have permission to list * [Chunk][google.cloud.discoveryengine.v1alpha.Chunk]s under this document, * regardless of whether or not this document exists, a `PERMISSION_DENIED` * error is returned. * </pre> * * <code> * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @param value The bytes for parent to set. * @return This builder for chaining. */ public Builder setParentBytes(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); parent_ = value; bitField0_ |= 0x00000001; onChanged(); return this; } private int pageSize_; /** * * * <pre> * Maximum number of [Chunk][google.cloud.discoveryengine.v1alpha.Chunk]s to * return. If unspecified, defaults to 100. The maximum allowed value is 1000. * Values above 1000 will be coerced to 1000. * * If this field is negative, an `INVALID_ARGUMENT` error is returned. * </pre> * * <code>int32 page_size = 2;</code> * * @return The pageSize. */ @java.lang.Override public int getPageSize() { return pageSize_; } /** * * * <pre> * Maximum number of [Chunk][google.cloud.discoveryengine.v1alpha.Chunk]s to * return. If unspecified, defaults to 100. The maximum allowed value is 1000. * Values above 1000 will be coerced to 1000. * * If this field is negative, an `INVALID_ARGUMENT` error is returned. * </pre> * * <code>int32 page_size = 2;</code> * * @param value The pageSize to set. * @return This builder for chaining. */ public Builder setPageSize(int value) { pageSize_ = value; bitField0_ |= 0x00000002; onChanged(); return this; } /** * * * <pre> * Maximum number of [Chunk][google.cloud.discoveryengine.v1alpha.Chunk]s to * return. If unspecified, defaults to 100. The maximum allowed value is 1000. 
* Values above 1000 will be coerced to 1000. * * If this field is negative, an `INVALID_ARGUMENT` error is returned. * </pre> * * <code>int32 page_size = 2;</code> * * @return This builder for chaining. */ public Builder clearPageSize() { bitField0_ = (bitField0_ & ~0x00000002); pageSize_ = 0; onChanged(); return this; } private java.lang.Object pageToken_ = ""; /** * * * <pre> * A page token * [ListChunksResponse.next_page_token][google.cloud.discoveryengine.v1alpha.ListChunksResponse.next_page_token], * received from a previous * [ChunkService.ListChunks][google.cloud.discoveryengine.v1alpha.ChunkService.ListChunks] * call. Provide this to retrieve the subsequent page. * * When paginating, all other parameters provided to * [ChunkService.ListChunks][google.cloud.discoveryengine.v1alpha.ChunkService.ListChunks] * must match the call that provided the page token. Otherwise, an * `INVALID_ARGUMENT` error is returned. * </pre> * * <code>string page_token = 3;</code> * * @return The pageToken. */ public java.lang.String getPageToken() { java.lang.Object ref = pageToken_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); pageToken_ = s; return s; } else { return (java.lang.String) ref; } } /** * * * <pre> * A page token * [ListChunksResponse.next_page_token][google.cloud.discoveryengine.v1alpha.ListChunksResponse.next_page_token], * received from a previous * [ChunkService.ListChunks][google.cloud.discoveryengine.v1alpha.ChunkService.ListChunks] * call. Provide this to retrieve the subsequent page. * * When paginating, all other parameters provided to * [ChunkService.ListChunks][google.cloud.discoveryengine.v1alpha.ChunkService.ListChunks] * must match the call that provided the page token. Otherwise, an * `INVALID_ARGUMENT` error is returned. * </pre> * * <code>string page_token = 3;</code> * * @return The bytes for pageToken. 
*/ public com.google.protobuf.ByteString getPageTokenBytes() { java.lang.Object ref = pageToken_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); pageToken_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * * * <pre> * A page token * [ListChunksResponse.next_page_token][google.cloud.discoveryengine.v1alpha.ListChunksResponse.next_page_token], * received from a previous * [ChunkService.ListChunks][google.cloud.discoveryengine.v1alpha.ChunkService.ListChunks] * call. Provide this to retrieve the subsequent page. * * When paginating, all other parameters provided to * [ChunkService.ListChunks][google.cloud.discoveryengine.v1alpha.ChunkService.ListChunks] * must match the call that provided the page token. Otherwise, an * `INVALID_ARGUMENT` error is returned. * </pre> * * <code>string page_token = 3;</code> * * @param value The pageToken to set. * @return This builder for chaining. */ public Builder setPageToken(java.lang.String value) { if (value == null) { throw new NullPointerException(); } pageToken_ = value; bitField0_ |= 0x00000004; onChanged(); return this; } /** * * * <pre> * A page token * [ListChunksResponse.next_page_token][google.cloud.discoveryengine.v1alpha.ListChunksResponse.next_page_token], * received from a previous * [ChunkService.ListChunks][google.cloud.discoveryengine.v1alpha.ChunkService.ListChunks] * call. Provide this to retrieve the subsequent page. * * When paginating, all other parameters provided to * [ChunkService.ListChunks][google.cloud.discoveryengine.v1alpha.ChunkService.ListChunks] * must match the call that provided the page token. Otherwise, an * `INVALID_ARGUMENT` error is returned. * </pre> * * <code>string page_token = 3;</code> * * @return This builder for chaining. 
*/ public Builder clearPageToken() { pageToken_ = getDefaultInstance().getPageToken(); bitField0_ = (bitField0_ & ~0x00000004); onChanged(); return this; } /** * * * <pre> * A page token * [ListChunksResponse.next_page_token][google.cloud.discoveryengine.v1alpha.ListChunksResponse.next_page_token], * received from a previous * [ChunkService.ListChunks][google.cloud.discoveryengine.v1alpha.ChunkService.ListChunks] * call. Provide this to retrieve the subsequent page. * * When paginating, all other parameters provided to * [ChunkService.ListChunks][google.cloud.discoveryengine.v1alpha.ChunkService.ListChunks] * must match the call that provided the page token. Otherwise, an * `INVALID_ARGUMENT` error is returned. * </pre> * * <code>string page_token = 3;</code> * * @param value The bytes for pageToken to set. * @return This builder for chaining. */ public Builder setPageTokenBytes(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); pageToken_ = value; bitField0_ |= 0x00000004; onChanged(); return this; } @java.lang.Override public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:google.cloud.discoveryengine.v1alpha.ListChunksRequest) } // @@protoc_insertion_point(class_scope:google.cloud.discoveryengine.v1alpha.ListChunksRequest) private static final com.google.cloud.discoveryengine.v1alpha.ListChunksRequest DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new com.google.cloud.discoveryengine.v1alpha.ListChunksRequest(); } public static com.google.cloud.discoveryengine.v1alpha.ListChunksRequest getDefaultInstance() { return DEFAULT_INSTANCE; } private static final 
com.google.protobuf.Parser<ListChunksRequest> PARSER = new com.google.protobuf.AbstractParser<ListChunksRequest>() { @java.lang.Override public ListChunksRequest parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { Builder builder = newBuilder(); try { builder.mergeFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(builder.buildPartial()); } catch (com.google.protobuf.UninitializedMessageException e) { throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException(e) .setUnfinishedMessage(builder.buildPartial()); } return builder.buildPartial(); } }; public static com.google.protobuf.Parser<ListChunksRequest> parser() { return PARSER; } @java.lang.Override public com.google.protobuf.Parser<ListChunksRequest> getParserForType() { return PARSER; } @java.lang.Override public com.google.cloud.discoveryengine.v1alpha.ListChunksRequest getDefaultInstanceForType() { return DEFAULT_INSTANCE; } }
google/guava
36,333
android/guava-tests/test/com/google/common/collect/TableCollectionTest.java
/* * Copyright (C) 2008 The Guava Authors * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.google.common.collect; import static com.google.common.base.Preconditions.checkNotNull; import static com.google.common.collect.ReflectionFreeAssertThrows.assertThrows; import static com.google.common.collect.Tables.immutableCell; import static com.google.common.collect.Tables.transformValues; import static com.google.common.collect.Tables.transpose; import static com.google.common.collect.Tables.unmodifiableRowSortedTable; import static com.google.common.collect.Tables.unmodifiableTable; import static java.util.Collections.sort; import com.google.common.annotations.GwtCompatible; import com.google.common.annotations.GwtIncompatible; import com.google.common.annotations.J2ktIncompatible; import com.google.common.base.Function; import com.google.common.base.Functions; import com.google.common.collect.Table.Cell; import com.google.common.collect.testing.CollectionTestSuiteBuilder; import com.google.common.collect.testing.MapInterfaceTest; import com.google.common.collect.testing.SampleElements; import com.google.common.collect.testing.SetTestSuiteBuilder; import com.google.common.collect.testing.SortedSetTestSuiteBuilder; import com.google.common.collect.testing.TestSetGenerator; import com.google.common.collect.testing.TestStringCollectionGenerator; import com.google.common.collect.testing.TestStringSetGenerator; import 
com.google.common.collect.testing.TestStringSortedSetGenerator; import com.google.common.collect.testing.features.CollectionFeature; import com.google.common.collect.testing.features.CollectionSize; import com.google.common.collect.testing.features.Feature; import java.util.ArrayList; import java.util.Collection; import java.util.List; import java.util.Map; import java.util.Set; import java.util.SortedSet; import junit.framework.Test; import junit.framework.TestCase; import junit.framework.TestSuite; import org.jspecify.annotations.NullMarked; import org.jspecify.annotations.Nullable; /** * Collection tests for {@link Table} implementations. * * @author Jared Levy * @author Louis Wasserman */ @GwtCompatible @NullMarked public class TableCollectionTest extends TestCase { @J2ktIncompatible private static final Feature<?>[] COLLECTION_FEATURES = { CollectionSize.ANY, CollectionFeature.ALLOWS_NULL_QUERIES }; @J2ktIncompatible private static final Feature<?>[] COLLECTION_FEATURES_ORDER = { CollectionSize.ANY, CollectionFeature.KNOWN_ORDER, CollectionFeature.ALLOWS_NULL_QUERIES }; @J2ktIncompatible private static final Feature<?>[] COLLECTION_FEATURES_REMOVE = { CollectionSize.ANY, CollectionFeature.SUPPORTS_REMOVE, CollectionFeature.ALLOWS_NULL_QUERIES }; @J2ktIncompatible private static final Feature<?>[] COLLECTION_FEATURES_REMOVE_ORDER = { CollectionSize.ANY, CollectionFeature.KNOWN_ORDER, CollectionFeature.SUPPORTS_REMOVE, CollectionFeature.ALLOWS_NULL_QUERIES }; @J2ktIncompatible @GwtIncompatible // suite @AndroidIncompatible // test-suite builders public static Test suite() { TestSuite suite = new TestSuite(); // Not testing rowKeySet() or columnKeySet() of Table.transformValues() // since the transformation doesn't affect the row and column key sets. 
suite.addTest( SetTestSuiteBuilder.using( new TestStringSetGenerator() { @Override protected Set<String> create(String[] elements) { Table<String, Integer, Character> table = ArrayTable.create(ImmutableList.copyOf(elements), ImmutableList.of(1, 2)); populateForRowKeySet(table, elements); return table.rowKeySet(); } }) .named("ArrayTable.rowKeySet") .withFeatures( CollectionSize.ONE, CollectionSize.SEVERAL, CollectionFeature.KNOWN_ORDER, CollectionFeature.REJECTS_DUPLICATES_AT_CREATION, CollectionFeature.ALLOWS_NULL_QUERIES) .createTestSuite()); suite.addTest( SetTestSuiteBuilder.using( new TestStringSetGenerator() { @Override protected Set<String> create(String[] elements) { Table<String, Integer, Character> table = HashBasedTable.create(); populateForRowKeySet(table, elements); return table.rowKeySet(); } }) .named("HashBasedTable.rowKeySet") .withFeatures(COLLECTION_FEATURES_REMOVE) .withFeatures(CollectionFeature.SUPPORTS_ITERATOR_REMOVE) .createTestSuite()); suite.addTest( SortedSetTestSuiteBuilder.using( new TestStringSortedSetGenerator() { @Override protected SortedSet<String> create(String[] elements) { TreeBasedTable<String, Integer, Character> table = TreeBasedTable.create(); populateForRowKeySet(table, elements); return table.rowKeySet(); } @Override public List<String> order(List<String> insertionOrder) { sort(insertionOrder); return insertionOrder; } }) .named("TreeBasedTable.rowKeySet") .withFeatures(COLLECTION_FEATURES_REMOVE_ORDER) .withFeatures(CollectionFeature.SUPPORTS_ITERATOR_REMOVE) .createTestSuite()); suite.addTest( SetTestSuiteBuilder.using( new TestStringSetGenerator() { @Override protected Set<String> create(String[] elements) { Table<String, Integer, Character> table = HashBasedTable.create(); populateForRowKeySet(table, elements); return unmodifiableTable(table).rowKeySet(); } }) .named("unmodifiableTable[HashBasedTable].rowKeySet") .withFeatures(COLLECTION_FEATURES) .createTestSuite()); suite.addTest( SetTestSuiteBuilder.using( new 
TestStringSetGenerator() { @Override protected Set<String> create(String[] elements) { RowSortedTable<String, Integer, Character> table = TreeBasedTable.create(); populateForRowKeySet(table, elements); return unmodifiableRowSortedTable(table).rowKeySet(); } @Override public List<String> order(List<String> insertionOrder) { sort(insertionOrder); return insertionOrder; } }) .named("unmodifiableRowSortedTable[TreeBasedTable].rowKeySet") .withFeatures(COLLECTION_FEATURES_ORDER) .createTestSuite()); suite.addTest( SetTestSuiteBuilder.using( new TestStringSetGenerator() { @Override protected Set<String> create(String[] elements) { Table<Integer, String, Character> table = ArrayTable.create(ImmutableList.of(1, 2), ImmutableList.copyOf(elements)); populateForColumnKeySet(table, elements); return table.columnKeySet(); } }) .named("ArrayTable.columnKeySet") .withFeatures( CollectionSize.ONE, CollectionSize.SEVERAL, CollectionFeature.KNOWN_ORDER, CollectionFeature.REJECTS_DUPLICATES_AT_CREATION, CollectionFeature.ALLOWS_NULL_QUERIES) .createTestSuite()); suite.addTest( SetTestSuiteBuilder.using( new TestStringSetGenerator() { @Override protected Set<String> create(String[] elements) { Table<Integer, String, Character> table = HashBasedTable.create(); populateForColumnKeySet(table, elements); return table.columnKeySet(); } }) .named("HashBasedTable.columnKeySet") .withFeatures(COLLECTION_FEATURES_REMOVE) .createTestSuite()); suite.addTest( SetTestSuiteBuilder.using( new TestStringSetGenerator() { @Override protected Set<String> create(String[] elements) { Table<Integer, String, Character> table = TreeBasedTable.create(); populateForColumnKeySet(table, elements); return table.columnKeySet(); } @Override public List<String> order(List<String> insertionOrder) { sort(insertionOrder); return insertionOrder; } }) .named("TreeBasedTable.columnKeySet") .withFeatures(COLLECTION_FEATURES_REMOVE_ORDER) .createTestSuite()); suite.addTest( SetTestSuiteBuilder.using( new 
TestStringSetGenerator() { @Override protected Set<String> create(String[] elements) { Table<Integer, String, Character> table = HashBasedTable.create(); populateForColumnKeySet(table, elements); return unmodifiableTable(table).columnKeySet(); } }) .named("unmodifiableTable[HashBasedTable].columnKeySet") .withFeatures(COLLECTION_FEATURES) .createTestSuite()); suite.addTest( SetTestSuiteBuilder.using( new TestStringSetGenerator() { @Override protected Set<String> create(String[] elements) { RowSortedTable<Integer, String, Character> table = TreeBasedTable.create(); populateForColumnKeySet(table, elements); return unmodifiableRowSortedTable(table).columnKeySet(); } @Override public List<String> order(List<String> insertionOrder) { sort(insertionOrder); return insertionOrder; } }) .named("unmodifiableRowSortedTable[TreeBasedTable].columnKeySet") .withFeatures(COLLECTION_FEATURES_ORDER) .createTestSuite()); suite.addTest( CollectionTestSuiteBuilder.using( new TestStringCollectionGenerator() { @Override protected Collection<String> create(String[] elements) { List<Integer> rowKeys = new ArrayList<>(); for (int i = 0; i < elements.length; i++) { rowKeys.add(i); } Table<Integer, Character, String> table = ArrayTable.create(rowKeys, ImmutableList.of('a')); populateForValues(table, elements); return table.values(); } }) .named("ArrayTable.values") .withFeatures( CollectionSize.ONE, CollectionSize.SEVERAL, CollectionFeature.ALLOWS_NULL_VALUES, CollectionFeature.KNOWN_ORDER) .createTestSuite()); suite.addTest( CollectionTestSuiteBuilder.using( new TestStringCollectionGenerator() { @Override protected Collection<String> create(String[] elements) { Table<Integer, Character, String> table = HashBasedTable.create(); table.put(1, 'a', "foo"); table.clear(); populateForValues(table, elements); return table.values(); } }) .named("HashBasedTable.values") .withFeatures(COLLECTION_FEATURES_REMOVE) .withFeatures(CollectionFeature.SUPPORTS_ITERATOR_REMOVE) .createTestSuite()); 
suite.addTest( CollectionTestSuiteBuilder.using( new TestStringCollectionGenerator() { @Override protected Collection<String> create(String[] elements) { Table<Integer, Character, String> table = TreeBasedTable.create(); table.put(1, 'a', "foo"); table.clear(); populateForValues(table, elements); return table.values(); } }) .named("TreeBasedTable.values") .withFeatures(COLLECTION_FEATURES_REMOVE_ORDER) .withFeatures(CollectionFeature.SUPPORTS_ITERATOR_REMOVE) .createTestSuite()); Function<String, String> removeFirstCharacter = new Function<String, String>() { @Override public String apply(String input) { return input.substring(1); } }; suite.addTest( CollectionTestSuiteBuilder.using( new TestStringCollectionGenerator() { @Override protected Collection<String> create(String[] elements) { Table<Integer, Character, String> table = HashBasedTable.create(); for (int i = 0; i < elements.length; i++) { table.put(i, 'a', "x" + checkNotNull(elements[i])); } return transformValues(table, removeFirstCharacter).values(); } }) .named("TransformValues.values") .withFeatures(COLLECTION_FEATURES_REMOVE) .withFeatures(CollectionFeature.SUPPORTS_ITERATOR_REMOVE) .createTestSuite()); suite.addTest( CollectionTestSuiteBuilder.using( new TestStringCollectionGenerator() { @Override protected Collection<String> create(String[] elements) { Table<Integer, Character, String> table = HashBasedTable.create(); table.put(1, 'a', "foo"); table.clear(); populateForValues(table, elements); return unmodifiableTable(table).values(); } }) .named("unmodifiableTable[HashBasedTable].values") .withFeatures(COLLECTION_FEATURES) .createTestSuite()); suite.addTest( CollectionTestSuiteBuilder.using( new TestStringCollectionGenerator() { @Override protected Collection<String> create(String[] elements) { RowSortedTable<Integer, Character, String> table = TreeBasedTable.create(); table.put(1, 'a', "foo"); table.clear(); populateForValues(table, elements); return unmodifiableRowSortedTable(table).values(); } }) 
.named("unmodifiableTable[TreeBasedTable].values") .withFeatures(COLLECTION_FEATURES_ORDER) .createTestSuite()); suite.addTest( SetTestSuiteBuilder.using( new TestCellSetGenerator() { @Override public SampleElements<Cell<String, Integer, Character>> samples() { return new SampleElements<>( immutableCell("bar", 1, 'a'), immutableCell("bar", 2, 'b'), immutableCell("bar", 3, (Character) null), immutableCell("bar", 4, 'b'), immutableCell("bar", 5, 'b')); } @Override public Set<Cell<String, Integer, Character>> create(Object... elements) { List<Integer> columnKeys = new ArrayList<>(); for (Object element : elements) { @SuppressWarnings("unchecked") Cell<String, Integer, Character> cell = (Cell<String, Integer, Character>) element; columnKeys.add(cell.getColumnKey()); } Table<String, Integer, Character> table = ArrayTable.create(ImmutableList.of("bar"), columnKeys); for (Object element : elements) { @SuppressWarnings("unchecked") Cell<String, Integer, Character> cell = (Cell<String, Integer, Character>) element; table.put(cell.getRowKey(), cell.getColumnKey(), cell.getValue()); } return table.cellSet(); } @Override Table<String, Integer, Character> createTable() { throw new UnsupportedOperationException(); } }) .named("ArrayTable.cellSet") .withFeatures( CollectionSize.ONE, CollectionSize.SEVERAL, CollectionFeature.KNOWN_ORDER, CollectionFeature.REJECTS_DUPLICATES_AT_CREATION, CollectionFeature.ALLOWS_NULL_QUERIES) .createTestSuite()); suite.addTest( SetTestSuiteBuilder.using( new TestCellSetGenerator() { @Override Table<String, Integer, Character> createTable() { return HashBasedTable.create(); } }) .named("HashBasedTable.cellSet") .withFeatures( CollectionSize.ANY, CollectionFeature.REMOVE_OPERATIONS, CollectionFeature.ALLOWS_NULL_QUERIES) .createTestSuite()); suite.addTest( SetTestSuiteBuilder.using( new TestCellSetGenerator() { @Override Table<String, Integer, Character> createTable() { return TreeBasedTable.create(); } }) .named("TreeBasedTable.cellSet") 
.withFeatures( CollectionSize.ANY, CollectionFeature.REMOVE_OPERATIONS, CollectionFeature.ALLOWS_NULL_QUERIES) .createTestSuite()); suite.addTest( SetTestSuiteBuilder.using( new TestCellSetGenerator() { @Override Table<String, Integer, Character> createTable() { Table<Integer, String, Character> original = TreeBasedTable.create(); return transpose(original); } }) .named("TransposedTable.cellSet") .withFeatures( CollectionSize.ANY, CollectionFeature.REMOVE_OPERATIONS, CollectionFeature.ALLOWS_NULL_QUERIES) .createTestSuite()); suite.addTest( SetTestSuiteBuilder.using( new TestCellSetGenerator() { @Override Table<String, Integer, Character> createTable() { return HashBasedTable.create(); } @Override public Set<Cell<String, Integer, Character>> create(Object... elements) { Table<String, Integer, Character> table = createTable(); for (Object element : elements) { @SuppressWarnings("unchecked") Cell<String, Integer, Character> cell = (Cell<String, Integer, Character>) element; table.put(cell.getRowKey(), cell.getColumnKey(), cell.getValue()); } return transformValues(table, Functions.<Character>identity()).cellSet(); } }) .named("TransformValues.cellSet") .withFeatures( CollectionSize.ANY, CollectionFeature.ALLOWS_NULL_QUERIES, CollectionFeature.REMOVE_OPERATIONS) .createTestSuite()); suite.addTest( SetTestSuiteBuilder.using( new TestCellSetGenerator() { @Override Table<String, Integer, Character> createTable() { return unmodifiableTable(HashBasedTable.<String, Integer, Character>create()); } @Override public Set<Cell<String, Integer, Character>> create(Object... 
elements) { Table<String, Integer, Character> table = HashBasedTable.create(); for (Object element : elements) { @SuppressWarnings("unchecked") Cell<String, Integer, Character> cell = (Cell<String, Integer, Character>) element; table.put(cell.getRowKey(), cell.getColumnKey(), cell.getValue()); } return unmodifiableTable(table).cellSet(); } }) .named("unmodifiableTable[HashBasedTable].cellSet") .withFeatures(CollectionSize.ANY, CollectionFeature.ALLOWS_NULL_QUERIES) .createTestSuite()); suite.addTest( SetTestSuiteBuilder.using( new TestCellSetGenerator() { @Override RowSortedTable<String, Integer, Character> createTable() { return unmodifiableRowSortedTable( TreeBasedTable.<String, Integer, Character>create()); } @Override public Set<Cell<String, Integer, Character>> create(Object... elements) { RowSortedTable<String, Integer, Character> table = TreeBasedTable.create(); for (Object element : elements) { @SuppressWarnings("unchecked") Cell<String, Integer, Character> cell = (Cell<String, Integer, Character>) element; table.put(cell.getRowKey(), cell.getColumnKey(), cell.getValue()); } return unmodifiableRowSortedTable(table).cellSet(); } }) .named("unmodifiableRowSortedTable[TreeBasedTable].cellSet") .withFeatures(CollectionSize.ANY, CollectionFeature.ALLOWS_NULL_QUERIES) .createTestSuite()); suite.addTest( SetTestSuiteBuilder.using( new TestStringSetGenerator() { @Override protected Set<String> create(String[] elements) { Iterable<String> rowKeys = ImmutableSet.copyOf(elements); Iterable<Integer> columnKeys = ImmutableList.of(1, 2, 3); Table<String, Integer, Character> table = ArrayTable.create(rowKeys, columnKeys); populateForRowKeySet(table, elements); return table.column(1).keySet(); } }) .named("ArrayTable.column.keySet") .withFeatures( CollectionSize.ONE, CollectionSize.SEVERAL, CollectionFeature.KNOWN_ORDER, CollectionFeature.ALLOWS_NULL_QUERIES) .createTestSuite()); suite.addTest( SetTestSuiteBuilder.using( new TestStringSetGenerator() { @Override protected 
Set<String> create(String[] elements) { Table<String, Integer, Character> table = HashBasedTable.create(); populateForRowKeySet(table, elements); return table.column(1).keySet(); } }) .named("HashBasedTable.column.keySet") .withFeatures(COLLECTION_FEATURES_REMOVE) .createTestSuite()); suite.addTest( SetTestSuiteBuilder.using( new TestStringSetGenerator() { @Override protected Set<String> create(String[] elements) { Table<String, Integer, Character> table = TreeBasedTable.create(); populateForRowKeySet(table, elements); return table.column(1).keySet(); } @Override public List<String> order(List<String> insertionOrder) { sort(insertionOrder); return insertionOrder; } }) .named("TreeBasedTable.column.keySet") .withFeatures(COLLECTION_FEATURES_REMOVE_ORDER) .createTestSuite()); suite.addTest( SetTestSuiteBuilder.using( new TestStringSetGenerator() { @Override protected Set<String> create(String[] elements) { Table<String, Integer, Character> table = HashBasedTable.create(); populateForRowKeySet(table, elements); return transformValues(table, Functions.toStringFunction()).column(1).keySet(); } }) .named("TransformValues.column.keySet") .withFeatures(COLLECTION_FEATURES_REMOVE) .createTestSuite()); suite.addTest( SetTestSuiteBuilder.using( new TestStringSetGenerator() { @Override protected Set<String> create(String[] elements) { Table<String, Integer, Character> table = HashBasedTable.create(); populateForRowKeySet(table, elements); return unmodifiableTable(table).column(1).keySet(); } }) .named("unmodifiableTable[HashBasedTable].column.keySet") .withFeatures(COLLECTION_FEATURES) .createTestSuite()); suite.addTest( SetTestSuiteBuilder.using( new TestStringSetGenerator() { @Override protected Set<String> create(String[] elements) { RowSortedTable<String, Integer, Character> table = TreeBasedTable.create(); populateForRowKeySet(table, elements); return unmodifiableRowSortedTable(table).column(1).keySet(); } @Override public List<String> order(List<String> insertionOrder) { 
sort(insertionOrder); return insertionOrder; } }) .named("unmodifiableRowSortedTable[TreeBasedTable].column.keySet") .withFeatures(COLLECTION_FEATURES_ORDER) .createTestSuite()); return suite; } private static void populateForRowKeySet( Table<String, Integer, Character> table, String[] elements) { for (String row : elements) { table.put(row, 1, 'a'); table.put(row, 2, 'b'); } } private static void populateForColumnKeySet( Table<Integer, String, Character> table, String[] elements) { for (String column : elements) { table.put(1, column, 'a'); table.put(2, column, 'b'); } } private static void populateForValues( Table<Integer, Character, String> table, String[] elements) { for (int i = 0; i < elements.length; i++) { table.put(i, 'a', elements[i]); } } @J2ktIncompatible private abstract static class TestCellSetGenerator implements TestSetGenerator<Cell<String, Integer, Character>> { @Override public SampleElements<Cell<String, Integer, Character>> samples() { return new SampleElements<>( immutableCell("bar", 1, 'a'), immutableCell("bar", 2, 'b'), immutableCell("foo", 3, 'c'), immutableCell("bar", 1, 'b'), immutableCell("cat", 2, 'b')); } @Override public Set<Cell<String, Integer, Character>> create(Object... 
elements) { Table<String, Integer, Character> table = createTable(); for (Object element : elements) { @SuppressWarnings("unchecked") Cell<String, Integer, Character> cell = (Cell<String, Integer, Character>) element; table.put(cell.getRowKey(), cell.getColumnKey(), cell.getValue()); } return table.cellSet(); } abstract Table<String, Integer, Character> createTable(); @Override @SuppressWarnings("unchecked") public Cell<String, Integer, Character>[] createArray(int length) { return (Cell<String, Integer, Character>[]) new Cell<?, ?, ?>[length]; } @Override public List<Cell<String, Integer, Character>> order( List<Cell<String, Integer, Character>> insertionOrder) { return insertionOrder; } } private abstract static class MapTests extends MapInterfaceTest<String, Integer> { MapTests( boolean allowsNullValues, boolean supportsPut, boolean supportsRemove, boolean supportsClear, boolean supportsIteratorRemove) { super( false, allowsNullValues, supportsPut, supportsRemove, supportsClear, supportsIteratorRemove); } @Override protected String getKeyNotInPopulatedMap() { return "four"; } @Override protected Integer getValueNotInPopulatedMap() { return 4; } } abstract static class RowTests extends MapTests { RowTests( boolean allowsNullValues, boolean supportsPut, boolean supportsRemove, boolean supportsClear, boolean supportsIteratorRemove) { super(allowsNullValues, supportsPut, supportsRemove, supportsClear, supportsIteratorRemove); } abstract Table<Character, String, Integer> makeTable(); @Override protected Map<String, Integer> makeEmptyMap() { return makeTable().row('a'); } @Override protected Map<String, Integer> makePopulatedMap() { Table<Character, String, Integer> table = makeTable(); table.put('a', "one", 1); table.put('a', "two", 2); table.put('a', "three", 3); table.put('b', "four", 4); return table.row('a'); } } static final Function<@Nullable Integer, @Nullable Integer> DIVIDE_BY_2 = new Function<@Nullable Integer, @Nullable Integer>() { @Override public 
@Nullable Integer apply(@Nullable Integer input) { return (input == null) ? null : input / 2; } }; abstract static class ColumnTests extends MapTests { ColumnTests( boolean allowsNullValues, boolean supportsPut, boolean supportsRemove, boolean supportsClear, boolean supportsIteratorRemove) { super(allowsNullValues, supportsPut, supportsRemove, supportsClear, supportsIteratorRemove); } abstract Table<String, Character, Integer> makeTable(); @Override protected Map<String, Integer> makeEmptyMap() { return makeTable().column('a'); } @Override protected Map<String, Integer> makePopulatedMap() { Table<String, Character, Integer> table = makeTable(); table.put("one", 'a', 1); table.put("two", 'a', 2); table.put("three", 'a', 3); table.put("four", 'b', 4); return table.column('a'); } } private abstract static class MapMapTests extends MapInterfaceTest<String, Map<Integer, Character>> { MapMapTests( boolean allowsNullValues, boolean supportsRemove, boolean supportsClear, boolean supportsIteratorRemove) { super(false, allowsNullValues, false, supportsRemove, supportsClear, supportsIteratorRemove); } @Override protected String getKeyNotInPopulatedMap() { return "cat"; } @Override protected Map<Integer, Character> getValueNotInPopulatedMap() { return ImmutableMap.of(); } /** * The version of this test supplied by {@link MapInterfaceTest} fails for this particular map * implementation, because {@code map.get()} returns a view collection that changes in the * course of a call to {@code remove()}. Thus, the expectation doesn't hold that {@code * map.remove(x)} returns the same value which {@code map.get(x)} did immediately beforehand. 
*/ @Override public void testRemove() { Map<String, Map<Integer, Character>> map; try { map = makePopulatedMap(); } catch (UnsupportedOperationException e) { return; } String keyToRemove = map.keySet().iterator().next(); if (supportsRemove) { int initialSize = map.size(); // var oldValue = map.get(keyToRemove); map.remove(keyToRemove); // This line doesn't hold - see the Javadoc comments above. // assertEquals(expectedValue, oldValue); assertFalse(map.containsKey(keyToRemove)); assertEquals(initialSize - 1, map.size()); } else { assertThrows(UnsupportedOperationException.class, () -> map.remove(keyToRemove)); } assertInvariants(map); } } abstract static class RowMapTests extends MapMapTests { RowMapTests( boolean allowsNullValues, boolean supportsRemove, boolean supportsClear, boolean supportsIteratorRemove) { super(allowsNullValues, supportsRemove, supportsClear, supportsIteratorRemove); } abstract Table<String, Integer, Character> makeTable(); @Override protected Map<String, Map<Integer, Character>> makePopulatedMap() { Table<String, Integer, Character> table = makeTable(); populateTable(table); return table.rowMap(); } // `protected` to work around b/320650932 / KT-67447 runtime crash protected final void populateTable(Table<String, Integer, Character> table) { table.put("foo", 1, 'a'); table.put("bar", 1, 'b'); table.put("foo", 3, 'c'); } @Override protected Map<String, Map<Integer, Character>> makeEmptyMap() { return makeTable().rowMap(); } } static final Function<@Nullable String, @Nullable Character> FIRST_CHARACTER = new Function<@Nullable String, @Nullable Character>() { @Override public @Nullable Character apply(@Nullable String input) { return input == null ? 
null : input.charAt(0); } }; abstract static class ColumnMapTests extends MapMapTests { ColumnMapTests( boolean allowsNullValues, boolean supportsRemove, boolean supportsClear, boolean supportsIteratorRemove) { super(allowsNullValues, supportsRemove, supportsClear, supportsIteratorRemove); } abstract Table<Integer, String, Character> makeTable(); @Override protected Map<String, Map<Integer, Character>> makePopulatedMap() { Table<Integer, String, Character> table = makeTable(); table.put(1, "foo", 'a'); table.put(1, "bar", 'b'); table.put(3, "foo", 'c'); return table.columnMap(); } @Override protected Map<String, Map<Integer, Character>> makeEmptyMap() { return makeTable().columnMap(); } } }
googleapis/google-cloud-java
36,134
java-meet/proto-google-cloud-meet-v2/src/main/java/com/google/apps/meet/v2/ListParticipantsRequest.java
/* * Copyright 2025 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ // Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/apps/meet/v2/service.proto // Protobuf Java Version: 3.25.8 package com.google.apps.meet.v2; /** * * * <pre> * Request to fetch list of participants per conference. * </pre> * * Protobuf type {@code google.apps.meet.v2.ListParticipantsRequest} */ public final class ListParticipantsRequest extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.apps.meet.v2.ListParticipantsRequest) ListParticipantsRequestOrBuilder { private static final long serialVersionUID = 0L; // Use ListParticipantsRequest.newBuilder() to construct. 
private ListParticipantsRequest(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private ListParticipantsRequest() { parent_ = ""; pageToken_ = ""; filter_ = ""; } @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance(UnusedPrivateParameter unused) { return new ListParticipantsRequest(); } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.apps.meet.v2.ServiceProto .internal_static_google_apps_meet_v2_ListParticipantsRequest_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.apps.meet.v2.ServiceProto .internal_static_google_apps_meet_v2_ListParticipantsRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.apps.meet.v2.ListParticipantsRequest.class, com.google.apps.meet.v2.ListParticipantsRequest.Builder.class); } public static final int PARENT_FIELD_NUMBER = 1; @SuppressWarnings("serial") private volatile java.lang.Object parent_ = ""; /** * * * <pre> * Required. Format: `conferenceRecords/{conference_record}` * </pre> * * <code> * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @return The parent. */ @java.lang.Override public java.lang.String getParent() { java.lang.Object ref = parent_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); parent_ = s; return s; } } /** * * * <pre> * Required. Format: `conferenceRecords/{conference_record}` * </pre> * * <code> * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @return The bytes for parent. 
*/ @java.lang.Override public com.google.protobuf.ByteString getParentBytes() { java.lang.Object ref = parent_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); parent_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } public static final int PAGE_SIZE_FIELD_NUMBER = 2; private int pageSize_ = 0; /** * * * <pre> * Maximum number of participants to return. The service might return fewer * than this value. * If unspecified, at most 100 participants are returned. * The maximum value is 250; values above 250 are coerced to 250. * Maximum might change in the future. * </pre> * * <code>int32 page_size = 2;</code> * * @return The pageSize. */ @java.lang.Override public int getPageSize() { return pageSize_; } public static final int PAGE_TOKEN_FIELD_NUMBER = 3; @SuppressWarnings("serial") private volatile java.lang.Object pageToken_ = ""; /** * * * <pre> * Page token returned from previous List Call. * </pre> * * <code>string page_token = 3;</code> * * @return The pageToken. */ @java.lang.Override public java.lang.String getPageToken() { java.lang.Object ref = pageToken_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); pageToken_ = s; return s; } } /** * * * <pre> * Page token returned from previous List Call. * </pre> * * <code>string page_token = 3;</code> * * @return The bytes for pageToken. 
*/ @java.lang.Override public com.google.protobuf.ByteString getPageTokenBytes() { java.lang.Object ref = pageToken_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); pageToken_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } public static final int FILTER_FIELD_NUMBER = 4; @SuppressWarnings("serial") private volatile java.lang.Object filter_ = ""; /** * * * <pre> * Optional. User specified filtering condition in [EBNF * format](https://en.wikipedia.org/wiki/Extended_Backus%E2%80%93Naur_form). * The following are the filterable fields: * * * `earliest_start_time` * * `latest_end_time` * * For example, `latest_end_time IS NULL` returns active participants in * the conference. * </pre> * * <code>string filter = 4 [(.google.api.field_behavior) = OPTIONAL];</code> * * @return The filter. */ @java.lang.Override public java.lang.String getFilter() { java.lang.Object ref = filter_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); filter_ = s; return s; } } /** * * * <pre> * Optional. User specified filtering condition in [EBNF * format](https://en.wikipedia.org/wiki/Extended_Backus%E2%80%93Naur_form). * The following are the filterable fields: * * * `earliest_start_time` * * `latest_end_time` * * For example, `latest_end_time IS NULL` returns active participants in * the conference. * </pre> * * <code>string filter = 4 [(.google.api.field_behavior) = OPTIONAL];</code> * * @return The bytes for filter. 
*/ @java.lang.Override public com.google.protobuf.ByteString getFilterBytes() { java.lang.Object ref = filter_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); filter_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(parent_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 1, parent_); } if (pageSize_ != 0) { output.writeInt32(2, pageSize_); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(pageToken_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 3, pageToken_); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(filter_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 4, filter_); } getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(parent_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, parent_); } if (pageSize_ != 0) { size += com.google.protobuf.CodedOutputStream.computeInt32Size(2, pageSize_); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(pageToken_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(3, pageToken_); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(filter_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(4, filter_); } size += getUnknownFields().getSerializedSize(); memoizedSize = size; return 
size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof com.google.apps.meet.v2.ListParticipantsRequest)) { return super.equals(obj); } com.google.apps.meet.v2.ListParticipantsRequest other = (com.google.apps.meet.v2.ListParticipantsRequest) obj; if (!getParent().equals(other.getParent())) return false; if (getPageSize() != other.getPageSize()) return false; if (!getPageToken().equals(other.getPageToken())) return false; if (!getFilter().equals(other.getFilter())) return false; if (!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); hash = (37 * hash) + PARENT_FIELD_NUMBER; hash = (53 * hash) + getParent().hashCode(); hash = (37 * hash) + PAGE_SIZE_FIELD_NUMBER; hash = (53 * hash) + getPageSize(); hash = (37 * hash) + PAGE_TOKEN_FIELD_NUMBER; hash = (53 * hash) + getPageToken().hashCode(); hash = (37 * hash) + FILTER_FIELD_NUMBER; hash = (53 * hash) + getFilter().hashCode(); hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static com.google.apps.meet.v2.ListParticipantsRequest parseFrom(java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.apps.meet.v2.ListParticipantsRequest parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.apps.meet.v2.ListParticipantsRequest parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.apps.meet.v2.ListParticipantsRequest parseFrom( 
com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.apps.meet.v2.ListParticipantsRequest parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.apps.meet.v2.ListParticipantsRequest parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.apps.meet.v2.ListParticipantsRequest parseFrom(java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.apps.meet.v2.ListParticipantsRequest parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } public static com.google.apps.meet.v2.ListParticipantsRequest parseDelimitedFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); } public static com.google.apps.meet.v2.ListParticipantsRequest parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException( PARSER, input, extensionRegistry); } public static com.google.apps.meet.v2.ListParticipantsRequest parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.apps.meet.v2.ListParticipantsRequest 
parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder(com.google.apps.meet.v2.ListParticipantsRequest prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * * * <pre> * Request to fetch list of participants per conference. * </pre> * * Protobuf type {@code google.apps.meet.v2.ListParticipantsRequest} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:google.apps.meet.v2.ListParticipantsRequest) com.google.apps.meet.v2.ListParticipantsRequestOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.apps.meet.v2.ServiceProto .internal_static_google_apps_meet_v2_ListParticipantsRequest_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.apps.meet.v2.ServiceProto .internal_static_google_apps_meet_v2_ListParticipantsRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.apps.meet.v2.ListParticipantsRequest.class, com.google.apps.meet.v2.ListParticipantsRequest.Builder.class); } // Construct using com.google.apps.meet.v2.ListParticipantsRequest.newBuilder() private Builder() {} private 
Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); } @java.lang.Override public Builder clear() { super.clear(); bitField0_ = 0; parent_ = ""; pageSize_ = 0; pageToken_ = ""; filter_ = ""; return this; } @java.lang.Override public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return com.google.apps.meet.v2.ServiceProto .internal_static_google_apps_meet_v2_ListParticipantsRequest_descriptor; } @java.lang.Override public com.google.apps.meet.v2.ListParticipantsRequest getDefaultInstanceForType() { return com.google.apps.meet.v2.ListParticipantsRequest.getDefaultInstance(); } @java.lang.Override public com.google.apps.meet.v2.ListParticipantsRequest build() { com.google.apps.meet.v2.ListParticipantsRequest result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public com.google.apps.meet.v2.ListParticipantsRequest buildPartial() { com.google.apps.meet.v2.ListParticipantsRequest result = new com.google.apps.meet.v2.ListParticipantsRequest(this); if (bitField0_ != 0) { buildPartial0(result); } onBuilt(); return result; } private void buildPartial0(com.google.apps.meet.v2.ListParticipantsRequest result) { int from_bitField0_ = bitField0_; if (((from_bitField0_ & 0x00000001) != 0)) { result.parent_ = parent_; } if (((from_bitField0_ & 0x00000002) != 0)) { result.pageSize_ = pageSize_; } if (((from_bitField0_ & 0x00000004) != 0)) { result.pageToken_ = pageToken_; } if (((from_bitField0_ & 0x00000008) != 0)) { result.filter_ = filter_; } } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public 
Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof com.google.apps.meet.v2.ListParticipantsRequest) { return mergeFrom((com.google.apps.meet.v2.ListParticipantsRequest) other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(com.google.apps.meet.v2.ListParticipantsRequest other) { if (other == com.google.apps.meet.v2.ListParticipantsRequest.getDefaultInstance()) return this; if (!other.getParent().isEmpty()) { parent_ = other.parent_; bitField0_ |= 0x00000001; onChanged(); } if (other.getPageSize() != 0) { setPageSize(other.getPageSize()); } if (!other.getPageToken().isEmpty()) { pageToken_ = other.pageToken_; bitField0_ |= 0x00000004; onChanged(); } if (!other.getFilter().isEmpty()) { filter_ = other.filter_; bitField0_ |= 0x00000008; onChanged(); } this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { parent_ = input.readStringRequireUtf8(); bitField0_ |= 0x00000001; break; } // case 10 case 16: { pageSize_ = input.readInt32(); 
bitField0_ |= 0x00000002; break; } // case 16 case 26: { pageToken_ = input.readStringRequireUtf8(); bitField0_ |= 0x00000004; break; } // case 26 case 34: { filter_ = input.readStringRequireUtf8(); bitField0_ |= 0x00000008; break; } // case 34 default: { if (!super.parseUnknownField(input, extensionRegistry, tag)) { done = true; // was an endgroup tag } break; } // default: } // switch (tag) } // while (!done) } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.unwrapIOException(); } finally { onChanged(); } // finally return this; } private int bitField0_; private java.lang.Object parent_ = ""; /** * * * <pre> * Required. Format: `conferenceRecords/{conference_record}` * </pre> * * <code> * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @return The parent. */ public java.lang.String getParent() { java.lang.Object ref = parent_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); parent_ = s; return s; } else { return (java.lang.String) ref; } } /** * * * <pre> * Required. Format: `conferenceRecords/{conference_record}` * </pre> * * <code> * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @return The bytes for parent. */ public com.google.protobuf.ByteString getParentBytes() { java.lang.Object ref = parent_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); parent_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * * * <pre> * Required. Format: `conferenceRecords/{conference_record}` * </pre> * * <code> * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @param value The parent to set. * @return This builder for chaining. 
*/ public Builder setParent(java.lang.String value) { if (value == null) { throw new NullPointerException(); } parent_ = value; bitField0_ |= 0x00000001; onChanged(); return this; } /** * * * <pre> * Required. Format: `conferenceRecords/{conference_record}` * </pre> * * <code> * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @return This builder for chaining. */ public Builder clearParent() { parent_ = getDefaultInstance().getParent(); bitField0_ = (bitField0_ & ~0x00000001); onChanged(); return this; } /** * * * <pre> * Required. Format: `conferenceRecords/{conference_record}` * </pre> * * <code> * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @param value The bytes for parent to set. * @return This builder for chaining. */ public Builder setParentBytes(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); parent_ = value; bitField0_ |= 0x00000001; onChanged(); return this; } private int pageSize_; /** * * * <pre> * Maximum number of participants to return. The service might return fewer * than this value. * If unspecified, at most 100 participants are returned. * The maximum value is 250; values above 250 are coerced to 250. * Maximum might change in the future. * </pre> * * <code>int32 page_size = 2;</code> * * @return The pageSize. */ @java.lang.Override public int getPageSize() { return pageSize_; } /** * * * <pre> * Maximum number of participants to return. The service might return fewer * than this value. * If unspecified, at most 100 participants are returned. * The maximum value is 250; values above 250 are coerced to 250. * Maximum might change in the future. * </pre> * * <code>int32 page_size = 2;</code> * * @param value The pageSize to set. * @return This builder for chaining. 
*/ public Builder setPageSize(int value) { pageSize_ = value; bitField0_ |= 0x00000002; onChanged(); return this; } /** * * * <pre> * Maximum number of participants to return. The service might return fewer * than this value. * If unspecified, at most 100 participants are returned. * The maximum value is 250; values above 250 are coerced to 250. * Maximum might change in the future. * </pre> * * <code>int32 page_size = 2;</code> * * @return This builder for chaining. */ public Builder clearPageSize() { bitField0_ = (bitField0_ & ~0x00000002); pageSize_ = 0; onChanged(); return this; } private java.lang.Object pageToken_ = ""; /** * * * <pre> * Page token returned from previous List Call. * </pre> * * <code>string page_token = 3;</code> * * @return The pageToken. */ public java.lang.String getPageToken() { java.lang.Object ref = pageToken_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); pageToken_ = s; return s; } else { return (java.lang.String) ref; } } /** * * * <pre> * Page token returned from previous List Call. * </pre> * * <code>string page_token = 3;</code> * * @return The bytes for pageToken. */ public com.google.protobuf.ByteString getPageTokenBytes() { java.lang.Object ref = pageToken_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); pageToken_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * * * <pre> * Page token returned from previous List Call. * </pre> * * <code>string page_token = 3;</code> * * @param value The pageToken to set. * @return This builder for chaining. */ public Builder setPageToken(java.lang.String value) { if (value == null) { throw new NullPointerException(); } pageToken_ = value; bitField0_ |= 0x00000004; onChanged(); return this; } /** * * * <pre> * Page token returned from previous List Call. 
* </pre> * * <code>string page_token = 3;</code> * * @return This builder for chaining. */ public Builder clearPageToken() { pageToken_ = getDefaultInstance().getPageToken(); bitField0_ = (bitField0_ & ~0x00000004); onChanged(); return this; } /** * * * <pre> * Page token returned from previous List Call. * </pre> * * <code>string page_token = 3;</code> * * @param value The bytes for pageToken to set. * @return This builder for chaining. */ public Builder setPageTokenBytes(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); pageToken_ = value; bitField0_ |= 0x00000004; onChanged(); return this; } private java.lang.Object filter_ = ""; /** * * * <pre> * Optional. User specified filtering condition in [EBNF * format](https://en.wikipedia.org/wiki/Extended_Backus%E2%80%93Naur_form). * The following are the filterable fields: * * * `earliest_start_time` * * `latest_end_time` * * For example, `latest_end_time IS NULL` returns active participants in * the conference. * </pre> * * <code>string filter = 4 [(.google.api.field_behavior) = OPTIONAL];</code> * * @return The filter. */ public java.lang.String getFilter() { java.lang.Object ref = filter_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); filter_ = s; return s; } else { return (java.lang.String) ref; } } /** * * * <pre> * Optional. User specified filtering condition in [EBNF * format](https://en.wikipedia.org/wiki/Extended_Backus%E2%80%93Naur_form). * The following are the filterable fields: * * * `earliest_start_time` * * `latest_end_time` * * For example, `latest_end_time IS NULL` returns active participants in * the conference. * </pre> * * <code>string filter = 4 [(.google.api.field_behavior) = OPTIONAL];</code> * * @return The bytes for filter. 
*/ public com.google.protobuf.ByteString getFilterBytes() { java.lang.Object ref = filter_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); filter_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * * * <pre> * Optional. User specified filtering condition in [EBNF * format](https://en.wikipedia.org/wiki/Extended_Backus%E2%80%93Naur_form). * The following are the filterable fields: * * * `earliest_start_time` * * `latest_end_time` * * For example, `latest_end_time IS NULL` returns active participants in * the conference. * </pre> * * <code>string filter = 4 [(.google.api.field_behavior) = OPTIONAL];</code> * * @param value The filter to set. * @return This builder for chaining. */ public Builder setFilter(java.lang.String value) { if (value == null) { throw new NullPointerException(); } filter_ = value; bitField0_ |= 0x00000008; onChanged(); return this; } /** * * * <pre> * Optional. User specified filtering condition in [EBNF * format](https://en.wikipedia.org/wiki/Extended_Backus%E2%80%93Naur_form). * The following are the filterable fields: * * * `earliest_start_time` * * `latest_end_time` * * For example, `latest_end_time IS NULL` returns active participants in * the conference. * </pre> * * <code>string filter = 4 [(.google.api.field_behavior) = OPTIONAL];</code> * * @return This builder for chaining. */ public Builder clearFilter() { filter_ = getDefaultInstance().getFilter(); bitField0_ = (bitField0_ & ~0x00000008); onChanged(); return this; } /** * * * <pre> * Optional. User specified filtering condition in [EBNF * format](https://en.wikipedia.org/wiki/Extended_Backus%E2%80%93Naur_form). * The following are the filterable fields: * * * `earliest_start_time` * * `latest_end_time` * * For example, `latest_end_time IS NULL` returns active participants in * the conference. 
* </pre> * * <code>string filter = 4 [(.google.api.field_behavior) = OPTIONAL];</code> * * @param value The bytes for filter to set. * @return This builder for chaining. */ public Builder setFilterBytes(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); filter_ = value; bitField0_ |= 0x00000008; onChanged(); return this; } @java.lang.Override public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:google.apps.meet.v2.ListParticipantsRequest) } // @@protoc_insertion_point(class_scope:google.apps.meet.v2.ListParticipantsRequest) private static final com.google.apps.meet.v2.ListParticipantsRequest DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new com.google.apps.meet.v2.ListParticipantsRequest(); } public static com.google.apps.meet.v2.ListParticipantsRequest getDefaultInstance() { return DEFAULT_INSTANCE; } private static final com.google.protobuf.Parser<ListParticipantsRequest> PARSER = new com.google.protobuf.AbstractParser<ListParticipantsRequest>() { @java.lang.Override public ListParticipantsRequest parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { Builder builder = newBuilder(); try { builder.mergeFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(builder.buildPartial()); } catch (com.google.protobuf.UninitializedMessageException e) { throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); } catch (java.io.IOException e) { throw new 
com.google.protobuf.InvalidProtocolBufferException(e) .setUnfinishedMessage(builder.buildPartial()); } return builder.buildPartial(); } }; public static com.google.protobuf.Parser<ListParticipantsRequest> parser() { return PARSER; } @java.lang.Override public com.google.protobuf.Parser<ListParticipantsRequest> getParserForType() { return PARSER; } @java.lang.Override public com.google.apps.meet.v2.ListParticipantsRequest getDefaultInstanceForType() { return DEFAULT_INSTANCE; } }
apache/commons-text
15,668
src/test/java/org/apache/commons/text/OssFuzzTest.java
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.commons.text; import org.junit.jupiter.api.Disabled; import org.junit.jupiter.api.Test; public class OssFuzzTest { /** * Input is from clusterfuzz-testcase-StringSubstitutorInterpolatorFuzzer-5447769450741760 * * @throws Exception Thrown when the test fails. */ @Disabled("Until Apache Commons Lang 3.18.0") @Test public void test() throws Exception { StringSubstitutor.createInterpolator().replace( "¿ ,${const:ˇda´~e]W]~t$t${.ubase64encoder{con+s{.ubase64encoder{con+s~t....................................ˇˇˇˇˇˇˇˇˇˇ&${localhot:ˇˇˇˇˇˇ4ˇ..................................s${.!.${.. 
\\E],${conÅEEE]W€EÅE.!${.ubase64encoder{conÅEEE]W€EÅE.!${.ubase64encoder{con+s~t....................................ˇˇˇˇˇˇˇˇˇˇ&${localhot:ˇˇˇˇˇˇ-636ˇ...............................................................t]V]W€EÅE.!$${.u--......t]V]W€EÅE.!$${.u--......t]V]W€EÅE.!$${.u--......t]V]W€EÅE.!$${.u--......t]V]W€EÅE.!$${.u--......t]V]W€EÅE.!$${.u--......t]V]W€EÅE.!$${.u--......t]V]W€EÅE.!$${.u--......t]V]W€EÅE.!$${.u--......t]V]W€EÅE.!$${.u--......t]V]W€EÅE.!$${.u--......t]V]W€EÅE.!$${.u--......t]V]W€EÅE.!$${.u--......t]V]W€EÅE.!$${.u--......t]V]W€EÅE.!$${.u--......t]V]W€EÅE.!$${.u--......t]V]W€EÅE.!$${.u--......t]V]W€EÅE.!$${.u--......t]V]W€EÅE.!$${.u--......t]V]W€EÅE.!$${.u--......t]V]W€EÅE.!$${.u--......t]V]W€EÅE.!$${.u--......t]V]W€EÅE.!$${.u--......t]V]W€EÅE.!$${.u--......t]V]W€EÅE.!$${.u--......t]V]W€EÅE.!$${.u--......t]V]W€EÅE.!$${.u--......t]V]W€EÅE.!$${.u--......t]V]W€EÅE.!$${.u--......t]V]W€EÅE.!$${.u--......t]V]W€EÅE.!$${.u--......t]V]W€EÅE.!$${.u--......t]V]W€EÅE.!$${.u--......t]V]W€EÅE.!$${.u--......t]V]W€EÅE.!$${.u--......t]V]W€EÅE.!$${.u--......t]V]W€EÅE.!$${.u--......t]V]W€EÅE.!$${.u--......t]V]W€EÅE.!$${.u--......t]V]W€EÅE.!$${.u--......t]V]W€EÅE.!$${.u--......t]V]W€EÅE.!$${.u--......t]V]W€EÅE.!$${.u--......t]V]W€EÅE.!$${.u--......t]V]W€EÅE.!$${.u--......t]V]W€EÅE.!$${.u--......t]V]W€EÅE.!$${.u--......t]V]W€EÅE.!$${.u--......t]V]W€EÅE.!$${.u--......t]V]W€EÅE.!$${.u--......t]V]W€EÅE.!$${.u--......t]V]W€EÅE.!$${.u--......t]V]W€EÅE.!$${.u--......t]V]W€EÅE.!$${.u--......t]V]W€EÅE.!$${.u--......t]V]W€EÅE.!$${.u--......t]V]W€EÅE.!$${.u--......t]V]W€EÅE.!$${.u--......t]V]W€EÅE.!$${.u--......t]V]W€EÅE.!$${.u--......t]V]W€EÅE.!$${.u--......t]V]W€EÅE.!$${.u--......t]V]W€EÅE.!$${.u--......t]V]W€EÅE.!$${.u--......t]V]W€EÅE.!$${.u--......t]V]W€EÅE.!$${.u--......t]V]W€EÅE.!$${.u--......t]V]W€EÅE.!$${.u--......t]V]W€EÅE.!$${.u--......t]V]W€EÅE.!$${.u--......t]V]W€EÅE.!$${.u--......t]V]W€EÅE.!$${.u--......t]V]W€EÅE.!$${.u--......t]V]W€EÅE.!$${.u--......t]V]W€EÅE.!$
${.u--......t]V]W€EÅE.!$${.u--......t]V]W€EÅE.!$${.u--......t]V]W€EÅE.!$${.u--......t]V]W€EÅE.!$${.u--......t]V]W€EÅE.!$${.u--......t]V]W€EÅE.!$${.u--......t]V]W€EÅE.!$${.u--......t]V]W€EÅE.!$${.u--......t]V]W€EÅE.!$${.u--......t]V]W€EÅE.!$${.u--......t]V]W€EÅE.!$${.u--......t]V]W€EÅE.!$${.u--......t]V]W€EÅE.!$${.u--......t]V]W€EÅE.!$${.u--......t]V]W€EÅE.!$${.u--......t]V]W€EÅE.!$${.u--......t]V]W€EÅE.!$${.u--......t]V]W€EÅE.!$${.u--......t]V]W€EÅE.!$${.u--......t]V]W€EÅE.!$${.u--......t]V]W€EÅE.!$${.u--......t]V]W€EÅE.!$${.u--......t]V]W€EÅE.!$${.u--......t]V]W€EÅE.!$${.u--......t]V]W€EÅE.!$${.u--......t]V]W€EÅE.!$${.u--......t]V]W€EÅE.!$${.u--......t]V]W€EÅE.!$${.u--......t]V]W€EÅE.!$${.u--......t]V]W€EÅE.!$${.u--......t]V]W€EÅE.!$${.u--......t]V]W€EÅE.!$${.u--......t]V]W€EÅE.!$${.u--......t]V]W€EÅE.!$${.u--......t]V]W€EÅE.!$${.u--......t]V]W€EÅE.!$${.u--......t]V]W€EÅE.!$${.u--......t]V]W€EÅE.!$${.u--......t]V]W€EÅE.!$${.u--......t]V]W€EÅE.!$${.u--......t]V]W€EÅE.!$${.u--......t]V]W€EÅE.!$${.u--......t]V]W€EÅE.!$${.u--......t]V]W€EÅE.!$${.u--......t]V]W€EÅE.!$${.u--......t]V]W€EÅE.!$${.u--......t]V]W€EÅE.!$${.u--......t]V]W€EÅE.!$${.u--......t]V]W€EÅE.!$${.u--......t]V]W€EÅE.!$${.u--......t]V]W€EÅE.!$${.u--......t]V]W€EÅE.!$${.u--......t]V]W€EÅE.!$${.u--......t]V]W€EÅE.!$${.u--......t]V]W€EÅE.!$${.u--......t]V]W€EÅE.!$${.u--......t]V]W€EÅE.!$${.u--......t]V]W€EÅE.!$${.u--......t]V]W€EÅE.!$${.u--......t]V]W€EÅE.!$${.u--......t]V]W€EÅE.!$${.u--......t]V]W€EÅE.!$${.u--......t]V]W€EÅE.!$${.u--......t]V]W€EÅE.!$${.u--......t]V]W€EÅE.!$${.u--......t]V]W€EÅE.!$${.u--......t]V]W€EÅE.!$${.u--......t]V]W€EÅE.!$${.u--......t]V]W€EÅE.!$${.u--......t]V]W€EÅE.!$${.u--......t]V]W€EÅE.!$${.u--......t]V]W€EÅE.!$${.u--......t]V]W€EÅE.!$${.u--......t]V]W€EÅE.!$${.u--......t]V]W€EÅE.!$${.u--......t]V]W€EÅE.!$${.u--......t]V]W€EÅE.!$${.u--......t]V]W€EÅE.!$${.u--......t]V]W€EÅE.!$${.u--......t]V]W€EÅE.!$${.u--......t]V]W€EÅE.!$${.u--......t]V]W€EÅE.!$${.u--......t]V]W€EÅE.!$${.u--..
....t]V]W€EÅE.!$${.u--......t]V]W€EÅE.!$${.u--......t]V]W€EÅE.!$${.u--......t]V]W€EÅE.!$${.u--......t]V]W€EÅE.!$${.u--......t]V]W€EÅE.!$${.u--......t]V]W€EÅE.!$${.u--......t]V]W€EÅE.!$${.u--......t]V]W€EÅE.!$${.u--......t]V]W€EÅE.!$${.u--......t]V]W€EÅE.!$${.u--......t]V]W€EÅE.!$${.u--......t]V]W€EÅE.!$${.u--......t]V]W€EÅE.!$${.u--......t]V]W€EÅE.!$${.u--......t]V]W€EÅE.!$${.u--......t]V]W€EÅE.!$${.u--......t]V]W€EÅE.!$${.u--......t]V]W€EÅE.!$${.u--......t]V]W€EÅE.!$${.u--......t]V]W€EÅE.!$${.u--......t]V]W€EÅE.!$${.u--......t]V]W€EÅE.!$${.u--......t]V]W€EÅE.!$${.u--......t]V]W€EÅE.!$${.u--......t]V]W€EÅE.!$${.u--......t]V]W€EÅE.!$${.u--......t]V]W€EÅE.!$${.u--......t]V]W€EÅE.!$${.u--......t]V]W€EÅE.!$${.u--......t]V]W€EÅE.!$${.u--......t]V]W€EÅE.!$${.u--......t]V]W€EÅE.!$${.u--......t]V]W€EÅE.!$${.u--......t]V]W€EÅE.!$${.u--......t]V]W€EÅE.!$${.u--......t]V]W€EÅE.!$${.u--......t]V]W€EÅE.!$${.u--......t]V]W€EÅE.!$${.u--......t]V]W€EÅE.!$${.u--......t]V]W€EÅE.!$${.u--......t]V]W€EÅE.!$${.u--......t]V]W€EÅE.!$${.u--......t]V]W€EÅE.!$${.u--......t]V]W€EÅE.!$${.u--......t]V]W€EÅE.!$${.u--......t]V]W€EÅE.!$${.u--......t]V]W€EÅE.!$${.u--......t]V]W€EÅE.!$${.u--......t]V]W€EÅE.!$${.u--......t]V]W€EÅE.!$${.u--......t]V]W€EÅE.!$${.u--......t]V]W€EÅE.!$${.u--......t]V]W€EÅE.!$${.u--......t]V]W€EÅE.!$${.u--......t]V]W€EÅE.!$${.u--......t]V]W€EÅE.!$${.u--......t]V]W€EÅE.!$${.u--......t]V]W€EÅE.!$${.u--......t]V]W€EÅE.!$${.u--......t]V]W€EÅE.!$${.u--......t]V]W€EÅE.!$${.u--......t]V]W€EÅE.!$${.u--......t]V]W€EÅE.!$${.u--......t]V]W€EÅE.!$${.u--......t]V]W€EÅE.!$${.u--......t]V]W€EÅE.!$${.u--......t]V]W€EÅE.!$${.u--......t]V]W€EÅE.!$${.u--......t]V]W€EÅE.!$${.u--......t]V]W€EÅE.!$${.u--......t]V]W€EÅE.!$${.u--......t]V]W€EÅE.!$${.u--......t]V]W€EÅE.!$${.u--......t]V]W€EÅE.!$${.u--......t]V]W€EÅE.!$${.u--......t]V]W€EÅE.!$${.u--......t]V]W€EÅE.!$${.u--......t]V]W€EÅE.!$${.u--......t]V]W€EÅE.!$${.u--......t]V]W€EÅE.!$${.u--......t]V]W€EÅE.!$${.u--......t]V]W€EÅE.!$${.u--......t]V]
W€EÅE.!$${.u--......t]V]W€EÅE.!$${.u--......t]V]W€EÅE.!$${.u--......t]V]W€EÅE.!$${.u--......t]V]W€EÅE.!$${.u--......t]V]W€EÅE.!$${.u--......t]V]W€EÅE.!$${.u--......t]V]W€EÅE.!$${.u--......t]V]W€EÅE.!$${.u--......t]V]W€EÅE.!$${.u--......t]V]W€EÅE.!$${.u--......t]V]W€EÅE.!$${.u--......t]V]W€EÅE.!$${.u--......t]V]W€EÅE.!$${.u--......t]V]W€EÅE.!$${.u--......t]V]W€EÅE.!$${.u--......t]V]W€EÅE.!$${.u--......t]V]W€EÅE.!$${.u--......t]V]W€EÅE.!$${.u--......t]V]W€EÅE.!$${.u--......t]V]W€EÅE.!$${.u--......t]V]W€EÅE.!$${.u--......t]V]W€EÅE.!$${.u--......t]V]W€EÅE.!$${.u--......t]V]W€EÅE.!$${.u--......t]V]W€EÅE.!$${.u--......t]V]W€EÅE.!$${.u--......t]V]W€EÅE.!$${.u--......t]V]W€EÅE.!$${.u--......t]V]W€EÅE.!$${.u--......t]V]W€EÅE.!$${.u--......t]V]W€EÅE.!$${.u--......t]V]W€EÅE.!$${.u--......t]V]W€EÅE.!$${.u--......t]V]W€EÅE.!$${.u--......t]V]W€EÅE.!$${.u--......t]V]W€EÅE.!$${.u--......t]V]W€EÅE.!$${.u--......t]V]W€EÅE.!$${.u--......t]V]W€EÅE.!$${.u--......t]V]W€EÅE.!$${.u--......t]V]W€EÅE.!$${.u--......t]V]W€EÅE.!$${.u--......t]V]W€EÅE.!$${.u--......t]V]W€EÅE.!$${.u--......t]V]W€EÅE.!$${.u--......t]V]W€EÅE.!$${.u--......t]V]W€EÅE.!$${.u--......t]V]W€EÅE.!$${.u--......t]V]W€EÅE.!$${.u--......t]V]W€EÅE.!$${.u--......t]V]W€EÅE.!$${.u--......t]V]W€EÅE.!$${.u--......t]V]W€EÅE.!$${.u--......t]V]W€EÅE.!$${.u--......t]V]W€EÅE.!$${.u--......t]V]W€EÅE.!$${.u--......t]V]W€EÅE.!$${.u--......t]V]W€EÅE.!$${.u--......t]V]W€EÅE.!$${.u--......t]V]W€EÅE.!$${.u--......t]V]W€EÅE.!$${.u--......t]V]W€EÅE.!$${.u--......t]V]W€EÅE.!$${.u--......t]V]W€EÅE.!$${.u--......t]V]W€EÅE.!$${.u--......t]V]W€EÅE.!$${.u--......t]V]W€EÅE.!$${.u--......t]V]W€EÅE.!$${.u--......t]V]W€EÅE.!$${.u--......t]V]W€EÅE.!$${.u--......t]V]W€EÅE.!$${.u--......t]V]W€EÅE.!$${.u--......t]V]W€EÅE.!$${.u--......t]V]W€EÅE.!$${.u--......t]V]W€EÅE.!$${.u--......t]V]W€EÅE.!$${.u--......t]V]W€EÅE.!$${.u--......t]V]W€EÅE.!$${.u--......t]V]W€EÅE.!$${.u--......t]V]W€EÅE.!$${.u--......t]V]W€EÅE.!$${.u--......t]V]W€EÅE.!$${.u--......t]V]W€EÅE.!$
${.u--......t]V]W€EÅE.!$${.u--......t]V]W€EÅE.!$${.u--......t]V]W€EÅE.!$${.u--......t]V]W€EÅE.!$${.u--......t]V]W€EÅE.!$${.u--......t]V]W€EÅE.!$${.u--......t]V]W€EÅE.!$${.u--......t]V]W€EÅE.!$${.u--......t]V]W€EÅE.!$${.u--......t]V]W€EÅE.!$${.u--......t]V]W€EÅE.!$${.u--......t]V]W€EÅE.!$${.u--......t]V]W€EÅE.!$${.u--......t]V]W€EÅE.!$${.u--......t]V]W€EÅE.!$${.u--......t]V]W€EÅE.!$${.u--......t]V]W€EÅE.!$${.u--......t]V]W€EÅE.!$${.u--......t]V]W€EÅE.!$${.u--......t]V]W€EÅE.!$${.u--......t]V]W€EÅE.!$${.u--......t]V]W€EÅE.!$${.u--......t]V]W€EÅE.!$${.u--......t]V]W€EÅE.!$${.u--......t]V]W€EÅE.!$${.u--......t]V]W€EÅE.!$${.u--......t]V]W€EÅE.!$${.u--......t]V]W€EÅE.!$${.u--......t]V]W€EÅE.!$${.u--......t]V]W€EÅE.!$${.u--......t]V]W€EÅE.!$${.u--......t]V]W€EÅE.!$${.u--......t]V]W€EÅE.!$${.u--......t]V]W€EÅE.!$${.u--......t]V]W€EÅE.!$${.u--......t]V]W€EÅE.!$${.u--......t]V]W€EÅE.!$${.u--......t]V]W€EÅE.!$${.u--......t]V]W€EÅE.!$${.u--......t]V]W€EÅE.!$${.u--......t]V]W€EÅE.!$${.u--......t]V]W€EÅE.!$${.u--......t]V]W€EÅE.!$${.u--......t]V]W€EÅE.!$${.u--......t]V]W€EÅE.!$${.u--......t]V]W€EÅE.!$${.u--......t]V]W€EÅE.!$${.u--......t]V]W€EÅE.!$${.u--......t]V]W€EÅE.!$${.u--......t]V]W€EÅE.!$${.u--......t]V]W€EÅE.!$${.u--......t]V]W€EÅE.!$${.u--......t]V]W€EÅE.!$${.u--......t]V]W€EÅE.!$${.u--......t]V]W€EÅE.!$${.u--......t]V]W€EÅE.!$${.u--......t]V]W€EÅE.!$${.u--......t]V]W€EÅE.!$${.u--......t]V]W€EÅE.!$${.u--......t]V]W€EÅE.!$${.u--......t]V]W€EÅE.!$${.u--......t]V]W€EÅE.!$${.u--......t]V]W€EÅE.!$${.u--......t]V]W€EÅE.!$${.u--......t]V]W€EÅE.!$${.u--......t]V]W€EÅE.!$${.u--......t]V]W€EÅE.!$${.u--......t]V]W€EÅE.!$${.u--......t]V]W€EÅE.!$${.u--......t]V]W€EÅE.!$${.u--......t]V]W€EÅE.!$${.u--......t]V]W€EÅE.!$${.u--......t]V]W€EÅE.!$${.u--......t]V]W€EÅE.!$${.u--......t]V]W€EÅE.!$${.u--......t]V]W€EÅE.!$${.u--......t]V]W€EÅE.!$${.u--......t]V]W€EÅE.!$${.u--......t]V]W€EÅE.!$${.u--......t]V]W€EÅE.!$${.u--}"); } /** * Input is from 
clusterfuzz-testcase-StringSubstitutorInterpolatorFuzzer-5447769450741760 * * @throws Exception Thrown when the test fails. */ @Test public void testStringSubstitutorInterpolatorFuzzer5447769450741760() throws Exception { StringSubstitutor.createInterpolator().replace("${date:swswswswswsswswswswswswwswsswswswswsswswswswsswswswswswswswswswswsws}"); } }
apache/hive
36,383
ql/src/java/org/apache/hadoop/hive/ql/exec/tez/TezTask.java
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.hadoop.hive.ql.exec.tez; import org.apache.hadoop.hive.ql.session.SessionStateUtil; import org.apache.hive.common.util.Ref; import org.apache.hadoop.hive.ql.exec.tez.UserPoolMapping.MappingInput; import java.io.IOException; import java.util.ArrayList; import java.util.Arrays; import java.util.Collection; import java.util.Collections; import java.util.EnumSet; import java.util.HashMap; import java.util.LinkedHashMap; import java.util.LinkedList; import java.util.List; import java.util.Map; import java.util.Optional; import java.util.Set; import javax.annotation.Nullable; import org.apache.hadoop.classification.InterfaceAudience.Private; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.Path; import org.apache.hadoop.hive.common.LogUtils; import org.apache.hadoop.hive.common.ServerUtils; import org.apache.hadoop.hive.common.metrics.common.Metrics; import org.apache.hadoop.hive.common.metrics.common.MetricsConstant; import org.apache.hadoop.hive.conf.HiveConf; import org.apache.hadoop.hive.conf.HiveConfUtil; import org.apache.hadoop.hive.ql.Context; import org.apache.hadoop.hive.ql.exec.FileSinkOperator; import org.apache.hadoop.hive.ql.exec.Operator; import 
org.apache.hadoop.hive.ql.exec.Task; import org.apache.hadoop.hive.ql.exec.Utilities; import org.apache.hadoop.hive.ql.exec.tez.monitoring.TezJobMonitor; import org.apache.hadoop.hive.ql.log.PerfLogger; import org.apache.hadoop.hive.ql.metadata.HiveException; import org.apache.hadoop.hive.ql.plan.BaseWork; import org.apache.hadoop.hive.ql.plan.MapWork; import org.apache.hadoop.hive.ql.plan.MergeJoinWork; import org.apache.hadoop.hive.ql.plan.OperatorDesc; import org.apache.hadoop.hive.ql.plan.ReduceWork; import org.apache.hadoop.hive.ql.plan.TezEdgeProperty; import org.apache.hadoop.hive.ql.plan.TezEdgeProperty.EdgeType; import org.apache.hadoop.hive.ql.plan.TezWork; import org.apache.hadoop.hive.ql.plan.UnionWork; import org.apache.hadoop.hive.ql.plan.api.StageType; import org.apache.hadoop.hive.ql.session.SessionState; import org.apache.hadoop.hive.ql.wm.WmContext; import org.apache.hadoop.mapred.JobConf; import org.apache.hadoop.security.UserGroupInformation; import org.apache.hadoop.util.StringUtils; import org.apache.hadoop.yarn.api.records.ApplicationReport; import org.apache.hadoop.yarn.api.records.LocalResource; import org.apache.tez.client.CallerContext; import org.apache.tez.client.TezClient; import org.apache.tez.common.counters.CounterGroup; import org.apache.tez.common.counters.TezCounter; import org.apache.tez.common.counters.TezCounters; import org.apache.tez.common.security.DAGAccessControls; import org.apache.tez.dag.api.DAG; import org.apache.tez.dag.api.Edge; import org.apache.tez.dag.api.GroupInputEdge; import org.apache.tez.dag.api.SessionNotRunning; import org.apache.tez.dag.api.TezConfiguration; import org.apache.tez.dag.api.TezException; import org.apache.tez.dag.api.Vertex; import org.apache.tez.dag.api.VertexGroup; import org.apache.tez.dag.api.client.DAGClient; import org.apache.tez.dag.api.client.DAGStatus; import org.apache.tez.dag.api.client.StatusGetOpts; import org.apache.tez.dag.api.client.VertexStatus; import 
org.apache.tez.runtime.library.api.TezRuntimeConfiguration; import org.json.JSONObject; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import com.google.common.annotations.VisibleForTesting; import static org.apache.hadoop.hive.shims.HadoopShims.USER_ID; /** * * TezTask handles the execution of TezWork. Currently it executes a graph of map and reduce work * using the Tez APIs directly. * */ @SuppressWarnings({"serial"}) public class TezTask extends Task<TezWork> { private static final String CLASS_NAME = TezTask.class.getName(); private static final String JOB_ID_TEMPLATE = "job_%s%d_%s"; private static final String ICEBERG_PROPERTY_PREFIX = "iceberg.mr."; private static final String ICEBERG_SERIALIZED_TABLE_PREFIX = "iceberg.mr.serialized.table."; private static Logger LOG = LoggerFactory.getLogger(CLASS_NAME); private final PerfLogger perfLogger = SessionState.getPerfLogger(); private static final String TEZ_MEMORY_RESERVE_FRACTION = "tez.task.scale.memory.reserve-fraction"; private final TezRuntimeContext runtimeContext = new TezRuntimeContext(); private final DagUtils utils; private final Object dagClientLock = new Object(); private volatile boolean isShutdown = false; private DAGClient dagClient = null; Map<BaseWork, Vertex> workToVertex = new HashMap<BaseWork, Vertex>(); Map<BaseWork, JobConf> workToConf = new HashMap<BaseWork, JobConf>(); public TezTask() { this(DagUtils.getInstance()); } public TezTask(DagUtils utils) { super(); this.utils = utils; } public TezCounters getTezCounters() { return runtimeContext.getCounters(); } public void setTezCounters(final TezCounters counters) { runtimeContext.setCounters(counters); } public TezRuntimeContext getRuntimeContext() { return runtimeContext; } /** * Making TezTask backward compatible with the old MR-based Task API (ExecDriver/MapRedTask) */ @Override public String getExternalHandle() { return this.jobID; } @Override public int execute() { int rc = 1; boolean cleanContext = false; Context ctx = null; 
Ref<TezSessionState> sessionRef = Ref.from(null); final String queryId = HiveConf.getVar(conf, HiveConf.ConfVars.HIVE_QUERY_ID); TezJobMonitor monitor = null; try { // Get or create Context object. If we create it we have to clean it later as well. ctx = context; if (ctx == null) { ctx = new Context(conf); cleanContext = true; // some DDL task that directly executes a TezTask does not setup Context and hence TriggerContext. // Setting queryId is messed up. Some DDL tasks have executionId instead of proper queryId. WmContext wmContext = new WmContext(System.currentTimeMillis(), queryId); ctx.setWmContext(wmContext); } // Need to remove this static hack. But this is the way currently to get a session. SessionState ss = SessionState.get(); // Note: given that we return pool sessions to the pool in the finally block below, and that // we need to set the global to null to do that, this "reuse" may be pointless. TezSessionState session = sessionRef.value = ss.getTezSession(); if (session != null && !session.isOpen()) { LOG.warn("The session: " + session + " has not been opened"); } // We only need a username for UGI to use for groups; getGroups will fetch the groups // based on Hadoop configuration, as documented at // https://hadoop.apache.org/docs/r2.8.0/hadoop-project-dist/hadoop-common/GroupsMapping.html String userName = getUserNameForGroups(ss); List<String> groups = null; if (userName == null) { userName = "anonymous"; } else { try { groups = UserGroupInformation.createRemoteUser(userName).getGroups(); } catch (Exception ex) { LOG.warn("Cannot obtain groups for " + userName, ex); } } MappingInput mi = new MappingInput(userName, groups, ss.getHiveVariables().get("wmpool"), ss.getHiveVariables().get("wmapp")); WmContext wmContext = ctx.getWmContext(); String executionMode = HiveConf.getVar(conf, HiveConf.ConfVars.HIVE_EXECUTION_MODE); runtimeContext.setExecutionMode(executionMode); // jobConf will hold all the configuration for hadoop, tez, and hive, which are not 
set in AM defaults JobConf jobConf = utils.createConfiguration(conf, false); // Setup the job specific keystore path if exists and put the password into the environment variables of tez am/tasks. HiveConfUtil.updateJobCredentialProviders(jobConf); // Get all user jars from work (e.g. input format stuff). String[] allNonConfFiles = work.configureJobConfAndExtractJars(jobConf); // DAG scratch dir. We get a session from the pool so it may be different from Tez one. // TODO: we could perhaps reuse the same directory for HiveResources? Path scratchDir = utils.createTezDir(ctx.getMRScratchDir(), conf); CallerContext callerContext = CallerContext.create("HIVE", String.format(USER_ID, queryPlan.getQueryId(), userName), "HIVE_QUERY_ID", queryPlan.getQueryStr()); perfLogger.perfLogBegin(CLASS_NAME, PerfLogger.TEZ_GET_SESSION); session = sessionRef.value = WorkloadManagerFederation.getSession( sessionRef.value, conf, mi, getWork().getLlapMode(), wmContext); perfLogger.perfLogEnd(CLASS_NAME, PerfLogger.TEZ_GET_SESSION); try { ss.setTezSession(session); LOG.info("Subscribed to counters: {} for queryId: {}", wmContext.getSubscribedCounters(), wmContext.getQueryId()); // Ensure the session is open and has the necessary local resources. // This would refresh any conf resources and also local resources. ensureSessionHasResources(session, allNonConfFiles); // This is a combination of the jar stuff from conf, and not from conf. List<LocalResource> allNonAppResources = session.getLocalizedResources(); logResources(allNonAppResources); Map<String, LocalResource> allResources = DagUtils.createTezLrMap( session.getAppJarLr(), allNonAppResources); // next we translate the TezWork to a Tez DAG DAG dag = build(jobConf, work, scratchDir, ctx, allResources); dag.setCallerContext(callerContext); // Note: we no longer call addTaskLocalFiles because all the resources are correctly // updated in the session resource lists now, and thus added to vertices. 
// If something breaks, dag.addTaskLocalFiles might need to be called here. // Check isShutdown opportunistically; it's never unset. if (this.isShutdown) { throw new HiveException("Operation cancelled"); } DAGClient dagClient = submit(dag, sessionRef); session = sessionRef.value; boolean wasShutdown = false; synchronized (dagClientLock) { assert this.dagClient == null; wasShutdown = this.isShutdown; if (!wasShutdown) { this.dagClient = dagClient; } } if (wasShutdown) { closeDagClientOnCancellation(dagClient); throw new HiveException("Operation cancelled"); } // Log all the info required to find the various logs for this query String dagId = this.dagClient.getDagIdentifierString(); String appId = this.dagClient.getSessionIdentifierString(); LOG.info("HS2 Host: [{}], Query ID: [{}], Dag ID: [{}], DAG App ID: [{}], DAG App address: [{}]", ServerUtils.hostname(), queryId, dagId, appId, session.getSession().getAmHost()); LogUtils.putToMDC(LogUtils.DAGID_KEY, dagId); this.jobID = dagId; runtimeContext.setDagId(dagId); runtimeContext.setSessionId(session.getSessionId()); runtimeContext.setApplicationId(appId); // finally monitor will print progress until the job is done monitor = new TezJobMonitor(session, work.getAllWork(), dagClient, conf, dag, ctx, runtimeContext.counters, perfLogger); runtimeContext.setMonitor(monitor); rc = monitor.monitorExecution(); if (rc != 0) { this.setException(new TezRuntimeException(dagId, monitor.getDiagnostics())); } try { // fetch the counters Set<StatusGetOpts> statusGetOpts = EnumSet.of(StatusGetOpts.GET_COUNTERS); DAGStatus dagStatus = dagClient.getDAGStatus(statusGetOpts); this.setStatusMessage(dagStatus.getState().name()); TezCounters dagCounters = dagStatus.getDAGCounters(); // if initial counters exists, merge it with dag counters to get aggregated view TezCounters mergedCounters = runtimeContext.counters == null ? 
dagCounters : Utils.mergeTezCounters( dagCounters, runtimeContext.counters); runtimeContext.counters = mergedCounters; } catch (Exception err) { // Don't fail execution due to counters - just don't print summary info LOG.warn("Failed to get counters. Ignoring, summary info will be incomplete.", err); runtimeContext.counters = null; } // save useful commit information into query state, e.g. for custom commit hooks, like Iceberg if (rc == 0) { collectCommitInformation(work); } } finally { // Note: due to TEZ-3846, the session may actually be invalid in case of some errors. // Currently, reopen on an attempted reuse will take care of that; we cannot tell // if the session is usable until we try. // We return this to the pool even if it's unusable; reopen is supposed to handle this. wmContext = ctx.getWmContext(); try { if (sessionRef.value != null) { sessionRef.value.returnToSessionManager(); } } catch (Exception e) { LOG.error("Failed to return session: {} to pool", session, e); throw e; } if (!conf.getVar(HiveConf.ConfVars.TEZ_SESSION_EVENTS_SUMMARY).equalsIgnoreCase("none") && wmContext != null) { if (conf.getVar(HiveConf.ConfVars.TEZ_SESSION_EVENTS_SUMMARY).equalsIgnoreCase("json")) { wmContext.printJson(console); } else if (conf.getVar(HiveConf.ConfVars.TEZ_SESSION_EVENTS_SUMMARY).equalsIgnoreCase("text")) { wmContext.print(console); } } } if (LOG.isInfoEnabled() && runtimeContext.counters != null && (HiveConf.getBoolVar(conf, HiveConf.ConfVars.TEZ_EXEC_SUMMARY) || Utilities.isPerfOrAboveLogging(conf))) { for (CounterGroup group : runtimeContext.counters) { monitor.logger().printInfo(group.getDisplayName() + ":"); for (TezCounter counter : group) { monitor.logger().printInfo(" " + counter.getDisplayName() + ": " + counter.getValue()); } } } updateNumRows(); } catch (Exception e) { LOG.error("Failed to execute tez graph.", e); setException(e); // rc will be 1 at this point indicating failure. 
} finally { Utilities.clearWork(conf); // Clear gWorkMap for (BaseWork w : work.getAllWork()) { JobConf workCfg = workToConf.get(w); if (workCfg != null) { Utilities.clearWorkMapForConf(workCfg); } } if (cleanContext) { try { ctx.clear(); } catch (Exception e) { /*best effort*/ LOG.warn("Failed to clean up after tez job", e); } } // need to either move tmp files or remove them DAGClient dagClient = null; synchronized (dagClientLock) { dagClient = this.dagClient; this.dagClient = null; } // TODO: not clear why we don't do the rest of the cleanup if dagClient is not created. // E.g. jobClose will be called if we fail after dagClient creation but no before... // DagClient as such should have no bearing on jobClose. if (dagClient != null) { // rc will only be overwritten if close errors out rc = close(work, rc, dagClient); } if (monitor != null){ monitor.logger().endSummary(); } } return rc; } private void collectCommitInformation(TezWork work) throws IOException, TezException { for (BaseWork w : work.getAllWork()) { JobConf jobConf = workToConf.get(w); Vertex vertex = workToVertex.get(w); boolean hasIcebergCommitter = Optional.ofNullable(jobConf).map(JobConf::getOutputCommitter) .map(Object::getClass).map(Class::getName) .filter(name -> name.endsWith("HiveIcebergNoJobCommitter")).isPresent(); // we should only consider jobs with Iceberg output committer and a data sink if (hasIcebergCommitter && !vertex.getDataSinks().isEmpty()) { VertexStatus status = dagClient.getVertexStatus(vertex.getName(), EnumSet.of(StatusGetOpts.GET_COUNTERS)); String[] jobIdParts = status.getId().split("_"); // status.getId() returns something like: vertex_1617722404520_0001_1_00 // this should be transformed to a parsable JobID: job_16177224045200_0001 int vertexId = Integer.parseInt(jobIdParts[jobIdParts.length - 1]); String jobId = String.format(JOB_ID_TEMPLATE, jobIdParts[1], vertexId, jobIdParts[2]); List<String> tables = new ArrayList<>(); Map<String, String> icebergProperties = new 
HashMap<>(); for (Map.Entry<String, String> entry : jobConf) { if (entry.getKey().startsWith(ICEBERG_SERIALIZED_TABLE_PREFIX)) { // get all target tables this vertex wrote to tables.add(entry.getKey().substring(ICEBERG_SERIALIZED_TABLE_PREFIX.length())); } else if (entry.getKey().startsWith(ICEBERG_PROPERTY_PREFIX)) { // find iceberg props in jobConf as they can be needed, but not available, during job commit icebergProperties.put(entry.getKey(), entry.getValue()); } } // save information for each target table tables.forEach(table -> SessionStateUtil.addCommitInfo(jobConf, table, jobId, status.getProgress().getSucceededTaskCount(), icebergProperties)); } } } private void updateNumRows() { if (runtimeContext.counters != null) { TezCounter counter = runtimeContext.counters.findCounter( conf.getVar(HiveConf.ConfVars.HIVE_COUNTER_GROUP), FileSinkOperator.TOTAL_TABLE_ROWS_WRITTEN); if (counter != null) { queryState.setNumModifiedRows(counter.getValue()); } } } private String getUserNameForGroups(SessionState ss) { // This should be removed when authenticator and the 2-username mess is cleaned up. 
if (ss.getAuthenticator() != null) { String userName = ss.getAuthenticator().getUserName(); if (userName != null) return userName; } return ss.getUserName(); } private void closeDagClientOnCancellation(DAGClient dagClient) { try { dagClient.tryKillDAG(); LOG.info("Waiting for Tez task to shut down: " + this); dagClient.waitForCompletion(); } catch (Exception ex) { LOG.warn("Failed to shut down TezTask" + this, ex); } closeDagClientWithoutEx(dagClient); } private void logResources(List<LocalResource> additionalLr) { // log which resources we're adding (apart from the hive exec) if (!LOG.isDebugEnabled()) return; if (additionalLr == null || additionalLr.size() == 0) { LOG.debug("No local resources to process (other than hive-exec)"); } else { for (LocalResource lr: additionalLr) { LOG.debug("Adding local resource: " + lr.getResource()); } } } /** * Ensures that the Tez Session is open and the AM has all necessary jars configured. */ @VisibleForTesting void ensureSessionHasResources( TezSessionState session, String[] nonConfResources) throws Exception { TezClient client = session.getSession(); // TODO null can also mean that this operation was interrupted. Should we really try to re-create the session in that case ? if (client == null) { // Note: the only sane case where this can happen is the non-pool one. We should get rid // of it, in non-pool case perf doesn't matter so we might as well open at get time // and then call update like we do in the else. // Can happen if the user sets the tez flag after the session was established. LOG.info("Tez session hasn't been created yet. 
Opening session"); session.open(nonConfResources); } else { LOG.info("Session is already open"); session.ensureLocalResources(conf, nonConfResources); } } void checkOutputSpec(BaseWork work, JobConf jc) throws IOException { for (Operator<?> op : work.getAllOperators()) { if (op instanceof FileSinkOperator) { ((FileSinkOperator) op).checkOutputSpecs(null, jc); } } } DAG build(JobConf conf, TezWork tezWork, Path scratchDir, Context ctx, Map<String, LocalResource> vertexResources) throws Exception { perfLogger.perfLogBegin(CLASS_NAME, PerfLogger.TEZ_BUILD_DAG); // getAllWork returns a topologically sorted list, which we use to make // sure that vertices are created before they are used in edges. List<BaseWork> topologicalWorkList = tezWork.getAllWork(); Collections.reverse(topologicalWorkList); // the name of the dag is what is displayed in the AM/Job UI String dagName = utils.createDagName(conf, queryPlan); LOG.info("Dag name: {}", dagName); DAG dag = DAG.create(dagName); // set some info for the query JSONObject json = new JSONObject(new LinkedHashMap<>()).put("context", "Hive") .put("description", ctx.getCmd()); String dagInfo = json.toString(); String queryId = HiveConf.getVar(conf, HiveConf.ConfVars.HIVE_QUERY_ID); dag.setConf(HiveConf.ConfVars.HIVE_QUERY_ID.varname, queryId); LOG.debug("DagInfo: {}", dagInfo); TezConfigurationFactory.addProgrammaticallyAddedTezOptsToDagConf(dag.getDagConf(), conf); dag.setDAGInfo(dagInfo); dag.setCredentials(conf.getCredentials()); setAccessControlsForCurrentUser(dag, queryPlan.getQueryId(), conf); for (BaseWork workUnit: topologicalWorkList) { // translate work to vertex perfLogger.perfLogBegin(CLASS_NAME, PerfLogger.TEZ_CREATE_VERTEX + workUnit.getName()); if (workUnit instanceof UnionWork) { // Special case for unions. 
These items translate to VertexGroups List<BaseWork> unionWorkItems = new LinkedList<BaseWork>(); List<BaseWork> children = new LinkedList<BaseWork>(); // split the children into vertices that make up the union and vertices that are // proper children of the union for (BaseWork v: tezWork.getChildren(workUnit)) { EdgeType type = tezWork.getEdgeProperty(workUnit, v).getEdgeType(); if (type == EdgeType.CONTAINS) { unionWorkItems.add(v); } else { children.add(v); } } JobConf parentConf = workToConf.get(unionWorkItems.get(0)); checkOutputSpec(workUnit, parentConf); // create VertexGroup Vertex[] vertexArray = new Vertex[unionWorkItems.size()]; int i = 0; for (BaseWork v: unionWorkItems) { vertexArray[i++] = workToVertex.get(v); } VertexGroup group = dag.createVertexGroup(workUnit.getName(), vertexArray); // For a vertex group, all Outputs use the same Key-class, Val-class and partitioner. // Pick any one source vertex to figure out the Edge configuration. // now hook up the children for (BaseWork v: children) { // finally we can create the grouped edge GroupInputEdge e = utils.createEdge(group, parentConf, workToVertex.get(v), tezWork.getEdgeProperty(workUnit, v), v, tezWork); dag.addEdge(e); } } else { // Regular vertices JobConf wxConf = utils.initializeVertexConf(conf, ctx, workUnit); checkOutputSpec(workUnit, wxConf); Vertex wx = utils.createVertex(wxConf, workUnit, scratchDir, tezWork, vertexResources); if (tezWork.getChildren(workUnit).size() > 1) { String tezRuntimeSortMb = wxConf.get(TezRuntimeConfiguration.TEZ_RUNTIME_IO_SORT_MB); int originalValue = 0; if(tezRuntimeSortMb == null) { originalValue = TezRuntimeConfiguration.TEZ_RUNTIME_IO_SORT_MB_DEFAULT; } else { originalValue = Integer.valueOf(tezRuntimeSortMb); } int newValue = originalValue / tezWork.getChildren(workUnit).size(); wxConf.set(TezRuntimeConfiguration.TEZ_RUNTIME_IO_SORT_MB, Integer.toString(newValue)); LOG.info("Modified " + TezRuntimeConfiguration.TEZ_RUNTIME_IO_SORT_MB + " to " + newValue); 
} if (workUnit.getReservedMemoryMB() > 0) { // If reversedMemoryMB is set, make memory allocation fraction adjustment as needed double frac = DagUtils.adjustMemoryReserveFraction(workUnit.getReservedMemoryMB(), super.conf); LOG.info("Setting " + TEZ_MEMORY_RESERVE_FRACTION + " to " + frac); wx.setConf(TEZ_MEMORY_RESERVE_FRACTION, Double.toString(frac)); } // Otherwise just leave it up to Tez to decide how much memory to allocate dag.addVertex(wx); utils.addCredentials(workUnit, dag, conf); perfLogger.perfLogEnd(CLASS_NAME, PerfLogger.TEZ_CREATE_VERTEX + workUnit.getName()); workToVertex.put(workUnit, wx); workToConf.put(workUnit, wxConf); // add all dependencies (i.e.: edges) to the graph for (BaseWork v: tezWork.getChildren(workUnit)) { assert workToVertex.containsKey(v); Edge e = null; TezEdgeProperty edgeProp = tezWork.getEdgeProperty(workUnit, v); e = utils.createEdge(wxConf, wx, workToVertex.get(v), edgeProp, v, tezWork); dag.addEdge(e); } } } // Clear the work map after build. TODO: remove caching instead? Utilities.clearWorkMap(conf); perfLogger.perfLogEnd(CLASS_NAME, PerfLogger.TEZ_BUILD_DAG); return dag; } private static void setAccessControlsForCurrentUser(DAG dag, String queryId, Configuration conf) throws IOException { String user = SessionState.getUserFromAuthenticator(); UserGroupInformation loginUserUgi = UserGroupInformation.getLoginUser(); String loginUser = loginUserUgi == null ? null : loginUserUgi.getShortUserName(); boolean addHs2User = HiveConf.getBoolVar(conf, HiveConf.ConfVars.HIVE_TEZ_HS2_USER_ACCESS); // Temporarily re-using the TEZ AM View ACLs property for individual dag access control. // Hive may want to setup it's own parameters if it wants to control per dag access. // Setting the tez-property per dag should work for now. 
String viewStr = Utilities.getAclStringWithHiveModification(conf, TezConfiguration.TEZ_AM_VIEW_ACLS, addHs2User, user, loginUser); String modifyStr = Utilities.getAclStringWithHiveModification(conf, TezConfiguration.TEZ_AM_MODIFY_ACLS, addHs2User, user, loginUser); LOG.debug("Setting Tez DAG access for queryId={} with viewAclString={}, modifyStr={}", queryId, viewStr, modifyStr); // set permissions for current user on DAG DAGAccessControls ac = new DAGAccessControls(viewStr, modifyStr); dag.setAccessControls(ac); } private TezSessionState getNewTezSessionOnError( TezSessionState oldSession) throws Exception { // Note: we don't pass the config to reopen. If the session was already open, it would // have kept running with its current config - preserve that behavior. TezSessionState newSession = oldSession.reopen(); console.printInfo("Session re-established."); return newSession; } DAGClient submit(DAG dag, Ref<TezSessionState> sessionStateRef) throws Exception { perfLogger.perfLogBegin(CLASS_NAME, PerfLogger.TEZ_SUBMIT_DAG); DAGClient dagClient = null; TezSessionState sessionState = sessionStateRef.value; try { try { // ready to start execution on the cluster dagClient = submitInternal(dag, sessionState); } catch (SessionNotRunning nr) { console.printInfo("Tez session was closed. Reopening..."); sessionStateRef.value = sessionState = getNewTezSessionOnError(sessionState); console.printInfo("Session re-established."); dagClient = submitInternal(dag, sessionState); } } catch (Exception e) { if (this.isShutdown) { // Incase of taskShutdown, no need to retry sessionDestroyOrReturnToPool(sessionStateRef, sessionState); throw e; } // In case of any other exception, retry. If this also fails, report original error and exit. 
try { console.printInfo("Dag submit failed due to " + e.getMessage() + " stack trace: " + Arrays.toString(e.getStackTrace()) + " retrying..."); sessionStateRef.value = sessionState = getNewTezSessionOnError(sessionState); dagClient = submitInternal(dag, sessionState); } catch (Exception retryException) { // we failed to submit after retrying. // If this is a non-pool session, destroy it. // Otherwise move it to sessionPool, reopen will retry. sessionDestroyOrReturnToPool(sessionStateRef, sessionState); throw retryException; } } perfLogger.perfLogEnd(CLASS_NAME, PerfLogger.TEZ_SUBMIT_DAG); return new SyncDagClient(dagClient); } private DAGClient submitInternal(DAG dag, TezSessionState sessionState) throws TezException, IOException { runtimeContext.init(sessionState); return sessionState.getSession().submitDAG(dag); } private void sessionDestroyOrReturnToPool(Ref<TezSessionState> sessionStateRef, TezSessionState sessionState) throws Exception{ sessionStateRef.value = null; if (sessionState.isDefault() && sessionState instanceof TezSessionPoolSession) { sessionState.returnToSessionManager(); } else { sessionState.destroy(); } } /* * close will move the temp files into the right place for the fetch * task. If the job has failed it will clean up the files. */ @VisibleForTesting int close(TezWork work, int rc, DAGClient dagClient) { try { List<BaseWork> ws = work.getAllWork(); for (BaseWork w: ws) { if (w instanceof MergeJoinWork) { w = ((MergeJoinWork) w).getMainWork(); } for (Operator<?> op: w.getAllOperators()) { op.jobClose(conf, rc == 0); } } } catch (Exception e) { // jobClose needs to execute successfully otherwise fail task if (rc == 0) { rc = 3; String mesg = "Job Commit failed with exception '" + Utilities.getNameMessage(e) + "'"; console.printError(mesg, "\n" + StringUtils.stringifyException(e)); } } if (dagClient != null) { // null in tests closeDagClientWithoutEx(dagClient); } return rc; } /** * Close DagClient, log warning if it throws any exception. 
* We don't want to fail query if that function fails. */ private static void closeDagClientWithoutEx(DAGClient dagClient) { try { dagClient.close(); } catch (Exception e) { LOG.warn("Failed to close DagClient", e); } } @Override public void updateTaskMetrics(Metrics metrics) { metrics.incrementCounter(MetricsConstant.HIVE_TEZ_TASKS); } @Override public boolean isMapRedTask() { return true; } @Override public StageType getType() { return StageType.MAPRED; } @Override public String getName() { return "TEZ"; } @Override public boolean canExecuteInParallel() { return false; } @Override public Collection<MapWork> getMapWork() { List<MapWork> result = new LinkedList<MapWork>(); TezWork work = getWork(); // framework expects MapWork instances that have no physical parents (i.e.: union parent is // fine, broadcast parent isn't) for (BaseWork w: work.getAllWorkUnsorted()) { if (w instanceof MapWork) { List<BaseWork> parents = work.getParents(w); boolean candidate = true; for (BaseWork parent: parents) { if (!(parent instanceof UnionWork)) { candidate = false; } } if (candidate) { result.add((MapWork)w); } } } return result; } @Override public Operator<? extends OperatorDesc> getReducer(MapWork mapWork) { List<BaseWork> children = getWork().getChildren(mapWork); if (children.size() != 1) { return null; } if (!(children.get(0) instanceof ReduceWork)) { return null; } return ((ReduceWork)children.get(0)).getReducer(); } @Override public void shutdown() { super.shutdown(); DAGClient dagClient = null; synchronized (dagClientLock) { isShutdown = true; dagClient = this.dagClient; // Don't set dagClient to null here - execute will only clean up operators if it's set. } LOG.info("Shutting down Tez task " + this + " " + ((dagClient == null) ? " before submit" : "")); if (dagClient == null) return; closeDagClientOnCancellation(dagClient); } /** DAG client that does dumb global sync on all the method calls; * Tez DAG client is not thread safe and getting the 2nd one is not recommended. 
*/ public class SyncDagClient extends DAGClient { private final DAGClient dagClient; public SyncDagClient(DAGClient dagClient) { super(); this.dagClient = dagClient; } @Override public void close() throws IOException { dagClient.close(); // Don't sync. } public String getDagIdentifierString() { return dagClient.getDagIdentifierString(); } public String getSessionIdentifierString() { return dagClient.getSessionIdentifierString(); } @Override public String getExecutionContext() { return dagClient.getExecutionContext(); // Don't sync. } @Override @Private protected ApplicationReport getApplicationReportInternal() { throw new UnsupportedOperationException(); // The method is not exposed, and we don't use it. } @Override public DAGStatus getDAGStatus(@Nullable Set<StatusGetOpts> statusOptions) throws IOException, TezException { synchronized (dagClient) { return dagClient.getDAGStatus(statusOptions); } } @Override public DAGStatus getDAGStatus(@Nullable Set<StatusGetOpts> statusOptions, long timeout) throws IOException, TezException { synchronized (dagClient) { return dagClient.getDAGStatus(statusOptions, timeout); } } @Override public VertexStatus getVertexStatus(String vertexName, Set<StatusGetOpts> statusOptions) throws IOException, TezException { synchronized (dagClient) { return dagClient.getVertexStatus(vertexName, statusOptions); } } @Override public void tryKillDAG() throws IOException, TezException { synchronized (dagClient) { dagClient.tryKillDAG(); } } @Override public DAGStatus waitForCompletion() throws IOException, TezException, InterruptedException { synchronized (dagClient) { return dagClient.waitForCompletion(); } } @Override public DAGStatus waitForCompletionWithStatusUpdates(@Nullable Set<StatusGetOpts> statusGetOpts) throws IOException, TezException, InterruptedException { synchronized (dagClient) { return dagClient.waitForCompletionWithStatusUpdates(statusGetOpts); } } @Override public String getWebUIAddress() throws IOException, TezException { 
synchronized (dagClient) { return dagClient.getWebUIAddress(); } } } }
apache/hbase
36,241
hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/model/ScannerModel.java
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.hadoop.hbase.rest.model; import com.fasterxml.jackson.annotation.JsonInclude; import java.io.IOException; import java.io.Serializable; import java.util.ArrayList; import java.util.Arrays; import java.util.Base64; import java.util.List; import java.util.Map; import java.util.NavigableSet; import java.util.Objects; import javax.xml.bind.annotation.XmlAttribute; import javax.xml.bind.annotation.XmlElement; import javax.xml.bind.annotation.XmlRootElement; import org.apache.hadoop.hbase.CompareOperator; import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.client.Scan; import org.apache.hadoop.hbase.filter.BinaryComparator; import org.apache.hadoop.hbase.filter.BinaryPrefixComparator; import org.apache.hadoop.hbase.filter.BitComparator; import org.apache.hadoop.hbase.filter.ByteArrayComparable; import org.apache.hadoop.hbase.filter.ColumnCountGetFilter; import org.apache.hadoop.hbase.filter.ColumnPaginationFilter; import org.apache.hadoop.hbase.filter.ColumnPrefixFilter; import org.apache.hadoop.hbase.filter.ColumnRangeFilter; import org.apache.hadoop.hbase.filter.CompareFilter; import org.apache.hadoop.hbase.filter.DependentColumnFilter; import 
org.apache.hadoop.hbase.filter.FamilyFilter; import org.apache.hadoop.hbase.filter.Filter; import org.apache.hadoop.hbase.filter.FilterList; import org.apache.hadoop.hbase.filter.FirstKeyOnlyFilter; import org.apache.hadoop.hbase.filter.FuzzyRowFilter; import org.apache.hadoop.hbase.filter.InclusiveStopFilter; import org.apache.hadoop.hbase.filter.KeyOnlyFilter; import org.apache.hadoop.hbase.filter.MultiRowRangeFilter; import org.apache.hadoop.hbase.filter.MultiRowRangeFilter.RowRange; import org.apache.hadoop.hbase.filter.MultipleColumnPrefixFilter; import org.apache.hadoop.hbase.filter.NullComparator; import org.apache.hadoop.hbase.filter.PageFilter; import org.apache.hadoop.hbase.filter.PrefixFilter; import org.apache.hadoop.hbase.filter.QualifierFilter; import org.apache.hadoop.hbase.filter.RandomRowFilter; import org.apache.hadoop.hbase.filter.RegexStringComparator; import org.apache.hadoop.hbase.filter.RowFilter; import org.apache.hadoop.hbase.filter.SingleColumnValueExcludeFilter; import org.apache.hadoop.hbase.filter.SingleColumnValueFilter; import org.apache.hadoop.hbase.filter.SkipFilter; import org.apache.hadoop.hbase.filter.SubstringComparator; import org.apache.hadoop.hbase.filter.TimestampsFilter; import org.apache.hadoop.hbase.filter.ValueFilter; import org.apache.hadoop.hbase.filter.WhileMatchFilter; import org.apache.hadoop.hbase.rest.ProtobufMessageHandler; import org.apache.hadoop.hbase.rest.RestUtil; import org.apache.hadoop.hbase.rest.protobuf.generated.ScannerMessage.Scanner; import org.apache.hadoop.hbase.security.visibility.Authorizations; import org.apache.hadoop.hbase.util.Bytes; import org.apache.hadoop.hbase.util.Pair; import org.apache.yetus.audience.InterfaceAudience; import org.apache.hbase.thirdparty.com.fasterxml.jackson.jaxrs.json.JacksonJaxbJsonProvider; import org.apache.hbase.thirdparty.com.google.protobuf.ByteString; import org.apache.hbase.thirdparty.com.google.protobuf.CodedInputStream; import 
org.apache.hbase.thirdparty.com.google.protobuf.Message; import org.apache.hbase.thirdparty.com.google.protobuf.UnsafeByteOperations; import org.apache.hbase.thirdparty.javax.ws.rs.core.MediaType; /** * A representation of Scanner parameters. * * <pre> * &lt;complexType name="Scanner"&gt; * &lt;sequence&gt; * &lt;element name="column" type="base64Binary" minOccurs="0" maxOccurs="unbounded"/&gt; * &lt;element name="filter" type="string" minOccurs="0" maxOccurs="1"&gt;&lt;/element&gt; * &lt;/sequence&gt; * &lt;attribute name="startRow" type="base64Binary"&gt;&lt;/attribute&gt; * &lt;attribute name="endRow" type="base64Binary"&gt;&lt;/attribute&gt; * &lt;attribute name="batch" type="int"&gt;&lt;/attribute&gt; * &lt;attribute name="caching" type="int"&gt;&lt;/attribute&gt; * &lt;attribute name="startTime" type="int"&gt;&lt;/attribute&gt; * &lt;attribute name="endTime" type="int"&gt;&lt;/attribute&gt; * &lt;attribute name="maxVersions" type="int"&gt;&lt;/attribute&gt; * &lt;/complexType&gt; * </pre> */ @XmlRootElement(name = "Scanner") @JsonInclude(JsonInclude.Include.NON_NULL) @InterfaceAudience.Private public class ScannerModel implements ProtobufMessageHandler, Serializable { private static final long serialVersionUID = 1L; private byte[] startRow = HConstants.EMPTY_START_ROW; private byte[] endRow = HConstants.EMPTY_END_ROW; private List<byte[]> columns = new ArrayList<>(); private int batch = Integer.MAX_VALUE; private long startTime = 0; private long endTime = Long.MAX_VALUE; private String filter = null; private int maxVersions = Integer.MAX_VALUE; private int caching = -1; private List<String> labels = new ArrayList<>(); private boolean cacheBlocks = true; private int limit = -1; @JsonInclude(value = JsonInclude.Include.CUSTOM, valueFilter = IncludeStartRowFilter.class) private boolean includeStartRow = true; @JsonInclude(value = JsonInclude.Include.NON_DEFAULT) private boolean includeStopRow = false; @XmlAttribute public boolean isIncludeStopRow() { return 
includeStopRow; } public void setIncludeStopRow(boolean includeStopRow) { this.includeStopRow = includeStopRow; } @XmlAttribute public boolean isIncludeStartRow() { return includeStartRow; } public void setIncludeStartRow(boolean includeStartRow) { this.includeStartRow = includeStartRow; } @edu.umd.cs.findbugs.annotations.SuppressWarnings( value = { "EQ_CHECK_FOR_OPERAND_NOT_COMPATIBLE_WITH_THIS", "HE_EQUALS_NO_HASHCODE", "HE_EQUALS_USE_HASHCODE" }, justification = "1.The supplied value from the JSON Value Filter is of Type Boolean, hence supressing the check, 2.hashCode method will not be invoked, hence supressing the check") private static class IncludeStartRowFilter { @Override public boolean equals(Object value) { return Boolean.TRUE.equals(value); } } /** * Implement lazily-instantiated singleton as per recipe here: * http://literatejava.com/jvm/fastest-threadsafe-singleton-jvm/ */ private static class JaxbJsonProviderHolder { static final JacksonJaxbJsonProvider INSTANCE = new JacksonJaxbJsonProvider(); } @XmlRootElement static class FilterModel { @XmlRootElement static class ByteArrayComparableModel { @XmlAttribute public String type; @XmlAttribute public String value; @XmlAttribute public String op; static enum ComparatorType { BinaryComparator, BinaryPrefixComparator, BitComparator, NullComparator, RegexStringComparator, SubstringComparator } public ByteArrayComparableModel() { } public ByteArrayComparableModel(ByteArrayComparable comparator) { String typeName = comparator.getClass().getSimpleName(); ComparatorType type = ComparatorType.valueOf(typeName); this.type = typeName; switch (type) { case BinaryComparator: case BinaryPrefixComparator: this.value = Bytes.toString(Base64.getEncoder().encode(comparator.getValue())); break; case BitComparator: this.value = Bytes.toString(Base64.getEncoder().encode(comparator.getValue())); this.op = ((BitComparator) comparator).getOperator().toString(); break; case NullComparator: break; case RegexStringComparator: 
case SubstringComparator: this.value = Bytes.toString(comparator.getValue()); break; default: throw new RuntimeException("unhandled filter type: " + type); } } public ByteArrayComparable build() { ByteArrayComparable comparator; switch (ComparatorType.valueOf(type)) { case BinaryComparator: comparator = new BinaryComparator(Base64.getDecoder().decode(value)); break; case BinaryPrefixComparator: comparator = new BinaryPrefixComparator(Base64.getDecoder().decode(value)); break; case BitComparator: comparator = new BitComparator(Base64.getDecoder().decode(value), BitComparator.BitwiseOp.valueOf(op)); break; case NullComparator: comparator = new NullComparator(); break; case RegexStringComparator: comparator = new RegexStringComparator(value); break; case SubstringComparator: comparator = new SubstringComparator(value); break; default: throw new RuntimeException("unhandled comparator type: " + type); } return comparator; } } /** * This DTO omits the pseudo-getters in MultiRowRangeFilter.RowRange which break Jackson * deserialization. It also avoids adding those as dummy JSON elements. */ static class RowRangeModel { protected byte[] startRow; protected boolean startRowInclusive = true; protected byte[] stopRow; protected boolean stopRowInclusive = false; public RowRangeModel() { } public RowRangeModel(MultiRowRangeFilter.RowRange rr) { this.startRow = rr.getStartRow(); this.startRowInclusive = rr.isStartRowInclusive(); this.stopRow = rr.getStopRow(); this.stopRowInclusive = rr.isStopRowInclusive(); } public MultiRowRangeFilter.RowRange build() { return new MultiRowRangeFilter.RowRange(startRow, startRowInclusive, stopRow, stopRowInclusive); } public byte[] getStartRow() { return startRow; } public byte[] getStopRow() { return stopRow; } /** Returns if start row is inclusive. */ public boolean isStartRowInclusive() { return startRowInclusive; } /** Returns if stop row is inclusive. 
*/ public boolean isStopRowInclusive() { return stopRowInclusive; } @Override public int hashCode() { final int prime = 31; int result = 1; result = prime * result + Arrays.hashCode(startRow); result = prime * result + Arrays.hashCode(stopRow); result = prime * result + Objects.hash(startRowInclusive, stopRowInclusive); return result; } @Override public boolean equals(Object obj) { if (this == obj) { return true; } if (!(obj instanceof RowRangeModel)) { return false; } RowRangeModel other = (RowRangeModel) obj; return Arrays.equals(startRow, other.startRow) && startRowInclusive == other.startRowInclusive && Arrays.equals(stopRow, other.stopRow) && stopRowInclusive == other.stopRowInclusive; } } static class FuzzyKeyModel { protected byte[] key; protected byte[] mask; public FuzzyKeyModel() { } public FuzzyKeyModel(Pair<byte[], byte[]> keyWithMask) { this.key = keyWithMask.getFirst(); this.mask = keyWithMask.getSecond(); } public Pair<byte[], byte[]> build() { return new Pair<>(key, mask); } public byte[] getKey() { return key; } public void setKey(byte[] key) { this.key = key; } public byte[] getMask() { return mask; } public void setMask(byte[] mask) { this.mask = mask; } @Override public int hashCode() { final int prime = 31; int result = 1; result = prime * result + Arrays.hashCode(key); result = prime * result + Arrays.hashCode(mask); return result; } @Override public boolean equals(Object obj) { if (this == obj) { return true; } if (!(obj instanceof FuzzyKeyModel)) { return false; } FuzzyKeyModel other = (FuzzyKeyModel) obj; return Arrays.equals(key, other.key) && Arrays.equals(mask, other.mask); } } // A grab bag of fields, would have been a union if this were C. // These are null by default and will only be serialized if set (non null). 
@XmlAttribute public String type; @XmlAttribute public String op; @XmlElement ByteArrayComparableModel comparator; @XmlAttribute public String value; @XmlElement public List<FilterModel> filters; @XmlAttribute public Integer limit; @XmlAttribute public Integer offset; @XmlAttribute public String family; @XmlAttribute public String qualifier; @XmlAttribute public Boolean ifMissing; @XmlAttribute public Boolean latestVersion; @XmlAttribute public String minColumn; @XmlAttribute public Boolean minColumnInclusive; @XmlAttribute public String maxColumn; @XmlAttribute public Boolean maxColumnInclusive; @XmlAttribute public Boolean dropDependentColumn; @XmlAttribute public Float chance; @XmlElement public List<String> prefixes; @XmlElement private List<RowRangeModel> ranges; @XmlElement public List<Long> timestamps; @XmlElement private List<FuzzyKeyModel> fuzzyKeys; static enum FilterType { ColumnCountGetFilter, ColumnPaginationFilter, ColumnPrefixFilter, ColumnRangeFilter, DependentColumnFilter, FamilyFilter, FilterList, FirstKeyOnlyFilter, InclusiveStopFilter, KeyOnlyFilter, MultipleColumnPrefixFilter, MultiRowRangeFilter, PageFilter, PrefixFilter, QualifierFilter, RandomRowFilter, RowFilter, SingleColumnValueExcludeFilter, SingleColumnValueFilter, SkipFilter, TimestampsFilter, ValueFilter, WhileMatchFilter, FuzzyRowFilter } public FilterModel() { } public FilterModel(Filter filter) { String typeName = filter.getClass().getSimpleName(); FilterType type = FilterType.valueOf(typeName); this.type = typeName; switch (type) { case ColumnCountGetFilter: this.limit = ((ColumnCountGetFilter) filter).getLimit(); break; case ColumnPaginationFilter: this.limit = ((ColumnPaginationFilter) filter).getLimit(); this.offset = ((ColumnPaginationFilter) filter).getOffset(); break; case ColumnPrefixFilter: byte[] src = ((ColumnPrefixFilter) filter).getPrefix(); this.value = Bytes.toString(Base64.getEncoder().encode(src)); break; case ColumnRangeFilter: ColumnRangeFilter crf = 
(ColumnRangeFilter) filter; this.minColumn = Bytes.toString(Base64.getEncoder().encode(crf.getMinColumn())); this.minColumnInclusive = crf.getMinColumnInclusive(); this.maxColumn = Bytes.toString(Base64.getEncoder().encode(crf.getMaxColumn())); this.maxColumnInclusive = crf.getMaxColumnInclusive(); break; case DependentColumnFilter: { DependentColumnFilter dcf = (DependentColumnFilter) filter; this.family = Bytes.toString(Base64.getEncoder().encode(dcf.getFamily())); byte[] qualifier = dcf.getQualifier(); if (qualifier != null) { this.qualifier = Bytes.toString(Base64.getEncoder().encode(qualifier)); } this.op = dcf.getCompareOperator().toString(); this.comparator = new ByteArrayComparableModel(dcf.getComparator()); this.dropDependentColumn = dcf.dropDependentColumn(); } break; case FilterList: this.op = ((FilterList) filter).getOperator().toString(); this.filters = new ArrayList<>(); for (Filter child : ((FilterList) filter).getFilters()) { this.filters.add(new FilterModel(child)); } break; case FirstKeyOnlyFilter: case KeyOnlyFilter: break; case InclusiveStopFilter: this.value = Bytes .toString(Base64.getEncoder().encode(((InclusiveStopFilter) filter).getStopRowKey())); break; case MultipleColumnPrefixFilter: this.prefixes = new ArrayList<>(); for (byte[] prefix : ((MultipleColumnPrefixFilter) filter).getPrefix()) { this.prefixes.add(Bytes.toString(Base64.getEncoder().encode(prefix))); } break; case MultiRowRangeFilter: this.ranges = new ArrayList<>(); for (RowRange range : ((MultiRowRangeFilter) filter).getRowRanges()) { this.ranges.add(new RowRangeModel(range)); } break; case PageFilter: this.value = Long.toString(((PageFilter) filter).getPageSize()); break; case PrefixFilter: this.value = Bytes.toString(Base64.getEncoder().encode(((PrefixFilter) filter).getPrefix())); break; case FamilyFilter: case QualifierFilter: case RowFilter: case ValueFilter: this.op = ((CompareFilter) filter).getCompareOperator().toString(); this.comparator = new 
ByteArrayComparableModel(((CompareFilter) filter).getComparator()); break; case RandomRowFilter: this.chance = ((RandomRowFilter) filter).getChance(); break; case SingleColumnValueExcludeFilter: case SingleColumnValueFilter: { SingleColumnValueFilter scvf = (SingleColumnValueFilter) filter; this.family = Bytes.toString(Base64.getEncoder().encode(scvf.getFamily())); byte[] qualifier = scvf.getQualifier(); if (qualifier != null) { this.qualifier = Bytes.toString(Base64.getEncoder().encode(qualifier)); } this.op = scvf.getCompareOperator().toString(); this.comparator = new ByteArrayComparableModel(scvf.getComparator()); if (scvf.getFilterIfMissing()) { this.ifMissing = true; } if (scvf.getLatestVersionOnly()) { this.latestVersion = true; } } break; case SkipFilter: this.filters = new ArrayList<>(); this.filters.add(new FilterModel(((SkipFilter) filter).getFilter())); break; case TimestampsFilter: this.timestamps = ((TimestampsFilter) filter).getTimestamps(); break; case WhileMatchFilter: this.filters = new ArrayList<>(); this.filters.add(new FilterModel(((WhileMatchFilter) filter).getFilter())); break; case FuzzyRowFilter: this.fuzzyKeys = new ArrayList<>(((FuzzyRowFilter) filter).getFuzzyKeys().size()); for (Pair<byte[], byte[]> keyWithMask : ((FuzzyRowFilter) filter).getFuzzyKeys()) { this.fuzzyKeys.add(new FuzzyKeyModel(keyWithMask)); } break; default: throw new RuntimeException("unhandled filter type " + type); } } public Filter build() { Filter filter; switch (FilterType.valueOf(type)) { case ColumnCountGetFilter: filter = new ColumnCountGetFilter(limit); break; case ColumnPaginationFilter: filter = new ColumnPaginationFilter(limit, offset); break; case ColumnPrefixFilter: filter = new ColumnPrefixFilter(Base64.getDecoder().decode(value)); break; case ColumnRangeFilter: filter = new ColumnRangeFilter(Base64.getDecoder().decode(minColumn), minColumnInclusive, Base64.getDecoder().decode(maxColumn), maxColumnInclusive); break; case DependentColumnFilter: filter = 
new DependentColumnFilter(Base64.getDecoder().decode(family), qualifier != null ? Base64.getDecoder().decode(qualifier) : null, dropDependentColumn, CompareOperator.valueOf(op), comparator.build()); break; case FamilyFilter: filter = new FamilyFilter(CompareOperator.valueOf(op), comparator.build()); break; case FilterList: { List<Filter> list = new ArrayList<>(filters.size()); for (FilterModel model : filters) { list.add(model.build()); } filter = new FilterList(FilterList.Operator.valueOf(op), list); } break; case FirstKeyOnlyFilter: filter = new FirstKeyOnlyFilter(); break; case InclusiveStopFilter: filter = new InclusiveStopFilter(Base64.getDecoder().decode(value)); break; case KeyOnlyFilter: filter = new KeyOnlyFilter(); break; case MultipleColumnPrefixFilter: { byte[][] values = new byte[prefixes.size()][]; for (int i = 0; i < prefixes.size(); i++) { values[i] = Base64.getDecoder().decode(prefixes.get(i)); } filter = new MultipleColumnPrefixFilter(values); } break; case MultiRowRangeFilter: { ArrayList<MultiRowRangeFilter.RowRange> rowRanges = new ArrayList<>(ranges.size()); for (RowRangeModel rangeModel : ranges) { rowRanges.add(rangeModel.build()); } filter = new MultiRowRangeFilter(rowRanges); } break; case PageFilter: filter = new PageFilter(Long.parseLong(value)); break; case PrefixFilter: filter = new PrefixFilter(Base64.getDecoder().decode(value)); break; case QualifierFilter: filter = new QualifierFilter(CompareOperator.valueOf(op), comparator.build()); break; case RandomRowFilter: filter = new RandomRowFilter(chance); break; case RowFilter: filter = new RowFilter(CompareOperator.valueOf(op), comparator.build()); break; case SingleColumnValueFilter: filter = new SingleColumnValueFilter(Base64.getDecoder().decode(family), qualifier != null ? 
Base64.getDecoder().decode(qualifier) : null, CompareOperator.valueOf(op), comparator.build()); if (ifMissing != null) { ((SingleColumnValueFilter) filter).setFilterIfMissing(ifMissing); } if (latestVersion != null) { ((SingleColumnValueFilter) filter).setLatestVersionOnly(latestVersion); } break; case SingleColumnValueExcludeFilter: filter = new SingleColumnValueExcludeFilter(Base64.getDecoder().decode(family), qualifier != null ? Base64.getDecoder().decode(qualifier) : null, CompareOperator.valueOf(op), comparator.build()); if (ifMissing != null) { ((SingleColumnValueExcludeFilter) filter).setFilterIfMissing(ifMissing); } if (latestVersion != null) { ((SingleColumnValueExcludeFilter) filter).setLatestVersionOnly(latestVersion); } break; case SkipFilter: filter = new SkipFilter(filters.get(0).build()); break; case TimestampsFilter: filter = new TimestampsFilter(timestamps); break; case ValueFilter: filter = new ValueFilter(CompareOperator.valueOf(op), comparator.build()); break; case WhileMatchFilter: filter = new WhileMatchFilter(filters.get(0).build()); break; case FuzzyRowFilter: { ArrayList<Pair<byte[], byte[]>> fuzzyKeyArgs = new ArrayList<>(fuzzyKeys.size()); for (FuzzyKeyModel keyModel : fuzzyKeys) { fuzzyKeyArgs.add(keyModel.build()); } filter = new FuzzyRowFilter(fuzzyKeyArgs); } break; default: throw new RuntimeException("unhandled filter type: " + type); } return filter; } } /** * Get the <code>JacksonJaxbJsonProvider</code> instance; * @return A <code>JacksonJaxbJsonProvider</code>. 
*/ private static JacksonJaxbJsonProvider getJasonProvider() { return JaxbJsonProviderHolder.INSTANCE; } /** * @param s the JSON representation of the filter * @return the filter */ public static Filter buildFilter(String s) throws Exception { FilterModel model = getJasonProvider().locateMapper(FilterModel.class, MediaType.APPLICATION_JSON_TYPE) .readValue(s, FilterModel.class); return model.build(); } /** * @param filter the filter * @return the JSON representation of the filter */ public static String stringifyFilter(final Filter filter) throws Exception { return getJasonProvider().locateMapper(FilterModel.class, MediaType.APPLICATION_JSON_TYPE) .writeValueAsString(new FilterModel(filter)); } private static final byte[] COLUMN_DIVIDER = Bytes.toBytes(":"); /** * @param scan the scan specification */ public static ScannerModel fromScan(Scan scan) throws Exception { ScannerModel model = new ScannerModel(); model.setStartRow(scan.getStartRow()); model.setEndRow(scan.getStopRow()); Map<byte[], NavigableSet<byte[]>> families = scan.getFamilyMap(); if (families != null) { for (Map.Entry<byte[], NavigableSet<byte[]>> entry : families.entrySet()) { if (entry.getValue() != null) { for (byte[] qualifier : entry.getValue()) { model.addColumn(Bytes.add(entry.getKey(), COLUMN_DIVIDER, qualifier)); } } else { model.addColumn(entry.getKey()); } } } model.setStartTime(scan.getTimeRange().getMin()); model.setEndTime(scan.getTimeRange().getMax()); int caching = scan.getCaching(); if (caching > 0) { model.setCaching(caching); } int batch = scan.getBatch(); if (batch > 0) { model.setBatch(batch); } int maxVersions = scan.getMaxVersions(); if (maxVersions > 0) { model.setMaxVersions(maxVersions); } if (scan.getLimit() > 0) { model.setLimit(scan.getLimit()); } Filter filter = scan.getFilter(); if (filter != null) { model.setFilter(stringifyFilter(filter)); } // Add the visbility labels if found in the attributes Authorizations authorizations = scan.getAuthorizations(); if 
(authorizations != null) { List<String> labels = authorizations.getLabels(); for (String label : labels) { model.addLabel(label); } } model.setIncludeStartRow(scan.includeStartRow()); model.setIncludeStopRow(scan.includeStopRow()); return model; } /** * Default constructor */ public ScannerModel() { } /** * Constructor * @param startRow the start key of the row-range * @param endRow the end key of the row-range * @param columns the columns to scan * @param batch the number of values to return in batch * @param caching the number of rows that the scanner will fetch at once * @param endTime the upper bound on timestamps of values of interest * @param maxVersions the maximum number of versions to return * @param filter a filter specification (values with timestamps later than this are excluded) */ public ScannerModel(byte[] startRow, byte[] endRow, List<byte[]> columns, int batch, int caching, long endTime, int maxVersions, String filter) { super(); this.startRow = startRow; this.endRow = endRow; this.columns = columns; this.batch = batch; this.caching = caching; this.endTime = endTime; this.maxVersions = maxVersions; this.filter = filter; } /** * Constructor * @param startRow the start key of the row-range * @param endRow the end key of the row-range * @param columns the columns to scan * @param batch the number of values to return in batch * @param caching the number of rows that the scanner will fetch at once * @param startTime the lower bound on timestamps of values of interest (values with timestamps * earlier than this are excluded) * @param endTime the upper bound on timestamps of values of interest (values with timestamps * later than this are excluded) * @param filter a filter specification */ public ScannerModel(byte[] startRow, byte[] endRow, List<byte[]> columns, int batch, int caching, long startTime, long endTime, String filter) { super(); this.startRow = startRow; this.endRow = endRow; this.columns = columns; this.batch = batch; this.caching = caching; 
this.startTime = startTime; this.endTime = endTime; this.filter = filter; } /** * Add a column to the column set * @param column the column name, as &lt;column&gt;(:&lt;qualifier&gt;)? */ public void addColumn(byte[] column) { columns.add(column); } /** * Add a visibility label to the scan */ public void addLabel(String label) { labels.add(label); } /** Returns true if a start row was specified */ public boolean hasStartRow() { return !Bytes.equals(startRow, HConstants.EMPTY_START_ROW); } /** Returns start row */ @XmlAttribute public byte[] getStartRow() { return startRow; } /** Returns true if an end row was specified */ public boolean hasEndRow() { return !Bytes.equals(endRow, HConstants.EMPTY_END_ROW); } /** Returns end row */ @XmlAttribute public byte[] getEndRow() { return endRow; } /** Returns list of columns of interest in column:qualifier format, or empty for all */ @XmlElement(name = "column") public List<byte[]> getColumns() { return columns; } @XmlElement(name = "labels") public List<String> getLabels() { return labels; } /** Returns the number of cells to return in batch */ @XmlAttribute public int getBatch() { return batch; } /** Returns the number of rows that the scanner to fetch at once */ @XmlAttribute public int getCaching() { return caching; } /** Returns the limit specification */ @XmlAttribute public int getLimit() { return limit; } /** Returns true if HFile blocks should be cached on the servers for this scan, false otherwise */ @XmlAttribute public boolean getCacheBlocks() { return cacheBlocks; } /** Returns the lower bound on timestamps of items of interest */ @XmlAttribute public long getStartTime() { return startTime; } /** Returns the upper bound on timestamps of items of interest */ @XmlAttribute public long getEndTime() { return endTime; } /** Returns maximum number of versions to return */ @XmlAttribute public int getMaxVersions() { return maxVersions; } /** Returns the filter specification */ @XmlElement public String getFilter() { 
return filter; } /** * @param startRow start row */ public void setStartRow(byte[] startRow) { this.startRow = startRow; } /** * @param endRow end row */ public void setEndRow(byte[] endRow) { this.endRow = endRow; } /** * @param columns list of columns of interest in column:qualifier format, or empty for all */ public void setColumns(List<byte[]> columns) { this.columns = columns; } /** * @param batch the number of cells to return in batch */ public void setBatch(int batch) { this.batch = batch; } /** * @param caching the number of rows to fetch at once */ public void setCaching(int caching) { this.caching = caching; } /** * @param value true if HFile blocks should be cached on the servers for this scan, false * otherwise */ public void setCacheBlocks(boolean value) { this.cacheBlocks = value; } /** * @param limit the number of rows can fetch of each scanner at lifetime */ public void setLimit(int limit) { this.limit = limit; } /** * @param maxVersions maximum number of versions to return */ public void setMaxVersions(int maxVersions) { this.maxVersions = maxVersions; } /** * @param startTime the lower bound on timestamps of values of interest */ public void setStartTime(long startTime) { this.startTime = startTime; } /** * @param endTime the upper bound on timestamps of values of interest */ public void setEndTime(long endTime) { this.endTime = endTime; } /** * @param filter the filter specification */ public void setFilter(String filter) { this.filter = filter; } @Override public Message messageFromObject() { Scanner.Builder builder = Scanner.newBuilder(); if (!Bytes.equals(startRow, HConstants.EMPTY_START_ROW)) { builder.setStartRow(UnsafeByteOperations.unsafeWrap(startRow)); } if (!Bytes.equals(endRow, HConstants.EMPTY_START_ROW)) { builder.setEndRow(UnsafeByteOperations.unsafeWrap(endRow)); } for (byte[] column : columns) { builder.addColumns(UnsafeByteOperations.unsafeWrap(column)); } if (startTime != 0) { builder.setStartTime(startTime); } if (endTime != 0) 
{ builder.setEndTime(endTime); } builder.setBatch(getBatch()); if (caching > 0) { builder.setCaching(caching); } if (limit > 0) { builder.setLimit(limit); } builder.setMaxVersions(maxVersions); if (filter != null) { builder.setFilter(filter); } if (labels != null && labels.size() > 0) { for (String label : labels) builder.addLabels(label); } builder.setCacheBlocks(cacheBlocks); builder.setIncludeStartRow(includeStartRow); builder.setIncludeStopRow(includeStopRow); return builder.build(); } @Override public ProtobufMessageHandler getObjectFromMessage(CodedInputStream cis) throws IOException { Scanner.Builder builder = Scanner.newBuilder(); RestUtil.mergeFrom(builder, cis); if (builder.hasStartRow()) { startRow = builder.getStartRow().toByteArray(); } if (builder.hasEndRow()) { endRow = builder.getEndRow().toByteArray(); } for (ByteString column : builder.getColumnsList()) { addColumn(column.toByteArray()); } if (builder.hasBatch()) { batch = builder.getBatch(); } if (builder.hasCaching()) { caching = builder.getCaching(); } if (builder.hasLimit()) { limit = builder.getLimit(); } if (builder.hasStartTime()) { startTime = builder.getStartTime(); } if (builder.hasEndTime()) { endTime = builder.getEndTime(); } if (builder.hasMaxVersions()) { maxVersions = builder.getMaxVersions(); } if (builder.hasFilter()) { filter = builder.getFilter(); } if (builder.getLabelsList() != null) { List<String> labels = builder.getLabelsList(); for (String label : labels) { addLabel(label); } } if (builder.hasCacheBlocks()) { this.cacheBlocks = builder.getCacheBlocks(); } if (builder.hasIncludeStartRow()) { this.includeStartRow = builder.getIncludeStartRow(); } if (builder.hasIncludeStopRow()) { this.includeStopRow = builder.getIncludeStopRow(); } return this; } }
apache/nifi
36,370
nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/ListenHTTP.java
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.nifi.processors.standard; import jakarta.servlet.DispatcherType; import jakarta.servlet.Servlet; import jakarta.servlet.http.HttpServletResponse; import jakarta.ws.rs.Path; import org.apache.nifi.annotation.behavior.InputRequirement; import org.apache.nifi.annotation.behavior.InputRequirement.Requirement; import org.apache.nifi.annotation.documentation.CapabilityDescription; import org.apache.nifi.annotation.documentation.MultiProcessorUseCase; import org.apache.nifi.annotation.documentation.ProcessorConfiguration; import org.apache.nifi.annotation.documentation.Tags; import org.apache.nifi.annotation.documentation.UseCase; import org.apache.nifi.annotation.lifecycle.OnScheduled; import org.apache.nifi.annotation.lifecycle.OnShutdown; import org.apache.nifi.annotation.lifecycle.OnStopped; import org.apache.nifi.annotation.notification.OnPrimaryNodeStateChange; import org.apache.nifi.annotation.notification.PrimaryNodeState; import org.apache.nifi.components.AllowableValue; import org.apache.nifi.components.PropertyDescriptor; import org.apache.nifi.components.PropertyValue; import org.apache.nifi.components.ValidationContext; import org.apache.nifi.components.ValidationResult; 
import org.apache.nifi.expression.ExpressionLanguageScope; import org.apache.nifi.flowfile.FlowFile; import org.apache.nifi.jetty.configuration.connector.StandardServerConnectorFactory; import org.apache.nifi.migration.PropertyConfiguration; import org.apache.nifi.processor.AbstractSessionFactoryProcessor; import org.apache.nifi.processor.DataUnit; import org.apache.nifi.processor.ProcessContext; import org.apache.nifi.processor.ProcessSession; import org.apache.nifi.processor.ProcessSessionFactory; import org.apache.nifi.processor.Relationship; import org.apache.nifi.processor.exception.ProcessException; import org.apache.nifi.processor.util.StandardValidators; import org.apache.nifi.processors.standard.filters.HttpMethodFilter; import org.apache.nifi.processors.standard.http.HttpProtocolStrategy; import org.apache.nifi.processors.standard.servlets.ContentAcknowledgmentServlet; import org.apache.nifi.processors.standard.servlets.HealthCheckServlet; import org.apache.nifi.processors.standard.servlets.ListenHTTPServlet; import org.apache.nifi.scheduling.ExecutionNode; import org.apache.nifi.security.util.ClientAuth; import org.apache.nifi.serialization.RecordReaderFactory; import org.apache.nifi.serialization.RecordSetWriterFactory; import org.apache.nifi.ssl.SSLContextProvider; import org.eclipse.jetty.ee11.servlet.ServletContextHandler; import org.eclipse.jetty.ee11.servlet.ServletHolder; import org.eclipse.jetty.server.Server; import org.eclipse.jetty.server.ServerConnector; import org.eclipse.jetty.util.thread.QueuedThreadPool; import javax.net.ssl.SSLContext; import javax.net.ssl.TrustManager; import javax.net.ssl.TrustManagerFactory; import javax.net.ssl.X509TrustManager; import java.security.KeyStore; import java.security.cert.X509Certificate; import java.util.ArrayList; import java.util.Arrays; import java.util.Collection; import java.util.EnumSet; import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.Set; import 
java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.ConcurrentMap; import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicBoolean; import java.util.concurrent.atomic.AtomicReference; import java.util.regex.Pattern; @InputRequirement(Requirement.INPUT_FORBIDDEN) @Tags({"ingest", "http", "https", "rest", "listen"}) @CapabilityDescription(""" Starts an HTTP Server and listens on a given base path to transform incoming requests into FlowFiles. The default URI of the Service will be http://{hostname}:{port}/contentListener. Only HEAD and POST requests are supported. GET, PUT, DELETE, OPTIONS and TRACE will result in an error and the HTTP response status code 405; CONNECT will also result in an error and the HTTP response status code 400. GET is supported on <service_URI>/healthcheck. If the service is available, it returns \"200 OK\" with the content \"OK\". The health check functionality can be configured to be accessible via a different port. For details, see the documentation of the \"Listening Port for health check requests\" property. A Record Reader and Record Writer property can be enabled on the processor to process incoming requests as records. Record processing is not allowed for multipart requests and request in FlowFileV3 format (minifi). If the incoming request contains a FlowFileV3 package format, the data will be unpacked automatically into individual FlowFile(s) contained within the package; the original FlowFile names are restored. """) @UseCase( description = "Unpack FlowFileV3 content received in a POST", keywords = {"flowfile", "flowfilev3", "unpack"}, notes = """ POST requests with "Content-Type: application/flowfile-v3" will have their payload interpreted as FlowFileV3 format and will be automatically unpacked. This will output the original FlowFile(s) from within the FlowFileV3 format and will not require a separate UnpackContent processor. 
""", configuration = """ This feature of ListenHTTP is always on, no configuration required. The MergeContent and PackageFlowFile processors can generate FlowFileV3 formatted data. """ ) @MultiProcessorUseCase( description = "Limit the date flow rate that is accepted", keywords = {"rate", "limit"}, notes = """ When ListenHTTP cannot output FlowFiles due to back pressure, it will send HTTP 503 Service Unavailable response to clients, or deny connections, until more space is available in the output queue. """, configurations = { @ProcessorConfiguration( processorClass = ListenHTTP.class, configuration = """ Connect the 'success' relationship of ListenHTTP to a ControlRate processor and configure back pressure on that connection so that a small amount of data will fill the queue. The size of the back pressure configuration determines how much data to buffer to handle spikes in rate without affecting clients. """ ), @ProcessorConfiguration( processorClass = ControlRate.class, configuration = """ Use the ControlRate properties to set the desired data flow rate limit. When the limit it reached, the ControlRate input connection will start accumulating files. When this connection is full, ListenHTTP will limit the input data flow rate. """) } ) public class ListenHTTP extends AbstractSessionFactoryProcessor { private static final String MATCH_ALL = ".*"; private final AtomicBoolean initialized = new AtomicBoolean(false); private final AtomicBoolean runOnPrimary = new AtomicBoolean(false); public enum ClientAuthentication { AUTO("Inferred based on SSL Context Service properties. 
The presence of Trust Store properties implies REQUIRED, otherwise NONE is configured."), WANT(ClientAuth.WANT.getDescription()), REQUIRED(ClientAuth.REQUIRED.getDescription()), NONE(ClientAuth.NONE.getDescription()); private final String description; ClientAuthentication(final String description) { this.description = description; } public String getDescription() { return description; } public AllowableValue getAllowableValue() { return new AllowableValue(name(), name(), description); } } public static final PropertyDescriptor BASE_PATH = new PropertyDescriptor.Builder() .name("Base Path") .description("Base path for incoming connections") .required(true) .expressionLanguageSupported(ExpressionLanguageScope.ENVIRONMENT) .defaultValue("contentListener") .addValidator(StandardValidators.URI_VALIDATOR) .addValidator(StandardValidators.createRegexMatchingValidator(Pattern.compile("(^[^/]+.*[^/]+$|^[^/]+$|^$)"))) // no start with / or end with / .build(); public static final PropertyDescriptor PORT = new PropertyDescriptor.Builder() .name("Listening Port") .description("The Port to listen on for incoming connections") .required(true) .expressionLanguageSupported(ExpressionLanguageScope.ENVIRONMENT) .addValidator(StandardValidators.PORT_VALIDATOR) .build(); public static final PropertyDescriptor HEALTH_CHECK_PORT = new PropertyDescriptor.Builder() .name("health-check-port") .displayName("Listening Port for Health Check Requests") .description("The port to listen on for incoming health check requests. " + "If set, it must be different from the Listening Port. " + "Configure this port if the processor is set to use two-way SSL and a load balancer that does not support client authentication for " + "health check requests is used. " + "Only /<base_path>/healthcheck service is available via this port and only GET and HEAD requests are supported. " + "If the processor is set not to use SSL, SSL will not be used on this port, either. 
" + "If the processor is set to use one-way SSL, one-way SSL will be used on this port. " + "If the processor is set to use two-way SSL, one-way SSL will be used on this port (client authentication not required).") .required(false) .expressionLanguageSupported(ExpressionLanguageScope.ENVIRONMENT) .addValidator(StandardValidators.PORT_VALIDATOR) .build(); public static final PropertyDescriptor AUTHORIZED_DN_PATTERN = new PropertyDescriptor.Builder() .name("Authorized DN Pattern") .displayName("Authorized Subject DN Pattern") .description("A Regular Expression to apply against the Subject's Distinguished Name of incoming connections. If the Pattern does not match the Subject DN, " + "the the processor will respond with a status of HTTP 403 Forbidden.") .required(true) .defaultValue(MATCH_ALL) .addValidator(StandardValidators.REGULAR_EXPRESSION_VALIDATOR) .build(); public static final PropertyDescriptor AUTHORIZED_ISSUER_DN_PATTERN = new PropertyDescriptor.Builder() .name("authorized-issuer-dn-pattern") .displayName("Authorized Issuer DN Pattern") .description("A Regular Expression to apply against the Issuer's Distinguished Name of incoming connections. 
If the Pattern does not match the Issuer DN, " + "the processor will respond with a status of HTTP 403 Forbidden.") .required(false) .defaultValue(MATCH_ALL) .addValidator(StandardValidators.REGULAR_EXPRESSION_VALIDATOR) .build(); public static final PropertyDescriptor MAX_UNCONFIRMED_TIME = new PropertyDescriptor.Builder() .name("Max Unconfirmed Flowfile Time") .description("The maximum amount of time to wait for a FlowFile to be confirmed before it is removed from the cache") .required(true) .defaultValue("60 secs") .addValidator(StandardValidators.TIME_PERIOD_VALIDATOR) .build(); public static final PropertyDescriptor SSL_CONTEXT_SERVICE = new PropertyDescriptor.Builder() .name("SSL Context Service") .description("SSL Context Service enables support for HTTPS") .required(false) .identifiesControllerService(SSLContextProvider.class) .build(); public static final PropertyDescriptor HTTP_PROTOCOL_STRATEGY = new PropertyDescriptor.Builder() .name("HTTP Protocols") .description("HTTP Protocols supported for Application Layer Protocol Negotiation with TLS") .required(true) .allowableValues(HttpProtocolStrategy.class) .defaultValue(HttpProtocolStrategy.HTTP_1_1) .dependsOn(SSL_CONTEXT_SERVICE) .build(); public static final PropertyDescriptor HEADERS_AS_ATTRIBUTES_REGEX = new PropertyDescriptor.Builder() .name("HTTP Headers to receive as Attributes (Regex)") .description("Specifies the Regular Expression that determines the names of HTTP Headers that should be passed along as FlowFile attributes") .addValidator(StandardValidators.REGULAR_EXPRESSION_VALIDATOR) .required(false) .build(); public static final PropertyDescriptor REQUEST_HEADER_MAX_SIZE = new PropertyDescriptor.Builder() .name("Request Header Maximum Size") .description("The maximum supported size of HTTP headers in requests sent to this processor") .required(true) .addValidator(StandardValidators.DATA_SIZE_VALIDATOR) .defaultValue("8 KB") .build(); public static final PropertyDescriptor RETURN_CODE = new 
PropertyDescriptor.Builder() .name("Return Code") .description("The HTTP return code returned after every HTTP call") .defaultValue(String.valueOf(HttpServletResponse.SC_OK)) .addValidator(StandardValidators.NON_NEGATIVE_INTEGER_VALIDATOR) .build(); public static final PropertyDescriptor MULTIPART_REQUEST_MAX_SIZE = new PropertyDescriptor.Builder() .name("multipart-request-max-size") .displayName("Multipart Request Max Size") .description("The max size of the request. Only applies for requests with Content-Type: multipart/form-data, " + "and is used to prevent denial of service type of attacks, to prevent filling up the heap or disk space") .required(true) .addValidator(StandardValidators.DATA_SIZE_VALIDATOR) .defaultValue("1 MB") .build(); public static final PropertyDescriptor MULTIPART_READ_BUFFER_SIZE = new PropertyDescriptor.Builder() .name("multipart-read-buffer-size") .displayName("Multipart Read Buffer Size") .description("The threshold size, at which the contents of an incoming file would be written to disk. " + "Only applies for requests with Content-Type: multipart/form-data. " + "It is used to prevent denial of service type of attacks, to prevent filling up the heap or disk space.") .required(true) .addValidator(StandardValidators.DATA_SIZE_VALIDATOR) .defaultValue("512 KB") .build(); public static final PropertyDescriptor CLIENT_AUTHENTICATION = new PropertyDescriptor.Builder() .name("client-authentication") .displayName("Client Authentication") .description("Client Authentication policy for TLS connections. 
Required when SSL Context Service configured.") .required(false) .allowableValues(Arrays.stream(ClientAuthentication.values()) .map(ClientAuthentication::getAllowableValue) .toList() .toArray(new AllowableValue[]{})) .defaultValue(ClientAuthentication.AUTO.name()) .dependsOn(SSL_CONTEXT_SERVICE) .build(); public static final PropertyDescriptor MAX_THREAD_POOL_SIZE = new PropertyDescriptor.Builder() .name("max-thread-pool-size") .displayName("Maximum Thread Pool Size") .description("The maximum number of threads to be used by the embedded Jetty server. " + "The value can be set between 8 and 1000. " + "The value of this property affects the performance of the flows and the operating system, therefore " + "the default value should only be changed in justified cases. " + "A value that is less than the default value may be suitable " + "if only a small number of HTTP clients connect to the server. A greater value may be suitable " + "if a large number of HTTP clients are expected to make requests to the server simultaneously.") .required(true) .addValidator(StandardValidators.createLongValidator(8L, 1000L, true)) .defaultValue("200") .build(); public static final PropertyDescriptor RECORD_READER = new PropertyDescriptor.Builder() .name("record-reader") .displayName("Record Reader") .description("The Record Reader to use parsing the incoming FlowFile into Records") .required(false) .identifiesControllerService(RecordReaderFactory.class) .build(); public static final PropertyDescriptor RECORD_WRITER = new PropertyDescriptor.Builder() .name("record-writer") .displayName("Record Writer") .description("The Record Writer to use for serializing Records after they have been transformed") .required(true) .identifiesControllerService(RecordSetWriterFactory.class) .dependsOn(RECORD_READER) .build(); protected static final List<PropertyDescriptor> PROPERTY_DESCRIPTORS = List.of( BASE_PATH, PORT, HEALTH_CHECK_PORT, SSL_CONTEXT_SERVICE, HTTP_PROTOCOL_STRATEGY, CLIENT_AUTHENTICATION, 
AUTHORIZED_DN_PATTERN, AUTHORIZED_ISSUER_DN_PATTERN, MAX_UNCONFIRMED_TIME, HEADERS_AS_ATTRIBUTES_REGEX, REQUEST_HEADER_MAX_SIZE, RETURN_CODE, MULTIPART_REQUEST_MAX_SIZE, MULTIPART_READ_BUFFER_SIZE, MAX_THREAD_POOL_SIZE, RECORD_READER, RECORD_WRITER ); public static final Relationship RELATIONSHIP_SUCCESS = new Relationship.Builder() .name("success") .description("Relationship for successfully received FlowFiles") .build(); private static final Set<Relationship> RELATIONSHIPS = Set.of( RELATIONSHIP_SUCCESS ); public static final String CONTEXT_ATTRIBUTE_PROCESSOR = "processor"; public static final String CONTEXT_ATTRIBUTE_LOGGER = "logger"; public static final String CONTEXT_ATTRIBUTE_SESSION_FACTORY_HOLDER = "sessionFactoryHolder"; public static final String CONTEXT_ATTRIBUTE_PROCESS_CONTEXT_HOLDER = "processContextHolder"; public static final String CONTEXT_ATTRIBUTE_AUTHORITY_PATTERN = "authorityPattern"; public static final String CONTEXT_ATTRIBUTE_AUTHORITY_ISSUER_PATTERN = "authorityIssuerPattern"; public static final String CONTEXT_ATTRIBUTE_HEADER_PATTERN = "headerPattern"; public static final String CONTEXT_ATTRIBUTE_FLOWFILE_MAP = "flowFileMap"; public static final String CONTEXT_ATTRIBUTE_BASE_PATH = "basePath"; public static final String CONTEXT_ATTRIBUTE_RETURN_CODE = "returnCode"; public static final String CONTEXT_ATTRIBUTE_MULTIPART_REQUEST_MAX_SIZE = "multipartRequestMaxSize"; public static final String CONTEXT_ATTRIBUTE_MULTIPART_READ_BUFFER_SIZE = "multipartReadBufferSize"; public static final String CONTEXT_ATTRIBUTE_PORT = "port"; private volatile Server server = null; private final ConcurrentMap<String, FlowFileEntryTimeWrapper> flowFileMap = new ConcurrentHashMap<>(); private final AtomicReference<ProcessSessionFactory> sessionFactoryReference = new AtomicReference<>(); @Override protected Collection<ValidationResult> customValidate(ValidationContext validationContext) { final List<ValidationResult> validationResults = new 
ArrayList<>(super.customValidate(validationContext)); validatePortsAreNotEqual(validationContext, validationResults); return validationResults; } private void validatePortsAreNotEqual(ValidationContext context, Collection<ValidationResult> validationResults) { Integer healthCheckPort = context.getProperty(HEALTH_CHECK_PORT).evaluateAttributeExpressions().asInteger(); if (healthCheckPort != null) { Integer port = context.getProperty(PORT).evaluateAttributeExpressions().asInteger(); if (port.equals(healthCheckPort)) { String explanation = String.format("'%s' and '%s' cannot have the same value.", PORT.getDisplayName(), HEALTH_CHECK_PORT.getDisplayName()); validationResults.add(createValidationResult(HEALTH_CHECK_PORT.getDisplayName(), explanation)); } } } private ValidationResult createValidationResult(String subject, String explanation) { return new ValidationResult.Builder().subject(subject).valid(false).explanation(explanation).build(); } @Override public Set<Relationship> getRelationships() { return RELATIONSHIPS; } @Override protected List<PropertyDescriptor> getSupportedPropertyDescriptors() { return PROPERTY_DESCRIPTORS; } @Override public void migrateProperties(PropertyConfiguration config) { super.migrateProperties(config); if (config.removeProperty("Max Data to Receive per Second")) { getLogger().warn("ListenHTTP rate limit feature was removed. 
Please see ListenHTTP documentation for alternatives."); } } @OnShutdown @OnStopped public void shutdownHttpServer() { final Server toShutdown = this.server; if (toShutdown == null) { return; } shutdownHttpServer(toShutdown); } Server getServer() { return this.server; } private void shutdownHttpServer(Server toShutdown) { try { toShutdown.stop(); toShutdown.destroy(); clearInit(); } catch (final Exception ex) { getLogger().warn("unable to cleanly shutdown embedded server", ex); this.server = null; } } synchronized private void createHttpServerFromService(final ProcessContext context) throws Exception { if (initialized.get()) { return; } runOnPrimary.set(context.getExecutionNode().equals(ExecutionNode.PRIMARY)); final String basePath = context.getProperty(BASE_PATH).evaluateAttributeExpressions().getValue(); final SSLContextProvider sslContextProvider = context.getProperty(SSL_CONTEXT_SERVICE).asControllerService(SSLContextProvider.class); final int returnCode = context.getProperty(RETURN_CODE).asInteger(); final long requestMaxSize = context.getProperty(MULTIPART_REQUEST_MAX_SIZE).asDataSize(DataUnit.B).longValue(); final int readBufferSize = context.getProperty(MULTIPART_READ_BUFFER_SIZE).asDataSize(DataUnit.B).intValue(); final int maxThreadPoolSize = context.getProperty(MAX_THREAD_POOL_SIZE).asInteger(); final int requestHeaderSize = context.getProperty(REQUEST_HEADER_MAX_SIZE).asDataSize(DataUnit.B).intValue(); final ClientAuthentication clientAuthentication = getClientAuthentication(sslContextProvider, context); // thread pool for the jetty instance final QueuedThreadPool threadPool = new QueuedThreadPool(maxThreadPoolSize); threadPool.setName(String.format("%s (%s) Web Server", getClass().getSimpleName(), getIdentifier())); // create the server instance final Server server = new Server(threadPool); // get the configured port final int port = context.getProperty(PORT).evaluateAttributeExpressions().asInteger(); final HttpProtocolStrategy httpProtocolStrategy = 
sslContextProvider == null ? HttpProtocolStrategy.valueOf(HTTP_PROTOCOL_STRATEGY.getDefaultValue()) : context.getProperty(HTTP_PROTOCOL_STRATEGY).asAllowableValue(HttpProtocolStrategy.class); final ServerConnector connector = createServerConnector(server, port, requestHeaderSize, sslContextProvider, clientAuthentication, httpProtocolStrategy ); server.addConnector(connector); // Add a separate connector for the health check port (if specified) final Integer healthCheckPort = context.getProperty(HEALTH_CHECK_PORT).evaluateAttributeExpressions().asInteger(); if (healthCheckPort != null) { final ServerConnector healthCheckConnector = createServerConnector(server, healthCheckPort, requestHeaderSize, sslContextProvider, ClientAuthentication.NONE, httpProtocolStrategy ); server.addConnector(healthCheckConnector); } final boolean securityEnabled = sslContextProvider != null; final ServletContextHandler contextHandler = new ServletContextHandler("/", true, securityEnabled); for (final Class<? extends Servlet> cls : getServerClasses()) { final Path path = cls.getAnnotation(Path.class); // Note: servlets must have a path annotation - this will NPE otherwise // also, servlets other than ListenHttpServlet must have a path starting with / if (basePath.isEmpty() && !path.value().isEmpty()) { // Note: this is to handle the condition of an empty uri, otherwise pathSpec would start with // contextHandler.addServlet(cls, path.value()); } else { contextHandler.addServlet(cls, "/" + basePath + path.value()); } } contextHandler.addFilter(HttpMethodFilter.class, "/*", EnumSet.allOf(DispatcherType.class)); contextHandler.setAttribute(CONTEXT_ATTRIBUTE_PROCESSOR, this); contextHandler.setAttribute(CONTEXT_ATTRIBUTE_LOGGER, getLogger()); contextHandler.setAttribute(CONTEXT_ATTRIBUTE_SESSION_FACTORY_HOLDER, sessionFactoryReference); contextHandler.setAttribute(CONTEXT_ATTRIBUTE_PROCESS_CONTEXT_HOLDER, context); contextHandler.setAttribute(CONTEXT_ATTRIBUTE_FLOWFILE_MAP, flowFileMap); 
contextHandler.setAttribute(CONTEXT_ATTRIBUTE_AUTHORITY_PATTERN, Pattern.compile(context.getProperty(AUTHORIZED_DN_PATTERN).getValue())); contextHandler.setAttribute(CONTEXT_ATTRIBUTE_AUTHORITY_ISSUER_PATTERN, Pattern.compile(context.getProperty(AUTHORIZED_ISSUER_DN_PATTERN) .isSet() ? context.getProperty(AUTHORIZED_ISSUER_DN_PATTERN).getValue() : MATCH_ALL)); contextHandler.setAttribute(CONTEXT_ATTRIBUTE_BASE_PATH, basePath); contextHandler.setAttribute(CONTEXT_ATTRIBUTE_RETURN_CODE, returnCode); contextHandler.setAttribute(CONTEXT_ATTRIBUTE_MULTIPART_REQUEST_MAX_SIZE, requestMaxSize); contextHandler.setAttribute(CONTEXT_ATTRIBUTE_MULTIPART_READ_BUFFER_SIZE, readBufferSize); contextHandler.setAttribute(CONTEXT_ATTRIBUTE_PORT, port); if (context.getProperty(HEADERS_AS_ATTRIBUTES_REGEX).isSet()) { contextHandler.setAttribute(CONTEXT_ATTRIBUTE_HEADER_PATTERN, Pattern.compile(context.getProperty(HEADERS_AS_ATTRIBUTES_REGEX).getValue())); } server.setHandler(contextHandler); try { server.start(); } catch (Exception e) { shutdownHttpServer(server); throw e; } // If Port is set to 0, we need to notify the ListenHTTPServlet of the actual port being used. But this isn't available until after // the server has been started, and at that point it is too late to set it in the configuration for the context handler so we set it afterwards. 
for (final ServletHolder holder : contextHandler.getServletHandler().getServlets()) { final Servlet servlet = holder.getServlet(); if (servlet instanceof ListenHTTPServlet) { ((ListenHTTPServlet) servlet).setPort(connector.getLocalPort()); } } this.server = server; initialized.set(true); } private ClientAuthentication getClientAuthentication(final SSLContextProvider sslContextProvider, final ProcessContext context) { ClientAuthentication clientAuthentication = ClientAuthentication.NONE; if (sslContextProvider != null) { final PropertyValue clientAuthenticationProperty = context.getProperty(CLIENT_AUTHENTICATION); if (clientAuthenticationProperty.isSet()) { clientAuthentication = ClientAuthentication.valueOf(clientAuthenticationProperty.getValue()); if (clientAuthentication == ClientAuthentication.AUTO) { final X509TrustManager trustManager = sslContextProvider.createTrustManager(); if (isTrustManagerConfigured(trustManager)) { clientAuthentication = ClientAuthentication.REQUIRED; getLogger().debug("Client Authentication REQUIRED from SSLContextService Trust Manager configuration"); } } } } return clientAuthentication; } private boolean isTrustManagerConfigured(final X509TrustManager configuredTrustManager) { boolean trustManagerConfigured = false; try { final TrustManagerFactory trustManagerFactory = TrustManagerFactory.getInstance(TrustManagerFactory.getDefaultAlgorithm()); trustManagerFactory.init((KeyStore) null); final TrustManager[] trustManagers = trustManagerFactory.getTrustManagers(); final TrustManager trustManager = trustManagers[0]; if (trustManager instanceof X509TrustManager defaultTrustManager) { final X509Certificate[] defaultAcceptedIssuers = defaultTrustManager.getAcceptedIssuers(); final X509Certificate[] acceptedIssuers = configuredTrustManager.getAcceptedIssuers(); trustManagerConfigured = !Arrays.deepEquals(defaultAcceptedIssuers, acceptedIssuers); } } catch (final Exception e) { getLogger().warn("Loading default SSLContext for Client 
Authentication evaluation failed", e); } return trustManagerConfigured; } private ServerConnector createServerConnector(final Server server, final int port, final int requestMaxHeaderSize, final SSLContextProvider sslContextProvider, final ClientAuthentication clientAuthentication, final HttpProtocolStrategy httpProtocolStrategy ) { final StandardServerConnectorFactory serverConnectorFactory = new StandardServerConnectorFactory(server, port); serverConnectorFactory.setRequestHeaderSize(requestMaxHeaderSize); final SSLContext sslContext = sslContextProvider == null ? null : sslContextProvider.createContext(); serverConnectorFactory.setSslContext(sslContext); final String[] enabledProtocols = sslContext == null ? new String[0] : sslContext.getDefaultSSLParameters().getProtocols(); serverConnectorFactory.setIncludeSecurityProtocols(enabledProtocols); if (ClientAuthentication.REQUIRED == clientAuthentication) { serverConnectorFactory.setNeedClientAuth(true); } else if (ClientAuthentication.WANT == clientAuthentication) { serverConnectorFactory.setWantClientAuth(true); } serverConnectorFactory.setApplicationLayerProtocols(httpProtocolStrategy.getApplicationLayerProtocols()); return serverConnectorFactory.getServerConnector(); } @OnScheduled public void clearInit() { initialized.set(false); } protected Set<Class<? 
extends Servlet>> getServerClasses() { // NOTE: Servlets added below MUST have a Path annotation // any servlets other than ListenHTTPServlet must have a Path annotation start with / return Set.of(ListenHTTPServlet.class, ContentAcknowledgmentServlet.class, HealthCheckServlet.class); } private Set<String> findOldFlowFileIds(final ProcessContext ctx) { final Set<String> old = new HashSet<>(); final long expiryMillis = ctx.getProperty(MAX_UNCONFIRMED_TIME).asTimePeriod(TimeUnit.MILLISECONDS); final long cutoffTime = System.currentTimeMillis() - expiryMillis; for (final Map.Entry<String, FlowFileEntryTimeWrapper> entry : flowFileMap.entrySet()) { final FlowFileEntryTimeWrapper wrapper = entry.getValue(); if (wrapper != null && wrapper.getEntryTime() < cutoffTime) { old.add(entry.getKey()); } } return old; } @Override public void onTrigger(final ProcessContext context, final ProcessSessionFactory sessionFactory) throws ProcessException { try { if (!initialized.get()) { createHttpServerFromService(context); } } catch (Exception e) { getLogger().warn("Failed to start http server during initialization", e); context.yield(); throw new ProcessException("Failed to initialize the server", e); } sessionFactoryReference.compareAndSet(null, sessionFactory); for (final String id : findOldFlowFileIds(context)) { final FlowFileEntryTimeWrapper wrapper = flowFileMap.remove(id); if (wrapper != null) { getLogger().warn("failed to receive acknowledgment for HOLD with ID {} sent by {}; rolling back session", id, wrapper.getClientIP()); wrapper.session.rollback(); } } context.yield(); } @OnPrimaryNodeStateChange public void onPrimaryNodeChange(final PrimaryNodeState newState) { if (runOnPrimary.get() && newState.equals(PrimaryNodeState.PRIMARY_NODE_REVOKED)) { try { shutdownHttpServer(); } catch (final Exception shutdownException) { getLogger().warn("Processor is configured to run only on Primary Node, but failed to shutdown HTTP server following revocation of primary node status due to 
{}", shutdownException); } } } public static class FlowFileEntryTimeWrapper { private final Set<FlowFile> flowFiles; private final long entryTime; private final ProcessSession session; private final String clientIP; public FlowFileEntryTimeWrapper(final ProcessSession session, final Set<FlowFile> flowFiles, final long entryTime, final String clientIP) { this.flowFiles = flowFiles; this.entryTime = entryTime; this.session = session; this.clientIP = clientIP; } public Set<FlowFile> getFlowFiles() { return flowFiles; } public long getEntryTime() { return entryTime; } public ProcessSession getSession() { return session; } public String getClientIP() { return clientIP; } } }
googleapis/google-cloud-java
36,077
java-kms/google-cloud-kms/src/test/java/com/google/cloud/kms/v1/EkmServiceClientHttpJsonTest.java
/* * Copyright 2025 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.google.cloud.kms.v1; import static com.google.cloud.kms.v1.EkmServiceClient.ListEkmConnectionsPagedResponse; import static com.google.cloud.kms.v1.EkmServiceClient.ListLocationsPagedResponse; import com.google.api.gax.core.NoCredentialsProvider; import com.google.api.gax.httpjson.GaxHttpJsonProperties; import com.google.api.gax.httpjson.testing.MockHttpService; import com.google.api.gax.rpc.ApiClientHeaderProvider; import com.google.api.gax.rpc.ApiException; import com.google.api.gax.rpc.ApiExceptionFactory; import com.google.api.gax.rpc.InvalidArgumentException; import com.google.api.gax.rpc.StatusCode; import com.google.api.gax.rpc.testing.FakeStatusCode; import com.google.cloud.kms.v1.stub.HttpJsonEkmServiceStub; import com.google.cloud.location.GetLocationRequest; import com.google.cloud.location.ListLocationsRequest; import com.google.cloud.location.ListLocationsResponse; import com.google.cloud.location.Location; import com.google.common.collect.Lists; import com.google.iam.v1.AuditConfig; import com.google.iam.v1.Binding; import com.google.iam.v1.GetIamPolicyRequest; import com.google.iam.v1.GetPolicyOptions; import com.google.iam.v1.Policy; import com.google.iam.v1.SetIamPolicyRequest; import com.google.iam.v1.TestIamPermissionsRequest; import com.google.iam.v1.TestIamPermissionsResponse; import com.google.protobuf.Any; import com.google.protobuf.ByteString; import 
com.google.protobuf.FieldMask; import com.google.protobuf.Timestamp; import java.io.IOException; import java.util.ArrayList; import java.util.Arrays; import java.util.HashMap; import java.util.List; import javax.annotation.Generated; import org.junit.After; import org.junit.AfterClass; import org.junit.Assert; import org.junit.Before; import org.junit.BeforeClass; import org.junit.Test; @Generated("by gapic-generator-java") public class EkmServiceClientHttpJsonTest { private static MockHttpService mockService; private static EkmServiceClient client; @BeforeClass public static void startStaticServer() throws IOException { mockService = new MockHttpService( HttpJsonEkmServiceStub.getMethodDescriptors(), EkmServiceSettings.getDefaultEndpoint()); EkmServiceSettings settings = EkmServiceSettings.newHttpJsonBuilder() .setTransportChannelProvider( EkmServiceSettings.defaultHttpJsonTransportProviderBuilder() .setHttpTransport(mockService) .build()) .setCredentialsProvider(NoCredentialsProvider.create()) .build(); client = EkmServiceClient.create(settings); } @AfterClass public static void stopServer() { client.close(); } @Before public void setUp() {} @After public void tearDown() throws Exception { mockService.reset(); } @Test public void listEkmConnectionsTest() throws Exception { EkmConnection responsesElement = EkmConnection.newBuilder().build(); ListEkmConnectionsResponse expectedResponse = ListEkmConnectionsResponse.newBuilder() .setNextPageToken("") .addAllEkmConnections(Arrays.asList(responsesElement)) .build(); mockService.addResponse(expectedResponse); LocationName parent = LocationName.of("[PROJECT]", "[LOCATION]"); ListEkmConnectionsPagedResponse pagedListResponse = client.listEkmConnections(parent); List<EkmConnection> resources = Lists.newArrayList(pagedListResponse.iterateAll()); Assert.assertEquals(1, resources.size()); Assert.assertEquals(expectedResponse.getEkmConnectionsList().get(0), resources.get(0)); List<String> actualRequests = 
mockService.getRequestPaths(); Assert.assertEquals(1, actualRequests.size()); String apiClientHeaderKey = mockService .getRequestHeaders() .get(ApiClientHeaderProvider.getDefaultApiClientHeaderKey()) .iterator() .next(); Assert.assertTrue( GaxHttpJsonProperties.getDefaultApiClientHeaderPattern() .matcher(apiClientHeaderKey) .matches()); } @Test public void listEkmConnectionsExceptionTest() throws Exception { ApiException exception = ApiExceptionFactory.createException( new Exception(), FakeStatusCode.of(StatusCode.Code.INVALID_ARGUMENT), false); mockService.addException(exception); try { LocationName parent = LocationName.of("[PROJECT]", "[LOCATION]"); client.listEkmConnections(parent); Assert.fail("No exception raised"); } catch (InvalidArgumentException e) { // Expected exception. } } @Test public void listEkmConnectionsTest2() throws Exception { EkmConnection responsesElement = EkmConnection.newBuilder().build(); ListEkmConnectionsResponse expectedResponse = ListEkmConnectionsResponse.newBuilder() .setNextPageToken("") .addAllEkmConnections(Arrays.asList(responsesElement)) .build(); mockService.addResponse(expectedResponse); String parent = "projects/project-5833/locations/location-5833"; ListEkmConnectionsPagedResponse pagedListResponse = client.listEkmConnections(parent); List<EkmConnection> resources = Lists.newArrayList(pagedListResponse.iterateAll()); Assert.assertEquals(1, resources.size()); Assert.assertEquals(expectedResponse.getEkmConnectionsList().get(0), resources.get(0)); List<String> actualRequests = mockService.getRequestPaths(); Assert.assertEquals(1, actualRequests.size()); String apiClientHeaderKey = mockService .getRequestHeaders() .get(ApiClientHeaderProvider.getDefaultApiClientHeaderKey()) .iterator() .next(); Assert.assertTrue( GaxHttpJsonProperties.getDefaultApiClientHeaderPattern() .matcher(apiClientHeaderKey) .matches()); } @Test public void listEkmConnectionsExceptionTest2() throws Exception { ApiException exception = 
ApiExceptionFactory.createException( new Exception(), FakeStatusCode.of(StatusCode.Code.INVALID_ARGUMENT), false); mockService.addException(exception); try { String parent = "projects/project-5833/locations/location-5833"; client.listEkmConnections(parent); Assert.fail("No exception raised"); } catch (InvalidArgumentException e) { // Expected exception. } } @Test public void getEkmConnectionTest() throws Exception { EkmConnection expectedResponse = EkmConnection.newBuilder() .setName(EkmConnectionName.of("[PROJECT]", "[LOCATION]", "[EKM_CONNECTION]").toString()) .setCreateTime(Timestamp.newBuilder().build()) .addAllServiceResolvers(new ArrayList<EkmConnection.ServiceResolver>()) .setEtag("etag3123477") .setCryptoSpacePath("cryptoSpacePath273829514") .build(); mockService.addResponse(expectedResponse); EkmConnectionName name = EkmConnectionName.of("[PROJECT]", "[LOCATION]", "[EKM_CONNECTION]"); EkmConnection actualResponse = client.getEkmConnection(name); Assert.assertEquals(expectedResponse, actualResponse); List<String> actualRequests = mockService.getRequestPaths(); Assert.assertEquals(1, actualRequests.size()); String apiClientHeaderKey = mockService .getRequestHeaders() .get(ApiClientHeaderProvider.getDefaultApiClientHeaderKey()) .iterator() .next(); Assert.assertTrue( GaxHttpJsonProperties.getDefaultApiClientHeaderPattern() .matcher(apiClientHeaderKey) .matches()); } @Test public void getEkmConnectionExceptionTest() throws Exception { ApiException exception = ApiExceptionFactory.createException( new Exception(), FakeStatusCode.of(StatusCode.Code.INVALID_ARGUMENT), false); mockService.addException(exception); try { EkmConnectionName name = EkmConnectionName.of("[PROJECT]", "[LOCATION]", "[EKM_CONNECTION]"); client.getEkmConnection(name); Assert.fail("No exception raised"); } catch (InvalidArgumentException e) { // Expected exception. 
} } @Test public void getEkmConnectionTest2() throws Exception { EkmConnection expectedResponse = EkmConnection.newBuilder() .setName(EkmConnectionName.of("[PROJECT]", "[LOCATION]", "[EKM_CONNECTION]").toString()) .setCreateTime(Timestamp.newBuilder().build()) .addAllServiceResolvers(new ArrayList<EkmConnection.ServiceResolver>()) .setEtag("etag3123477") .setCryptoSpacePath("cryptoSpacePath273829514") .build(); mockService.addResponse(expectedResponse); String name = "projects/project-5346/locations/location-5346/ekmConnections/ekmConnection-5346"; EkmConnection actualResponse = client.getEkmConnection(name); Assert.assertEquals(expectedResponse, actualResponse); List<String> actualRequests = mockService.getRequestPaths(); Assert.assertEquals(1, actualRequests.size()); String apiClientHeaderKey = mockService .getRequestHeaders() .get(ApiClientHeaderProvider.getDefaultApiClientHeaderKey()) .iterator() .next(); Assert.assertTrue( GaxHttpJsonProperties.getDefaultApiClientHeaderPattern() .matcher(apiClientHeaderKey) .matches()); } @Test public void getEkmConnectionExceptionTest2() throws Exception { ApiException exception = ApiExceptionFactory.createException( new Exception(), FakeStatusCode.of(StatusCode.Code.INVALID_ARGUMENT), false); mockService.addException(exception); try { String name = "projects/project-5346/locations/location-5346/ekmConnections/ekmConnection-5346"; client.getEkmConnection(name); Assert.fail("No exception raised"); } catch (InvalidArgumentException e) { // Expected exception. 
} } @Test public void createEkmConnectionTest() throws Exception { EkmConnection expectedResponse = EkmConnection.newBuilder() .setName(EkmConnectionName.of("[PROJECT]", "[LOCATION]", "[EKM_CONNECTION]").toString()) .setCreateTime(Timestamp.newBuilder().build()) .addAllServiceResolvers(new ArrayList<EkmConnection.ServiceResolver>()) .setEtag("etag3123477") .setCryptoSpacePath("cryptoSpacePath273829514") .build(); mockService.addResponse(expectedResponse); LocationName parent = LocationName.of("[PROJECT]", "[LOCATION]"); String ekmConnectionId = "ekmConnectionId-445017408"; EkmConnection ekmConnection = EkmConnection.newBuilder().build(); EkmConnection actualResponse = client.createEkmConnection(parent, ekmConnectionId, ekmConnection); Assert.assertEquals(expectedResponse, actualResponse); List<String> actualRequests = mockService.getRequestPaths(); Assert.assertEquals(1, actualRequests.size()); String apiClientHeaderKey = mockService .getRequestHeaders() .get(ApiClientHeaderProvider.getDefaultApiClientHeaderKey()) .iterator() .next(); Assert.assertTrue( GaxHttpJsonProperties.getDefaultApiClientHeaderPattern() .matcher(apiClientHeaderKey) .matches()); } @Test public void createEkmConnectionExceptionTest() throws Exception { ApiException exception = ApiExceptionFactory.createException( new Exception(), FakeStatusCode.of(StatusCode.Code.INVALID_ARGUMENT), false); mockService.addException(exception); try { LocationName parent = LocationName.of("[PROJECT]", "[LOCATION]"); String ekmConnectionId = "ekmConnectionId-445017408"; EkmConnection ekmConnection = EkmConnection.newBuilder().build(); client.createEkmConnection(parent, ekmConnectionId, ekmConnection); Assert.fail("No exception raised"); } catch (InvalidArgumentException e) { // Expected exception. 
} } @Test public void createEkmConnectionTest2() throws Exception { EkmConnection expectedResponse = EkmConnection.newBuilder() .setName(EkmConnectionName.of("[PROJECT]", "[LOCATION]", "[EKM_CONNECTION]").toString()) .setCreateTime(Timestamp.newBuilder().build()) .addAllServiceResolvers(new ArrayList<EkmConnection.ServiceResolver>()) .setEtag("etag3123477") .setCryptoSpacePath("cryptoSpacePath273829514") .build(); mockService.addResponse(expectedResponse); String parent = "projects/project-5833/locations/location-5833"; String ekmConnectionId = "ekmConnectionId-445017408"; EkmConnection ekmConnection = EkmConnection.newBuilder().build(); EkmConnection actualResponse = client.createEkmConnection(parent, ekmConnectionId, ekmConnection); Assert.assertEquals(expectedResponse, actualResponse); List<String> actualRequests = mockService.getRequestPaths(); Assert.assertEquals(1, actualRequests.size()); String apiClientHeaderKey = mockService .getRequestHeaders() .get(ApiClientHeaderProvider.getDefaultApiClientHeaderKey()) .iterator() .next(); Assert.assertTrue( GaxHttpJsonProperties.getDefaultApiClientHeaderPattern() .matcher(apiClientHeaderKey) .matches()); } @Test public void createEkmConnectionExceptionTest2() throws Exception { ApiException exception = ApiExceptionFactory.createException( new Exception(), FakeStatusCode.of(StatusCode.Code.INVALID_ARGUMENT), false); mockService.addException(exception); try { String parent = "projects/project-5833/locations/location-5833"; String ekmConnectionId = "ekmConnectionId-445017408"; EkmConnection ekmConnection = EkmConnection.newBuilder().build(); client.createEkmConnection(parent, ekmConnectionId, ekmConnection); Assert.fail("No exception raised"); } catch (InvalidArgumentException e) { // Expected exception. 
} } @Test public void updateEkmConnectionTest() throws Exception { EkmConnection expectedResponse = EkmConnection.newBuilder() .setName(EkmConnectionName.of("[PROJECT]", "[LOCATION]", "[EKM_CONNECTION]").toString()) .setCreateTime(Timestamp.newBuilder().build()) .addAllServiceResolvers(new ArrayList<EkmConnection.ServiceResolver>()) .setEtag("etag3123477") .setCryptoSpacePath("cryptoSpacePath273829514") .build(); mockService.addResponse(expectedResponse); EkmConnection ekmConnection = EkmConnection.newBuilder() .setName(EkmConnectionName.of("[PROJECT]", "[LOCATION]", "[EKM_CONNECTION]").toString()) .setCreateTime(Timestamp.newBuilder().build()) .addAllServiceResolvers(new ArrayList<EkmConnection.ServiceResolver>()) .setEtag("etag3123477") .setCryptoSpacePath("cryptoSpacePath273829514") .build(); FieldMask updateMask = FieldMask.newBuilder().build(); EkmConnection actualResponse = client.updateEkmConnection(ekmConnection, updateMask); Assert.assertEquals(expectedResponse, actualResponse); List<String> actualRequests = mockService.getRequestPaths(); Assert.assertEquals(1, actualRequests.size()); String apiClientHeaderKey = mockService .getRequestHeaders() .get(ApiClientHeaderProvider.getDefaultApiClientHeaderKey()) .iterator() .next(); Assert.assertTrue( GaxHttpJsonProperties.getDefaultApiClientHeaderPattern() .matcher(apiClientHeaderKey) .matches()); } @Test public void updateEkmConnectionExceptionTest() throws Exception { ApiException exception = ApiExceptionFactory.createException( new Exception(), FakeStatusCode.of(StatusCode.Code.INVALID_ARGUMENT), false); mockService.addException(exception); try { EkmConnection ekmConnection = EkmConnection.newBuilder() .setName( EkmConnectionName.of("[PROJECT]", "[LOCATION]", "[EKM_CONNECTION]").toString()) .setCreateTime(Timestamp.newBuilder().build()) .addAllServiceResolvers(new ArrayList<EkmConnection.ServiceResolver>()) .setEtag("etag3123477") .setCryptoSpacePath("cryptoSpacePath273829514") .build(); FieldMask updateMask = 
FieldMask.newBuilder().build(); client.updateEkmConnection(ekmConnection, updateMask); Assert.fail("No exception raised"); } catch (InvalidArgumentException e) { // Expected exception. } } @Test public void getEkmConfigTest() throws Exception { EkmConfig expectedResponse = EkmConfig.newBuilder() .setName(EkmConfigName.of("[PROJECT]", "[LOCATION]").toString()) .setDefaultEkmConnection( EkmConnectionName.of("[PROJECT]", "[LOCATION]", "[EKM_CONNECTION]").toString()) .build(); mockService.addResponse(expectedResponse); EkmConfigName name = EkmConfigName.of("[PROJECT]", "[LOCATION]"); EkmConfig actualResponse = client.getEkmConfig(name); Assert.assertEquals(expectedResponse, actualResponse); List<String> actualRequests = mockService.getRequestPaths(); Assert.assertEquals(1, actualRequests.size()); String apiClientHeaderKey = mockService .getRequestHeaders() .get(ApiClientHeaderProvider.getDefaultApiClientHeaderKey()) .iterator() .next(); Assert.assertTrue( GaxHttpJsonProperties.getDefaultApiClientHeaderPattern() .matcher(apiClientHeaderKey) .matches()); } @Test public void getEkmConfigExceptionTest() throws Exception { ApiException exception = ApiExceptionFactory.createException( new Exception(), FakeStatusCode.of(StatusCode.Code.INVALID_ARGUMENT), false); mockService.addException(exception); try { EkmConfigName name = EkmConfigName.of("[PROJECT]", "[LOCATION]"); client.getEkmConfig(name); Assert.fail("No exception raised"); } catch (InvalidArgumentException e) { // Expected exception. 
} } @Test public void getEkmConfigTest2() throws Exception { EkmConfig expectedResponse = EkmConfig.newBuilder() .setName(EkmConfigName.of("[PROJECT]", "[LOCATION]").toString()) .setDefaultEkmConnection( EkmConnectionName.of("[PROJECT]", "[LOCATION]", "[EKM_CONNECTION]").toString()) .build(); mockService.addResponse(expectedResponse); String name = "projects/project-4516/locations/location-4516/ekmConfig"; EkmConfig actualResponse = client.getEkmConfig(name); Assert.assertEquals(expectedResponse, actualResponse); List<String> actualRequests = mockService.getRequestPaths(); Assert.assertEquals(1, actualRequests.size()); String apiClientHeaderKey = mockService .getRequestHeaders() .get(ApiClientHeaderProvider.getDefaultApiClientHeaderKey()) .iterator() .next(); Assert.assertTrue( GaxHttpJsonProperties.getDefaultApiClientHeaderPattern() .matcher(apiClientHeaderKey) .matches()); } @Test public void getEkmConfigExceptionTest2() throws Exception { ApiException exception = ApiExceptionFactory.createException( new Exception(), FakeStatusCode.of(StatusCode.Code.INVALID_ARGUMENT), false); mockService.addException(exception); try { String name = "projects/project-4516/locations/location-4516/ekmConfig"; client.getEkmConfig(name); Assert.fail("No exception raised"); } catch (InvalidArgumentException e) { // Expected exception. 
} } @Test public void updateEkmConfigTest() throws Exception { EkmConfig expectedResponse = EkmConfig.newBuilder() .setName(EkmConfigName.of("[PROJECT]", "[LOCATION]").toString()) .setDefaultEkmConnection( EkmConnectionName.of("[PROJECT]", "[LOCATION]", "[EKM_CONNECTION]").toString()) .build(); mockService.addResponse(expectedResponse); EkmConfig ekmConfig = EkmConfig.newBuilder() .setName(EkmConfigName.of("[PROJECT]", "[LOCATION]").toString()) .setDefaultEkmConnection( EkmConnectionName.of("[PROJECT]", "[LOCATION]", "[EKM_CONNECTION]").toString()) .build(); FieldMask updateMask = FieldMask.newBuilder().build(); EkmConfig actualResponse = client.updateEkmConfig(ekmConfig, updateMask); Assert.assertEquals(expectedResponse, actualResponse); List<String> actualRequests = mockService.getRequestPaths(); Assert.assertEquals(1, actualRequests.size()); String apiClientHeaderKey = mockService .getRequestHeaders() .get(ApiClientHeaderProvider.getDefaultApiClientHeaderKey()) .iterator() .next(); Assert.assertTrue( GaxHttpJsonProperties.getDefaultApiClientHeaderPattern() .matcher(apiClientHeaderKey) .matches()); } @Test public void updateEkmConfigExceptionTest() throws Exception { ApiException exception = ApiExceptionFactory.createException( new Exception(), FakeStatusCode.of(StatusCode.Code.INVALID_ARGUMENT), false); mockService.addException(exception); try { EkmConfig ekmConfig = EkmConfig.newBuilder() .setName(EkmConfigName.of("[PROJECT]", "[LOCATION]").toString()) .setDefaultEkmConnection( EkmConnectionName.of("[PROJECT]", "[LOCATION]", "[EKM_CONNECTION]").toString()) .build(); FieldMask updateMask = FieldMask.newBuilder().build(); client.updateEkmConfig(ekmConfig, updateMask); Assert.fail("No exception raised"); } catch (InvalidArgumentException e) { // Expected exception. 
} } @Test public void verifyConnectivityTest() throws Exception { VerifyConnectivityResponse expectedResponse = VerifyConnectivityResponse.newBuilder().build(); mockService.addResponse(expectedResponse); EkmConnectionName name = EkmConnectionName.of("[PROJECT]", "[LOCATION]", "[EKM_CONNECTION]"); VerifyConnectivityResponse actualResponse = client.verifyConnectivity(name); Assert.assertEquals(expectedResponse, actualResponse); List<String> actualRequests = mockService.getRequestPaths(); Assert.assertEquals(1, actualRequests.size()); String apiClientHeaderKey = mockService .getRequestHeaders() .get(ApiClientHeaderProvider.getDefaultApiClientHeaderKey()) .iterator() .next(); Assert.assertTrue( GaxHttpJsonProperties.getDefaultApiClientHeaderPattern() .matcher(apiClientHeaderKey) .matches()); } @Test public void verifyConnectivityExceptionTest() throws Exception { ApiException exception = ApiExceptionFactory.createException( new Exception(), FakeStatusCode.of(StatusCode.Code.INVALID_ARGUMENT), false); mockService.addException(exception); try { EkmConnectionName name = EkmConnectionName.of("[PROJECT]", "[LOCATION]", "[EKM_CONNECTION]"); client.verifyConnectivity(name); Assert.fail("No exception raised"); } catch (InvalidArgumentException e) { // Expected exception. 
} } @Test public void verifyConnectivityTest2() throws Exception { VerifyConnectivityResponse expectedResponse = VerifyConnectivityResponse.newBuilder().build(); mockService.addResponse(expectedResponse); String name = "projects/project-5346/locations/location-5346/ekmConnections/ekmConnection-5346"; VerifyConnectivityResponse actualResponse = client.verifyConnectivity(name); Assert.assertEquals(expectedResponse, actualResponse); List<String> actualRequests = mockService.getRequestPaths(); Assert.assertEquals(1, actualRequests.size()); String apiClientHeaderKey = mockService .getRequestHeaders() .get(ApiClientHeaderProvider.getDefaultApiClientHeaderKey()) .iterator() .next(); Assert.assertTrue( GaxHttpJsonProperties.getDefaultApiClientHeaderPattern() .matcher(apiClientHeaderKey) .matches()); } @Test public void verifyConnectivityExceptionTest2() throws Exception { ApiException exception = ApiExceptionFactory.createException( new Exception(), FakeStatusCode.of(StatusCode.Code.INVALID_ARGUMENT), false); mockService.addException(exception); try { String name = "projects/project-5346/locations/location-5346/ekmConnections/ekmConnection-5346"; client.verifyConnectivity(name); Assert.fail("No exception raised"); } catch (InvalidArgumentException e) { // Expected exception. 
} } @Test public void listLocationsTest() throws Exception { Location responsesElement = Location.newBuilder().build(); ListLocationsResponse expectedResponse = ListLocationsResponse.newBuilder() .setNextPageToken("") .addAllLocations(Arrays.asList(responsesElement)) .build(); mockService.addResponse(expectedResponse); ListLocationsRequest request = ListLocationsRequest.newBuilder() .setName("projects/project-3664") .setFilter("filter-1274492040") .setPageSize(883849137) .setPageToken("pageToken873572522") .build(); ListLocationsPagedResponse pagedListResponse = client.listLocations(request); List<Location> resources = Lists.newArrayList(pagedListResponse.iterateAll()); Assert.assertEquals(1, resources.size()); Assert.assertEquals(expectedResponse.getLocationsList().get(0), resources.get(0)); List<String> actualRequests = mockService.getRequestPaths(); Assert.assertEquals(1, actualRequests.size()); String apiClientHeaderKey = mockService .getRequestHeaders() .get(ApiClientHeaderProvider.getDefaultApiClientHeaderKey()) .iterator() .next(); Assert.assertTrue( GaxHttpJsonProperties.getDefaultApiClientHeaderPattern() .matcher(apiClientHeaderKey) .matches()); } @Test public void listLocationsExceptionTest() throws Exception { ApiException exception = ApiExceptionFactory.createException( new Exception(), FakeStatusCode.of(StatusCode.Code.INVALID_ARGUMENT), false); mockService.addException(exception); try { ListLocationsRequest request = ListLocationsRequest.newBuilder() .setName("projects/project-3664") .setFilter("filter-1274492040") .setPageSize(883849137) .setPageToken("pageToken873572522") .build(); client.listLocations(request); Assert.fail("No exception raised"); } catch (InvalidArgumentException e) { // Expected exception. 
} } @Test public void getLocationTest() throws Exception { Location expectedResponse = Location.newBuilder() .setName("name3373707") .setLocationId("locationId1541836720") .setDisplayName("displayName1714148973") .putAllLabels(new HashMap<String, String>()) .setMetadata(Any.newBuilder().build()) .build(); mockService.addResponse(expectedResponse); GetLocationRequest request = GetLocationRequest.newBuilder() .setName("projects/project-9062/locations/location-9062") .build(); Location actualResponse = client.getLocation(request); Assert.assertEquals(expectedResponse, actualResponse); List<String> actualRequests = mockService.getRequestPaths(); Assert.assertEquals(1, actualRequests.size()); String apiClientHeaderKey = mockService .getRequestHeaders() .get(ApiClientHeaderProvider.getDefaultApiClientHeaderKey()) .iterator() .next(); Assert.assertTrue( GaxHttpJsonProperties.getDefaultApiClientHeaderPattern() .matcher(apiClientHeaderKey) .matches()); } @Test public void getLocationExceptionTest() throws Exception { ApiException exception = ApiExceptionFactory.createException( new Exception(), FakeStatusCode.of(StatusCode.Code.INVALID_ARGUMENT), false); mockService.addException(exception); try { GetLocationRequest request = GetLocationRequest.newBuilder() .setName("projects/project-9062/locations/location-9062") .build(); client.getLocation(request); Assert.fail("No exception raised"); } catch (InvalidArgumentException e) { // Expected exception. 
} } @Test public void setIamPolicyTest() throws Exception { Policy expectedResponse = Policy.newBuilder() .setVersion(351608024) .addAllBindings(new ArrayList<Binding>()) .addAllAuditConfigs(new ArrayList<AuditConfig>()) .setEtag(ByteString.EMPTY) .build(); mockService.addResponse(expectedResponse); SetIamPolicyRequest request = SetIamPolicyRequest.newBuilder() .setResource( CryptoKeyName.of("[PROJECT]", "[LOCATION]", "[KEY_RING]", "[CRYPTO_KEY]") .toString()) .setPolicy(Policy.newBuilder().build()) .setUpdateMask(FieldMask.newBuilder().build()) .build(); Policy actualResponse = client.setIamPolicy(request); Assert.assertEquals(expectedResponse, actualResponse); List<String> actualRequests = mockService.getRequestPaths(); Assert.assertEquals(1, actualRequests.size()); String apiClientHeaderKey = mockService .getRequestHeaders() .get(ApiClientHeaderProvider.getDefaultApiClientHeaderKey()) .iterator() .next(); Assert.assertTrue( GaxHttpJsonProperties.getDefaultApiClientHeaderPattern() .matcher(apiClientHeaderKey) .matches()); } @Test public void setIamPolicyExceptionTest() throws Exception { ApiException exception = ApiExceptionFactory.createException( new Exception(), FakeStatusCode.of(StatusCode.Code.INVALID_ARGUMENT), false); mockService.addException(exception); try { SetIamPolicyRequest request = SetIamPolicyRequest.newBuilder() .setResource( CryptoKeyName.of("[PROJECT]", "[LOCATION]", "[KEY_RING]", "[CRYPTO_KEY]") .toString()) .setPolicy(Policy.newBuilder().build()) .setUpdateMask(FieldMask.newBuilder().build()) .build(); client.setIamPolicy(request); Assert.fail("No exception raised"); } catch (InvalidArgumentException e) { // Expected exception. 
} } @Test public void getIamPolicyTest() throws Exception { Policy expectedResponse = Policy.newBuilder() .setVersion(351608024) .addAllBindings(new ArrayList<Binding>()) .addAllAuditConfigs(new ArrayList<AuditConfig>()) .setEtag(ByteString.EMPTY) .build(); mockService.addResponse(expectedResponse); GetIamPolicyRequest request = GetIamPolicyRequest.newBuilder() .setResource( CryptoKeyName.of("[PROJECT]", "[LOCATION]", "[KEY_RING]", "[CRYPTO_KEY]") .toString()) .setOptions(GetPolicyOptions.newBuilder().build()) .build(); Policy actualResponse = client.getIamPolicy(request); Assert.assertEquals(expectedResponse, actualResponse); List<String> actualRequests = mockService.getRequestPaths(); Assert.assertEquals(1, actualRequests.size()); String apiClientHeaderKey = mockService .getRequestHeaders() .get(ApiClientHeaderProvider.getDefaultApiClientHeaderKey()) .iterator() .next(); Assert.assertTrue( GaxHttpJsonProperties.getDefaultApiClientHeaderPattern() .matcher(apiClientHeaderKey) .matches()); } @Test public void getIamPolicyExceptionTest() throws Exception { ApiException exception = ApiExceptionFactory.createException( new Exception(), FakeStatusCode.of(StatusCode.Code.INVALID_ARGUMENT), false); mockService.addException(exception); try { GetIamPolicyRequest request = GetIamPolicyRequest.newBuilder() .setResource( CryptoKeyName.of("[PROJECT]", "[LOCATION]", "[KEY_RING]", "[CRYPTO_KEY]") .toString()) .setOptions(GetPolicyOptions.newBuilder().build()) .build(); client.getIamPolicy(request); Assert.fail("No exception raised"); } catch (InvalidArgumentException e) { // Expected exception. 
} } @Test public void testIamPermissionsTest() throws Exception { TestIamPermissionsResponse expectedResponse = TestIamPermissionsResponse.newBuilder().addAllPermissions(new ArrayList<String>()).build(); mockService.addResponse(expectedResponse); TestIamPermissionsRequest request = TestIamPermissionsRequest.newBuilder() .setResource( CryptoKeyName.of("[PROJECT]", "[LOCATION]", "[KEY_RING]", "[CRYPTO_KEY]") .toString()) .addAllPermissions(new ArrayList<String>()) .build(); TestIamPermissionsResponse actualResponse = client.testIamPermissions(request); Assert.assertEquals(expectedResponse, actualResponse); List<String> actualRequests = mockService.getRequestPaths(); Assert.assertEquals(1, actualRequests.size()); String apiClientHeaderKey = mockService .getRequestHeaders() .get(ApiClientHeaderProvider.getDefaultApiClientHeaderKey()) .iterator() .next(); Assert.assertTrue( GaxHttpJsonProperties.getDefaultApiClientHeaderPattern() .matcher(apiClientHeaderKey) .matches()); } @Test public void testIamPermissionsExceptionTest() throws Exception { ApiException exception = ApiExceptionFactory.createException( new Exception(), FakeStatusCode.of(StatusCode.Code.INVALID_ARGUMENT), false); mockService.addException(exception); try { TestIamPermissionsRequest request = TestIamPermissionsRequest.newBuilder() .setResource( CryptoKeyName.of("[PROJECT]", "[LOCATION]", "[KEY_RING]", "[CRYPTO_KEY]") .toString()) .addAllPermissions(new ArrayList<String>()) .build(); client.testIamPermissions(request); Assert.fail("No exception raised"); } catch (InvalidArgumentException e) { // Expected exception. } } }
googleapis/google-cloud-java
36,191
java-analytics-admin/proto-google-analytics-admin-v1beta/src/main/java/com/google/analytics/admin/v1beta/UpdateKeyEventRequest.java
/* * Copyright 2025 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ // Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/analytics/admin/v1beta/analytics_admin.proto // Protobuf Java Version: 3.25.8 package com.google.analytics.admin.v1beta; /** * * * <pre> * Request message for UpdateKeyEvent RPC * </pre> * * Protobuf type {@code google.analytics.admin.v1beta.UpdateKeyEventRequest} */ public final class UpdateKeyEventRequest extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.analytics.admin.v1beta.UpdateKeyEventRequest) UpdateKeyEventRequestOrBuilder { private static final long serialVersionUID = 0L; // Use UpdateKeyEventRequest.newBuilder() to construct. 
private UpdateKeyEventRequest(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private UpdateKeyEventRequest() {} @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance(UnusedPrivateParameter unused) { return new UpdateKeyEventRequest(); } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.analytics.admin.v1beta.AnalyticsAdminProto .internal_static_google_analytics_admin_v1beta_UpdateKeyEventRequest_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.analytics.admin.v1beta.AnalyticsAdminProto .internal_static_google_analytics_admin_v1beta_UpdateKeyEventRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.analytics.admin.v1beta.UpdateKeyEventRequest.class, com.google.analytics.admin.v1beta.UpdateKeyEventRequest.Builder.class); } private int bitField0_; public static final int KEY_EVENT_FIELD_NUMBER = 1; private com.google.analytics.admin.v1beta.KeyEvent keyEvent_; /** * * * <pre> * Required. The Key Event to update. * The `name` field is used to identify the settings to be updated. * </pre> * * <code> * .google.analytics.admin.v1beta.KeyEvent key_event = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return Whether the keyEvent field is set. */ @java.lang.Override public boolean hasKeyEvent() { return ((bitField0_ & 0x00000001) != 0); } /** * * * <pre> * Required. The Key Event to update. * The `name` field is used to identify the settings to be updated. * </pre> * * <code> * .google.analytics.admin.v1beta.KeyEvent key_event = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return The keyEvent. */ @java.lang.Override public com.google.analytics.admin.v1beta.KeyEvent getKeyEvent() { return keyEvent_ == null ? com.google.analytics.admin.v1beta.KeyEvent.getDefaultInstance() : keyEvent_; } /** * * * <pre> * Required. 
The Key Event to update. * The `name` field is used to identify the settings to be updated. * </pre> * * <code> * .google.analytics.admin.v1beta.KeyEvent key_event = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> */ @java.lang.Override public com.google.analytics.admin.v1beta.KeyEventOrBuilder getKeyEventOrBuilder() { return keyEvent_ == null ? com.google.analytics.admin.v1beta.KeyEvent.getDefaultInstance() : keyEvent_; } public static final int UPDATE_MASK_FIELD_NUMBER = 2; private com.google.protobuf.FieldMask updateMask_; /** * * * <pre> * Required. The list of fields to be updated. Field names must be in snake * case (e.g., "field_to_update"). Omitted fields will not be updated. To * replace the entire entity, use one path with the string "*" to match all * fields. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return Whether the updateMask field is set. */ @java.lang.Override public boolean hasUpdateMask() { return ((bitField0_ & 0x00000002) != 0); } /** * * * <pre> * Required. The list of fields to be updated. Field names must be in snake * case (e.g., "field_to_update"). Omitted fields will not be updated. To * replace the entire entity, use one path with the string "*" to match all * fields. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return The updateMask. */ @java.lang.Override public com.google.protobuf.FieldMask getUpdateMask() { return updateMask_ == null ? com.google.protobuf.FieldMask.getDefaultInstance() : updateMask_; } /** * * * <pre> * Required. The list of fields to be updated. Field names must be in snake * case (e.g., "field_to_update"). Omitted fields will not be updated. To * replace the entire entity, use one path with the string "*" to match all * fields. 
* </pre> * * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ @java.lang.Override public com.google.protobuf.FieldMaskOrBuilder getUpdateMaskOrBuilder() { return updateMask_ == null ? com.google.protobuf.FieldMask.getDefaultInstance() : updateMask_; } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { if (((bitField0_ & 0x00000001) != 0)) { output.writeMessage(1, getKeyEvent()); } if (((bitField0_ & 0x00000002) != 0)) { output.writeMessage(2, getUpdateMask()); } getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) != 0)) { size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, getKeyEvent()); } if (((bitField0_ & 0x00000002) != 0)) { size += com.google.protobuf.CodedOutputStream.computeMessageSize(2, getUpdateMask()); } size += getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof com.google.analytics.admin.v1beta.UpdateKeyEventRequest)) { return super.equals(obj); } com.google.analytics.admin.v1beta.UpdateKeyEventRequest other = (com.google.analytics.admin.v1beta.UpdateKeyEventRequest) obj; if (hasKeyEvent() != other.hasKeyEvent()) return false; if (hasKeyEvent()) { if (!getKeyEvent().equals(other.getKeyEvent())) return false; } if (hasUpdateMask() != other.hasUpdateMask()) return false; if (hasUpdateMask()) { if (!getUpdateMask().equals(other.getUpdateMask())) return false; } if 
(!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); if (hasKeyEvent()) { hash = (37 * hash) + KEY_EVENT_FIELD_NUMBER; hash = (53 * hash) + getKeyEvent().hashCode(); } if (hasUpdateMask()) { hash = (37 * hash) + UPDATE_MASK_FIELD_NUMBER; hash = (53 * hash) + getUpdateMask().hashCode(); } hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static com.google.analytics.admin.v1beta.UpdateKeyEventRequest parseFrom( java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.analytics.admin.v1beta.UpdateKeyEventRequest parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.analytics.admin.v1beta.UpdateKeyEventRequest parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.analytics.admin.v1beta.UpdateKeyEventRequest parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.analytics.admin.v1beta.UpdateKeyEventRequest parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.analytics.admin.v1beta.UpdateKeyEventRequest parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, 
extensionRegistry); } public static com.google.analytics.admin.v1beta.UpdateKeyEventRequest parseFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.analytics.admin.v1beta.UpdateKeyEventRequest parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } public static com.google.analytics.admin.v1beta.UpdateKeyEventRequest parseDelimitedFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); } public static com.google.analytics.admin.v1beta.UpdateKeyEventRequest parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException( PARSER, input, extensionRegistry); } public static com.google.analytics.admin.v1beta.UpdateKeyEventRequest parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.analytics.admin.v1beta.UpdateKeyEventRequest parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder( com.google.analytics.admin.v1beta.UpdateKeyEventRequest prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } 
@java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * * * <pre> * Request message for UpdateKeyEvent RPC * </pre> * * Protobuf type {@code google.analytics.admin.v1beta.UpdateKeyEventRequest} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:google.analytics.admin.v1beta.UpdateKeyEventRequest) com.google.analytics.admin.v1beta.UpdateKeyEventRequestOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.analytics.admin.v1beta.AnalyticsAdminProto .internal_static_google_analytics_admin_v1beta_UpdateKeyEventRequest_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.analytics.admin.v1beta.AnalyticsAdminProto .internal_static_google_analytics_admin_v1beta_UpdateKeyEventRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.analytics.admin.v1beta.UpdateKeyEventRequest.class, com.google.analytics.admin.v1beta.UpdateKeyEventRequest.Builder.class); } // Construct using com.google.analytics.admin.v1beta.UpdateKeyEventRequest.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) { getKeyEventFieldBuilder(); getUpdateMaskFieldBuilder(); } } @java.lang.Override public Builder clear() { super.clear(); bitField0_ = 0; keyEvent_ = null; if (keyEventBuilder_ != null) { 
keyEventBuilder_.dispose(); keyEventBuilder_ = null; } updateMask_ = null; if (updateMaskBuilder_ != null) { updateMaskBuilder_.dispose(); updateMaskBuilder_ = null; } return this; } @java.lang.Override public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return com.google.analytics.admin.v1beta.AnalyticsAdminProto .internal_static_google_analytics_admin_v1beta_UpdateKeyEventRequest_descriptor; } @java.lang.Override public com.google.analytics.admin.v1beta.UpdateKeyEventRequest getDefaultInstanceForType() { return com.google.analytics.admin.v1beta.UpdateKeyEventRequest.getDefaultInstance(); } @java.lang.Override public com.google.analytics.admin.v1beta.UpdateKeyEventRequest build() { com.google.analytics.admin.v1beta.UpdateKeyEventRequest result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public com.google.analytics.admin.v1beta.UpdateKeyEventRequest buildPartial() { com.google.analytics.admin.v1beta.UpdateKeyEventRequest result = new com.google.analytics.admin.v1beta.UpdateKeyEventRequest(this); if (bitField0_ != 0) { buildPartial0(result); } onBuilt(); return result; } private void buildPartial0(com.google.analytics.admin.v1beta.UpdateKeyEventRequest result) { int from_bitField0_ = bitField0_; int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000001) != 0)) { result.keyEvent_ = keyEventBuilder_ == null ? keyEvent_ : keyEventBuilder_.build(); to_bitField0_ |= 0x00000001; } if (((from_bitField0_ & 0x00000002) != 0)) { result.updateMask_ = updateMaskBuilder_ == null ? 
updateMask_ : updateMaskBuilder_.build(); to_bitField0_ |= 0x00000002; } result.bitField0_ |= to_bitField0_; } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof com.google.analytics.admin.v1beta.UpdateKeyEventRequest) { return mergeFrom((com.google.analytics.admin.v1beta.UpdateKeyEventRequest) other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(com.google.analytics.admin.v1beta.UpdateKeyEventRequest other) { if (other == com.google.analytics.admin.v1beta.UpdateKeyEventRequest.getDefaultInstance()) return this; if (other.hasKeyEvent()) { mergeKeyEvent(other.getKeyEvent()); } if (other.hasUpdateMask()) { mergeUpdateMask(other.getUpdateMask()); } this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { if (extensionRegistry == null) { throw new 
java.lang.NullPointerException(); } try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { input.readMessage(getKeyEventFieldBuilder().getBuilder(), extensionRegistry); bitField0_ |= 0x00000001; break; } // case 10 case 18: { input.readMessage(getUpdateMaskFieldBuilder().getBuilder(), extensionRegistry); bitField0_ |= 0x00000002; break; } // case 18 default: { if (!super.parseUnknownField(input, extensionRegistry, tag)) { done = true; // was an endgroup tag } break; } // default: } // switch (tag) } // while (!done) } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.unwrapIOException(); } finally { onChanged(); } // finally return this; } private int bitField0_; private com.google.analytics.admin.v1beta.KeyEvent keyEvent_; private com.google.protobuf.SingleFieldBuilderV3< com.google.analytics.admin.v1beta.KeyEvent, com.google.analytics.admin.v1beta.KeyEvent.Builder, com.google.analytics.admin.v1beta.KeyEventOrBuilder> keyEventBuilder_; /** * * * <pre> * Required. The Key Event to update. * The `name` field is used to identify the settings to be updated. * </pre> * * <code> * .google.analytics.admin.v1beta.KeyEvent key_event = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return Whether the keyEvent field is set. */ public boolean hasKeyEvent() { return ((bitField0_ & 0x00000001) != 0); } /** * * * <pre> * Required. The Key Event to update. * The `name` field is used to identify the settings to be updated. * </pre> * * <code> * .google.analytics.admin.v1beta.KeyEvent key_event = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return The keyEvent. */ public com.google.analytics.admin.v1beta.KeyEvent getKeyEvent() { if (keyEventBuilder_ == null) { return keyEvent_ == null ? com.google.analytics.admin.v1beta.KeyEvent.getDefaultInstance() : keyEvent_; } else { return keyEventBuilder_.getMessage(); } } /** * * * <pre> * Required. The Key Event to update. 
* The `name` field is used to identify the settings to be updated. * </pre> * * <code> * .google.analytics.admin.v1beta.KeyEvent key_event = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder setKeyEvent(com.google.analytics.admin.v1beta.KeyEvent value) { if (keyEventBuilder_ == null) { if (value == null) { throw new NullPointerException(); } keyEvent_ = value; } else { keyEventBuilder_.setMessage(value); } bitField0_ |= 0x00000001; onChanged(); return this; } /** * * * <pre> * Required. The Key Event to update. * The `name` field is used to identify the settings to be updated. * </pre> * * <code> * .google.analytics.admin.v1beta.KeyEvent key_event = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder setKeyEvent(com.google.analytics.admin.v1beta.KeyEvent.Builder builderForValue) { if (keyEventBuilder_ == null) { keyEvent_ = builderForValue.build(); } else { keyEventBuilder_.setMessage(builderForValue.build()); } bitField0_ |= 0x00000001; onChanged(); return this; } /** * * * <pre> * Required. The Key Event to update. * The `name` field is used to identify the settings to be updated. * </pre> * * <code> * .google.analytics.admin.v1beta.KeyEvent key_event = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder mergeKeyEvent(com.google.analytics.admin.v1beta.KeyEvent value) { if (keyEventBuilder_ == null) { if (((bitField0_ & 0x00000001) != 0) && keyEvent_ != null && keyEvent_ != com.google.analytics.admin.v1beta.KeyEvent.getDefaultInstance()) { getKeyEventBuilder().mergeFrom(value); } else { keyEvent_ = value; } } else { keyEventBuilder_.mergeFrom(value); } if (keyEvent_ != null) { bitField0_ |= 0x00000001; onChanged(); } return this; } /** * * * <pre> * Required. The Key Event to update. * The `name` field is used to identify the settings to be updated. 
* </pre> * * <code> * .google.analytics.admin.v1beta.KeyEvent key_event = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder clearKeyEvent() { bitField0_ = (bitField0_ & ~0x00000001); keyEvent_ = null; if (keyEventBuilder_ != null) { keyEventBuilder_.dispose(); keyEventBuilder_ = null; } onChanged(); return this; } /** * * * <pre> * Required. The Key Event to update. * The `name` field is used to identify the settings to be updated. * </pre> * * <code> * .google.analytics.admin.v1beta.KeyEvent key_event = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public com.google.analytics.admin.v1beta.KeyEvent.Builder getKeyEventBuilder() { bitField0_ |= 0x00000001; onChanged(); return getKeyEventFieldBuilder().getBuilder(); } /** * * * <pre> * Required. The Key Event to update. * The `name` field is used to identify the settings to be updated. * </pre> * * <code> * .google.analytics.admin.v1beta.KeyEvent key_event = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public com.google.analytics.admin.v1beta.KeyEventOrBuilder getKeyEventOrBuilder() { if (keyEventBuilder_ != null) { return keyEventBuilder_.getMessageOrBuilder(); } else { return keyEvent_ == null ? com.google.analytics.admin.v1beta.KeyEvent.getDefaultInstance() : keyEvent_; } } /** * * * <pre> * Required. The Key Event to update. * The `name` field is used to identify the settings to be updated. 
* </pre> * * <code> * .google.analytics.admin.v1beta.KeyEvent key_event = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> */ private com.google.protobuf.SingleFieldBuilderV3< com.google.analytics.admin.v1beta.KeyEvent, com.google.analytics.admin.v1beta.KeyEvent.Builder, com.google.analytics.admin.v1beta.KeyEventOrBuilder> getKeyEventFieldBuilder() { if (keyEventBuilder_ == null) { keyEventBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< com.google.analytics.admin.v1beta.KeyEvent, com.google.analytics.admin.v1beta.KeyEvent.Builder, com.google.analytics.admin.v1beta.KeyEventOrBuilder>( getKeyEvent(), getParentForChildren(), isClean()); keyEvent_ = null; } return keyEventBuilder_; } private com.google.protobuf.FieldMask updateMask_; private com.google.protobuf.SingleFieldBuilderV3< com.google.protobuf.FieldMask, com.google.protobuf.FieldMask.Builder, com.google.protobuf.FieldMaskOrBuilder> updateMaskBuilder_; /** * * * <pre> * Required. The list of fields to be updated. Field names must be in snake * case (e.g., "field_to_update"). Omitted fields will not be updated. To * replace the entire entity, use one path with the string "*" to match all * fields. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return Whether the updateMask field is set. */ public boolean hasUpdateMask() { return ((bitField0_ & 0x00000002) != 0); } /** * * * <pre> * Required. The list of fields to be updated. Field names must be in snake * case (e.g., "field_to_update"). Omitted fields will not be updated. To * replace the entire entity, use one path with the string "*" to match all * fields. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return The updateMask. */ public com.google.protobuf.FieldMask getUpdateMask() { if (updateMaskBuilder_ == null) { return updateMask_ == null ? 
com.google.protobuf.FieldMask.getDefaultInstance() : updateMask_; } else { return updateMaskBuilder_.getMessage(); } } /** * * * <pre> * Required. The list of fields to be updated. Field names must be in snake * case (e.g., "field_to_update"). Omitted fields will not be updated. To * replace the entire entity, use one path with the string "*" to match all * fields. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder setUpdateMask(com.google.protobuf.FieldMask value) { if (updateMaskBuilder_ == null) { if (value == null) { throw new NullPointerException(); } updateMask_ = value; } else { updateMaskBuilder_.setMessage(value); } bitField0_ |= 0x00000002; onChanged(); return this; } /** * * * <pre> * Required. The list of fields to be updated. Field names must be in snake * case (e.g., "field_to_update"). Omitted fields will not be updated. To * replace the entire entity, use one path with the string "*" to match all * fields. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder setUpdateMask(com.google.protobuf.FieldMask.Builder builderForValue) { if (updateMaskBuilder_ == null) { updateMask_ = builderForValue.build(); } else { updateMaskBuilder_.setMessage(builderForValue.build()); } bitField0_ |= 0x00000002; onChanged(); return this; } /** * * * <pre> * Required. The list of fields to be updated. Field names must be in snake * case (e.g., "field_to_update"). Omitted fields will not be updated. To * replace the entire entity, use one path with the string "*" to match all * fields. 
* </pre> * * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder mergeUpdateMask(com.google.protobuf.FieldMask value) { if (updateMaskBuilder_ == null) { if (((bitField0_ & 0x00000002) != 0) && updateMask_ != null && updateMask_ != com.google.protobuf.FieldMask.getDefaultInstance()) { getUpdateMaskBuilder().mergeFrom(value); } else { updateMask_ = value; } } else { updateMaskBuilder_.mergeFrom(value); } if (updateMask_ != null) { bitField0_ |= 0x00000002; onChanged(); } return this; } /** * * * <pre> * Required. The list of fields to be updated. Field names must be in snake * case (e.g., "field_to_update"). Omitted fields will not be updated. To * replace the entire entity, use one path with the string "*" to match all * fields. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder clearUpdateMask() { bitField0_ = (bitField0_ & ~0x00000002); updateMask_ = null; if (updateMaskBuilder_ != null) { updateMaskBuilder_.dispose(); updateMaskBuilder_ = null; } onChanged(); return this; } /** * * * <pre> * Required. The list of fields to be updated. Field names must be in snake * case (e.g., "field_to_update"). Omitted fields will not be updated. To * replace the entire entity, use one path with the string "*" to match all * fields. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public com.google.protobuf.FieldMask.Builder getUpdateMaskBuilder() { bitField0_ |= 0x00000002; onChanged(); return getUpdateMaskFieldBuilder().getBuilder(); } /** * * * <pre> * Required. The list of fields to be updated. Field names must be in snake * case (e.g., "field_to_update"). Omitted fields will not be updated. To * replace the entire entity, use one path with the string "*" to match all * fields. 
* </pre> * * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public com.google.protobuf.FieldMaskOrBuilder getUpdateMaskOrBuilder() { if (updateMaskBuilder_ != null) { return updateMaskBuilder_.getMessageOrBuilder(); } else { return updateMask_ == null ? com.google.protobuf.FieldMask.getDefaultInstance() : updateMask_; } } /** * * * <pre> * Required. The list of fields to be updated. Field names must be in snake * case (e.g., "field_to_update"). Omitted fields will not be updated. To * replace the entire entity, use one path with the string "*" to match all * fields. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ private com.google.protobuf.SingleFieldBuilderV3< com.google.protobuf.FieldMask, com.google.protobuf.FieldMask.Builder, com.google.protobuf.FieldMaskOrBuilder> getUpdateMaskFieldBuilder() { if (updateMaskBuilder_ == null) { updateMaskBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< com.google.protobuf.FieldMask, com.google.protobuf.FieldMask.Builder, com.google.protobuf.FieldMaskOrBuilder>( getUpdateMask(), getParentForChildren(), isClean()); updateMask_ = null; } return updateMaskBuilder_; } @java.lang.Override public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:google.analytics.admin.v1beta.UpdateKeyEventRequest) } // @@protoc_insertion_point(class_scope:google.analytics.admin.v1beta.UpdateKeyEventRequest) private static final com.google.analytics.admin.v1beta.UpdateKeyEventRequest DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new com.google.analytics.admin.v1beta.UpdateKeyEventRequest(); } public static 
com.google.analytics.admin.v1beta.UpdateKeyEventRequest getDefaultInstance() { return DEFAULT_INSTANCE; } private static final com.google.protobuf.Parser<UpdateKeyEventRequest> PARSER = new com.google.protobuf.AbstractParser<UpdateKeyEventRequest>() { @java.lang.Override public UpdateKeyEventRequest parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { Builder builder = newBuilder(); try { builder.mergeFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(builder.buildPartial()); } catch (com.google.protobuf.UninitializedMessageException e) { throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException(e) .setUnfinishedMessage(builder.buildPartial()); } return builder.buildPartial(); } }; public static com.google.protobuf.Parser<UpdateKeyEventRequest> parser() { return PARSER; } @java.lang.Override public com.google.protobuf.Parser<UpdateKeyEventRequest> getParserForType() { return PARSER; } @java.lang.Override public com.google.analytics.admin.v1beta.UpdateKeyEventRequest getDefaultInstanceForType() { return DEFAULT_INSTANCE; } }
googleapis/google-cloud-java
36,435
java-servicehealth/google-cloud-servicehealth/src/main/java/com/google/cloud/servicehealth/v1/stub/ServiceHealthStubSettings.java
/* * Copyright 2025 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.google.cloud.servicehealth.v1.stub; import static com.google.cloud.servicehealth.v1.ServiceHealthClient.ListEventsPagedResponse; import static com.google.cloud.servicehealth.v1.ServiceHealthClient.ListLocationsPagedResponse; import static com.google.cloud.servicehealth.v1.ServiceHealthClient.ListOrganizationEventsPagedResponse; import static com.google.cloud.servicehealth.v1.ServiceHealthClient.ListOrganizationImpactsPagedResponse; import com.google.api.core.ApiFunction; import com.google.api.core.ApiFuture; import com.google.api.core.BetaApi; import com.google.api.core.ObsoleteApi; import com.google.api.gax.core.GaxProperties; import com.google.api.gax.core.GoogleCredentialsProvider; import com.google.api.gax.core.InstantiatingExecutorProvider; import com.google.api.gax.grpc.GaxGrpcProperties; import com.google.api.gax.grpc.GrpcTransportChannel; import com.google.api.gax.grpc.InstantiatingGrpcChannelProvider; import com.google.api.gax.httpjson.GaxHttpJsonProperties; import com.google.api.gax.httpjson.HttpJsonTransportChannel; import com.google.api.gax.httpjson.InstantiatingHttpJsonChannelProvider; import com.google.api.gax.retrying.RetrySettings; import com.google.api.gax.rpc.ApiCallContext; import com.google.api.gax.rpc.ApiClientHeaderProvider; import com.google.api.gax.rpc.ClientContext; import com.google.api.gax.rpc.PageContext; import com.google.api.gax.rpc.PagedCallSettings; import 
com.google.api.gax.rpc.PagedListDescriptor; import com.google.api.gax.rpc.PagedListResponseFactory; import com.google.api.gax.rpc.StatusCode; import com.google.api.gax.rpc.StubSettings; import com.google.api.gax.rpc.TransportChannelProvider; import com.google.api.gax.rpc.UnaryCallSettings; import com.google.api.gax.rpc.UnaryCallable; import com.google.cloud.location.GetLocationRequest; import com.google.cloud.location.ListLocationsRequest; import com.google.cloud.location.ListLocationsResponse; import com.google.cloud.location.Location; import com.google.cloud.servicehealth.v1.Event; import com.google.cloud.servicehealth.v1.GetEventRequest; import com.google.cloud.servicehealth.v1.GetOrganizationEventRequest; import com.google.cloud.servicehealth.v1.GetOrganizationImpactRequest; import com.google.cloud.servicehealth.v1.ListEventsRequest; import com.google.cloud.servicehealth.v1.ListEventsResponse; import com.google.cloud.servicehealth.v1.ListOrganizationEventsRequest; import com.google.cloud.servicehealth.v1.ListOrganizationEventsResponse; import com.google.cloud.servicehealth.v1.ListOrganizationImpactsRequest; import com.google.cloud.servicehealth.v1.ListOrganizationImpactsResponse; import com.google.cloud.servicehealth.v1.OrganizationEvent; import com.google.cloud.servicehealth.v1.OrganizationImpact; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; import com.google.common.collect.ImmutableSet; import com.google.common.collect.Lists; import java.io.IOException; import java.time.Duration; import java.util.List; import javax.annotation.Generated; // AUTO-GENERATED DOCUMENTATION AND CLASS. /** * Settings class to configure an instance of {@link ServiceHealthStub}. * * <p>The default instance has everything set to sensible defaults: * * <ul> * <li>The default service address (servicehealth.googleapis.com) and default port (443) are used. * <li>Credentials are acquired automatically through Application Default Credentials. 
* <li>Retries are configured for idempotent methods but not for non-idempotent methods. * </ul> * * <p>The builder of this class is recursive, so contained classes are themselves builders. When * build() is called, the tree of builders is called to create the complete settings object. * * <p>For example, to set the * [RetrySettings](https://cloud.google.com/java/docs/reference/gax/latest/com.google.api.gax.retrying.RetrySettings) * of getEvent: * * <pre>{@code * // This snippet has been automatically generated and should be regarded as a code template only. * // It will require modifications to work: * // - It may require correct/in-range values for request initialization. * // - It may require specifying regional endpoints when creating the service client as shown in * // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library * ServiceHealthStubSettings.Builder serviceHealthSettingsBuilder = * ServiceHealthStubSettings.newBuilder(); * serviceHealthSettingsBuilder * .getEventSettings() * .setRetrySettings( * serviceHealthSettingsBuilder * .getEventSettings() * .getRetrySettings() * .toBuilder() * .setInitialRetryDelayDuration(Duration.ofSeconds(1)) * .setInitialRpcTimeoutDuration(Duration.ofSeconds(5)) * .setMaxAttempts(5) * .setMaxRetryDelayDuration(Duration.ofSeconds(30)) * .setMaxRpcTimeoutDuration(Duration.ofSeconds(60)) * .setRetryDelayMultiplier(1.3) * .setRpcTimeoutMultiplier(1.5) * .setTotalTimeoutDuration(Duration.ofSeconds(300)) * .build()); * ServiceHealthStubSettings serviceHealthSettings = serviceHealthSettingsBuilder.build(); * }</pre> * * Please refer to the [Client Side Retry * Guide](https://github.com/googleapis/google-cloud-java/blob/main/docs/client_retries.md) for * additional support in setting retries. */ @Generated("by gapic-generator-java") public class ServiceHealthStubSettings extends StubSettings<ServiceHealthStubSettings> { /** The default scopes of the service. 
*/ private static final ImmutableList<String> DEFAULT_SERVICE_SCOPES = ImmutableList.<String>builder().add("https://www.googleapis.com/auth/cloud-platform").build(); private final PagedCallSettings<ListEventsRequest, ListEventsResponse, ListEventsPagedResponse> listEventsSettings; private final UnaryCallSettings<GetEventRequest, Event> getEventSettings; private final PagedCallSettings< ListOrganizationEventsRequest, ListOrganizationEventsResponse, ListOrganizationEventsPagedResponse> listOrganizationEventsSettings; private final UnaryCallSettings<GetOrganizationEventRequest, OrganizationEvent> getOrganizationEventSettings; private final PagedCallSettings< ListOrganizationImpactsRequest, ListOrganizationImpactsResponse, ListOrganizationImpactsPagedResponse> listOrganizationImpactsSettings; private final UnaryCallSettings<GetOrganizationImpactRequest, OrganizationImpact> getOrganizationImpactSettings; private final PagedCallSettings< ListLocationsRequest, ListLocationsResponse, ListLocationsPagedResponse> listLocationsSettings; private final UnaryCallSettings<GetLocationRequest, Location> getLocationSettings; private static final PagedListDescriptor<ListEventsRequest, ListEventsResponse, Event> LIST_EVENTS_PAGE_STR_DESC = new PagedListDescriptor<ListEventsRequest, ListEventsResponse, Event>() { @Override public String emptyToken() { return ""; } @Override public ListEventsRequest injectToken(ListEventsRequest payload, String token) { return ListEventsRequest.newBuilder(payload).setPageToken(token).build(); } @Override public ListEventsRequest injectPageSize(ListEventsRequest payload, int pageSize) { return ListEventsRequest.newBuilder(payload).setPageSize(pageSize).build(); } @Override public Integer extractPageSize(ListEventsRequest payload) { return payload.getPageSize(); } @Override public String extractNextToken(ListEventsResponse payload) { return payload.getNextPageToken(); } @Override public Iterable<Event> extractResources(ListEventsResponse payload) { return 
payload.getEventsList(); } }; private static final PagedListDescriptor< ListOrganizationEventsRequest, ListOrganizationEventsResponse, OrganizationEvent> LIST_ORGANIZATION_EVENTS_PAGE_STR_DESC = new PagedListDescriptor< ListOrganizationEventsRequest, ListOrganizationEventsResponse, OrganizationEvent>() { @Override public String emptyToken() { return ""; } @Override public ListOrganizationEventsRequest injectToken( ListOrganizationEventsRequest payload, String token) { return ListOrganizationEventsRequest.newBuilder(payload).setPageToken(token).build(); } @Override public ListOrganizationEventsRequest injectPageSize( ListOrganizationEventsRequest payload, int pageSize) { return ListOrganizationEventsRequest.newBuilder(payload) .setPageSize(pageSize) .build(); } @Override public Integer extractPageSize(ListOrganizationEventsRequest payload) { return payload.getPageSize(); } @Override public String extractNextToken(ListOrganizationEventsResponse payload) { return payload.getNextPageToken(); } @Override public Iterable<OrganizationEvent> extractResources( ListOrganizationEventsResponse payload) { return payload.getOrganizationEventsList(); } }; private static final PagedListDescriptor< ListOrganizationImpactsRequest, ListOrganizationImpactsResponse, OrganizationImpact> LIST_ORGANIZATION_IMPACTS_PAGE_STR_DESC = new PagedListDescriptor< ListOrganizationImpactsRequest, ListOrganizationImpactsResponse, OrganizationImpact>() { @Override public String emptyToken() { return ""; } @Override public ListOrganizationImpactsRequest injectToken( ListOrganizationImpactsRequest payload, String token) { return ListOrganizationImpactsRequest.newBuilder(payload).setPageToken(token).build(); } @Override public ListOrganizationImpactsRequest injectPageSize( ListOrganizationImpactsRequest payload, int pageSize) { return ListOrganizationImpactsRequest.newBuilder(payload) .setPageSize(pageSize) .build(); } @Override public Integer extractPageSize(ListOrganizationImpactsRequest payload) { 
return payload.getPageSize(); } @Override public String extractNextToken(ListOrganizationImpactsResponse payload) { return payload.getNextPageToken(); } @Override public Iterable<OrganizationImpact> extractResources( ListOrganizationImpactsResponse payload) { return payload.getOrganizationImpactsList(); } }; private static final PagedListDescriptor<ListLocationsRequest, ListLocationsResponse, Location> LIST_LOCATIONS_PAGE_STR_DESC = new PagedListDescriptor<ListLocationsRequest, ListLocationsResponse, Location>() { @Override public String emptyToken() { return ""; } @Override public ListLocationsRequest injectToken(ListLocationsRequest payload, String token) { return ListLocationsRequest.newBuilder(payload).setPageToken(token).build(); } @Override public ListLocationsRequest injectPageSize(ListLocationsRequest payload, int pageSize) { return ListLocationsRequest.newBuilder(payload).setPageSize(pageSize).build(); } @Override public Integer extractPageSize(ListLocationsRequest payload) { return payload.getPageSize(); } @Override public String extractNextToken(ListLocationsResponse payload) { return payload.getNextPageToken(); } @Override public Iterable<Location> extractResources(ListLocationsResponse payload) { return payload.getLocationsList(); } }; private static final PagedListResponseFactory< ListEventsRequest, ListEventsResponse, ListEventsPagedResponse> LIST_EVENTS_PAGE_STR_FACT = new PagedListResponseFactory< ListEventsRequest, ListEventsResponse, ListEventsPagedResponse>() { @Override public ApiFuture<ListEventsPagedResponse> getFuturePagedResponse( UnaryCallable<ListEventsRequest, ListEventsResponse> callable, ListEventsRequest request, ApiCallContext context, ApiFuture<ListEventsResponse> futureResponse) { PageContext<ListEventsRequest, ListEventsResponse, Event> pageContext = PageContext.create(callable, LIST_EVENTS_PAGE_STR_DESC, request, context); return ListEventsPagedResponse.createAsync(pageContext, futureResponse); } }; private static final 
PagedListResponseFactory< ListOrganizationEventsRequest, ListOrganizationEventsResponse, ListOrganizationEventsPagedResponse> LIST_ORGANIZATION_EVENTS_PAGE_STR_FACT = new PagedListResponseFactory< ListOrganizationEventsRequest, ListOrganizationEventsResponse, ListOrganizationEventsPagedResponse>() { @Override public ApiFuture<ListOrganizationEventsPagedResponse> getFuturePagedResponse( UnaryCallable<ListOrganizationEventsRequest, ListOrganizationEventsResponse> callable, ListOrganizationEventsRequest request, ApiCallContext context, ApiFuture<ListOrganizationEventsResponse> futureResponse) { PageContext< ListOrganizationEventsRequest, ListOrganizationEventsResponse, OrganizationEvent> pageContext = PageContext.create( callable, LIST_ORGANIZATION_EVENTS_PAGE_STR_DESC, request, context); return ListOrganizationEventsPagedResponse.createAsync(pageContext, futureResponse); } }; private static final PagedListResponseFactory< ListOrganizationImpactsRequest, ListOrganizationImpactsResponse, ListOrganizationImpactsPagedResponse> LIST_ORGANIZATION_IMPACTS_PAGE_STR_FACT = new PagedListResponseFactory< ListOrganizationImpactsRequest, ListOrganizationImpactsResponse, ListOrganizationImpactsPagedResponse>() { @Override public ApiFuture<ListOrganizationImpactsPagedResponse> getFuturePagedResponse( UnaryCallable<ListOrganizationImpactsRequest, ListOrganizationImpactsResponse> callable, ListOrganizationImpactsRequest request, ApiCallContext context, ApiFuture<ListOrganizationImpactsResponse> futureResponse) { PageContext< ListOrganizationImpactsRequest, ListOrganizationImpactsResponse, OrganizationImpact> pageContext = PageContext.create( callable, LIST_ORGANIZATION_IMPACTS_PAGE_STR_DESC, request, context); return ListOrganizationImpactsPagedResponse.createAsync(pageContext, futureResponse); } }; private static final PagedListResponseFactory< ListLocationsRequest, ListLocationsResponse, ListLocationsPagedResponse> LIST_LOCATIONS_PAGE_STR_FACT = new PagedListResponseFactory< 
ListLocationsRequest, ListLocationsResponse, ListLocationsPagedResponse>() { @Override public ApiFuture<ListLocationsPagedResponse> getFuturePagedResponse( UnaryCallable<ListLocationsRequest, ListLocationsResponse> callable, ListLocationsRequest request, ApiCallContext context, ApiFuture<ListLocationsResponse> futureResponse) { PageContext<ListLocationsRequest, ListLocationsResponse, Location> pageContext = PageContext.create(callable, LIST_LOCATIONS_PAGE_STR_DESC, request, context); return ListLocationsPagedResponse.createAsync(pageContext, futureResponse); } }; /** Returns the object with the settings used for calls to listEvents. */ public PagedCallSettings<ListEventsRequest, ListEventsResponse, ListEventsPagedResponse> listEventsSettings() { return listEventsSettings; } /** Returns the object with the settings used for calls to getEvent. */ public UnaryCallSettings<GetEventRequest, Event> getEventSettings() { return getEventSettings; } /** Returns the object with the settings used for calls to listOrganizationEvents. */ public PagedCallSettings< ListOrganizationEventsRequest, ListOrganizationEventsResponse, ListOrganizationEventsPagedResponse> listOrganizationEventsSettings() { return listOrganizationEventsSettings; } /** Returns the object with the settings used for calls to getOrganizationEvent. */ public UnaryCallSettings<GetOrganizationEventRequest, OrganizationEvent> getOrganizationEventSettings() { return getOrganizationEventSettings; } /** Returns the object with the settings used for calls to listOrganizationImpacts. */ public PagedCallSettings< ListOrganizationImpactsRequest, ListOrganizationImpactsResponse, ListOrganizationImpactsPagedResponse> listOrganizationImpactsSettings() { return listOrganizationImpactsSettings; } /** Returns the object with the settings used for calls to getOrganizationImpact. 
*/ public UnaryCallSettings<GetOrganizationImpactRequest, OrganizationImpact> getOrganizationImpactSettings() { return getOrganizationImpactSettings; } /** Returns the object with the settings used for calls to listLocations. */ public PagedCallSettings<ListLocationsRequest, ListLocationsResponse, ListLocationsPagedResponse> listLocationsSettings() { return listLocationsSettings; } /** Returns the object with the settings used for calls to getLocation. */ public UnaryCallSettings<GetLocationRequest, Location> getLocationSettings() { return getLocationSettings; } public ServiceHealthStub createStub() throws IOException { if (getTransportChannelProvider() .getTransportName() .equals(GrpcTransportChannel.getGrpcTransportName())) { return GrpcServiceHealthStub.create(this); } if (getTransportChannelProvider() .getTransportName() .equals(HttpJsonTransportChannel.getHttpJsonTransportName())) { return HttpJsonServiceHealthStub.create(this); } throw new UnsupportedOperationException( String.format( "Transport not supported: %s", getTransportChannelProvider().getTransportName())); } /** Returns the default service name. */ @Override public String getServiceName() { return "servicehealth"; } /** Returns a builder for the default ExecutorProvider for this service. */ public static InstantiatingExecutorProvider.Builder defaultExecutorProviderBuilder() { return InstantiatingExecutorProvider.newBuilder(); } /** Returns the default service endpoint. */ @ObsoleteApi("Use getEndpoint() instead") public static String getDefaultEndpoint() { return "servicehealth.googleapis.com:443"; } /** Returns the default mTLS service endpoint. */ public static String getDefaultMtlsEndpoint() { return "servicehealth.mtls.googleapis.com:443"; } /** Returns the default service scopes. */ public static List<String> getDefaultServiceScopes() { return DEFAULT_SERVICE_SCOPES; } /** Returns a builder for the default credentials for this service. 
*/ public static GoogleCredentialsProvider.Builder defaultCredentialsProviderBuilder() { return GoogleCredentialsProvider.newBuilder() .setScopesToApply(DEFAULT_SERVICE_SCOPES) .setUseJwtAccessWithScope(true); } /** Returns a builder for the default gRPC ChannelProvider for this service. */ public static InstantiatingGrpcChannelProvider.Builder defaultGrpcTransportProviderBuilder() { return InstantiatingGrpcChannelProvider.newBuilder() .setMaxInboundMessageSize(Integer.MAX_VALUE); } /** Returns a builder for the default REST ChannelProvider for this service. */ @BetaApi public static InstantiatingHttpJsonChannelProvider.Builder defaultHttpJsonTransportProviderBuilder() { return InstantiatingHttpJsonChannelProvider.newBuilder(); } public static TransportChannelProvider defaultTransportChannelProvider() { return defaultGrpcTransportProviderBuilder().build(); } public static ApiClientHeaderProvider.Builder defaultGrpcApiClientHeaderProviderBuilder() { return ApiClientHeaderProvider.newBuilder() .setGeneratedLibToken( "gapic", GaxProperties.getLibraryVersion(ServiceHealthStubSettings.class)) .setTransportToken( GaxGrpcProperties.getGrpcTokenName(), GaxGrpcProperties.getGrpcVersion()); } public static ApiClientHeaderProvider.Builder defaultHttpJsonApiClientHeaderProviderBuilder() { return ApiClientHeaderProvider.newBuilder() .setGeneratedLibToken( "gapic", GaxProperties.getLibraryVersion(ServiceHealthStubSettings.class)) .setTransportToken( GaxHttpJsonProperties.getHttpJsonTokenName(), GaxHttpJsonProperties.getHttpJsonVersion()); } public static ApiClientHeaderProvider.Builder defaultApiClientHeaderProviderBuilder() { return ServiceHealthStubSettings.defaultGrpcApiClientHeaderProviderBuilder(); } /** Returns a new gRPC builder for this class. */ public static Builder newBuilder() { return Builder.createDefault(); } /** Returns a new REST builder for this class. 
*/ public static Builder newHttpJsonBuilder() { return Builder.createHttpJsonDefault(); } /** Returns a new builder for this class. */ public static Builder newBuilder(ClientContext clientContext) { return new Builder(clientContext); } /** Returns a builder containing all the values of this settings class. */ public Builder toBuilder() { return new Builder(this); } protected ServiceHealthStubSettings(Builder settingsBuilder) throws IOException { super(settingsBuilder); listEventsSettings = settingsBuilder.listEventsSettings().build(); getEventSettings = settingsBuilder.getEventSettings().build(); listOrganizationEventsSettings = settingsBuilder.listOrganizationEventsSettings().build(); getOrganizationEventSettings = settingsBuilder.getOrganizationEventSettings().build(); listOrganizationImpactsSettings = settingsBuilder.listOrganizationImpactsSettings().build(); getOrganizationImpactSettings = settingsBuilder.getOrganizationImpactSettings().build(); listLocationsSettings = settingsBuilder.listLocationsSettings().build(); getLocationSettings = settingsBuilder.getLocationSettings().build(); } /** Builder for ServiceHealthStubSettings. 
*/ public static class Builder extends StubSettings.Builder<ServiceHealthStubSettings, Builder> { private final ImmutableList<UnaryCallSettings.Builder<?, ?>> unaryMethodSettingsBuilders; private final PagedCallSettings.Builder< ListEventsRequest, ListEventsResponse, ListEventsPagedResponse> listEventsSettings; private final UnaryCallSettings.Builder<GetEventRequest, Event> getEventSettings; private final PagedCallSettings.Builder< ListOrganizationEventsRequest, ListOrganizationEventsResponse, ListOrganizationEventsPagedResponse> listOrganizationEventsSettings; private final UnaryCallSettings.Builder<GetOrganizationEventRequest, OrganizationEvent> getOrganizationEventSettings; private final PagedCallSettings.Builder< ListOrganizationImpactsRequest, ListOrganizationImpactsResponse, ListOrganizationImpactsPagedResponse> listOrganizationImpactsSettings; private final UnaryCallSettings.Builder<GetOrganizationImpactRequest, OrganizationImpact> getOrganizationImpactSettings; private final PagedCallSettings.Builder< ListLocationsRequest, ListLocationsResponse, ListLocationsPagedResponse> listLocationsSettings; private final UnaryCallSettings.Builder<GetLocationRequest, Location> getLocationSettings; private static final ImmutableMap<String, ImmutableSet<StatusCode.Code>> RETRYABLE_CODE_DEFINITIONS; static { ImmutableMap.Builder<String, ImmutableSet<StatusCode.Code>> definitions = ImmutableMap.builder(); definitions.put( "retry_policy_1_codes", ImmutableSet.copyOf(Lists.<StatusCode.Code>newArrayList(StatusCode.Code.UNAVAILABLE))); definitions.put( "retry_policy_0_codes", ImmutableSet.copyOf(Lists.<StatusCode.Code>newArrayList(StatusCode.Code.UNAVAILABLE))); definitions.put("no_retry_codes", ImmutableSet.copyOf(Lists.<StatusCode.Code>newArrayList())); RETRYABLE_CODE_DEFINITIONS = definitions.build(); } private static final ImmutableMap<String, RetrySettings> RETRY_PARAM_DEFINITIONS; static { ImmutableMap.Builder<String, RetrySettings> definitions = ImmutableMap.builder(); 
RetrySettings settings = null; settings = RetrySettings.newBuilder() .setInitialRetryDelayDuration(Duration.ofMillis(1000L)) .setRetryDelayMultiplier(1.3) .setMaxRetryDelayDuration(Duration.ofMillis(10000L)) .setInitialRpcTimeoutDuration(Duration.ofMillis(60000L)) .setRpcTimeoutMultiplier(1.0) .setMaxRpcTimeoutDuration(Duration.ofMillis(60000L)) .setTotalTimeoutDuration(Duration.ofMillis(60000L)) .build(); definitions.put("retry_policy_1_params", settings); settings = RetrySettings.newBuilder() .setInitialRetryDelayDuration(Duration.ofMillis(1000L)) .setRetryDelayMultiplier(1.3) .setMaxRetryDelayDuration(Duration.ofMillis(10000L)) .setInitialRpcTimeoutDuration(Duration.ofMillis(60000L)) .setRpcTimeoutMultiplier(1.0) .setMaxRpcTimeoutDuration(Duration.ofMillis(60000L)) .setTotalTimeoutDuration(Duration.ofMillis(60000L)) .build(); definitions.put("retry_policy_0_params", settings); settings = RetrySettings.newBuilder().setRpcTimeoutMultiplier(1.0).build(); definitions.put("no_retry_params", settings); RETRY_PARAM_DEFINITIONS = definitions.build(); } protected Builder() { this(((ClientContext) null)); } protected Builder(ClientContext clientContext) { super(clientContext); listEventsSettings = PagedCallSettings.newBuilder(LIST_EVENTS_PAGE_STR_FACT); getEventSettings = UnaryCallSettings.newUnaryCallSettingsBuilder(); listOrganizationEventsSettings = PagedCallSettings.newBuilder(LIST_ORGANIZATION_EVENTS_PAGE_STR_FACT); getOrganizationEventSettings = UnaryCallSettings.newUnaryCallSettingsBuilder(); listOrganizationImpactsSettings = PagedCallSettings.newBuilder(LIST_ORGANIZATION_IMPACTS_PAGE_STR_FACT); getOrganizationImpactSettings = UnaryCallSettings.newUnaryCallSettingsBuilder(); listLocationsSettings = PagedCallSettings.newBuilder(LIST_LOCATIONS_PAGE_STR_FACT); getLocationSettings = UnaryCallSettings.newUnaryCallSettingsBuilder(); unaryMethodSettingsBuilders = ImmutableList.<UnaryCallSettings.Builder<?, ?>>of( listEventsSettings, getEventSettings, 
listOrganizationEventsSettings, getOrganizationEventSettings, listOrganizationImpactsSettings, getOrganizationImpactSettings, listLocationsSettings, getLocationSettings); initDefaults(this); } protected Builder(ServiceHealthStubSettings settings) { super(settings); listEventsSettings = settings.listEventsSettings.toBuilder(); getEventSettings = settings.getEventSettings.toBuilder(); listOrganizationEventsSettings = settings.listOrganizationEventsSettings.toBuilder(); getOrganizationEventSettings = settings.getOrganizationEventSettings.toBuilder(); listOrganizationImpactsSettings = settings.listOrganizationImpactsSettings.toBuilder(); getOrganizationImpactSettings = settings.getOrganizationImpactSettings.toBuilder(); listLocationsSettings = settings.listLocationsSettings.toBuilder(); getLocationSettings = settings.getLocationSettings.toBuilder(); unaryMethodSettingsBuilders = ImmutableList.<UnaryCallSettings.Builder<?, ?>>of( listEventsSettings, getEventSettings, listOrganizationEventsSettings, getOrganizationEventSettings, listOrganizationImpactsSettings, getOrganizationImpactSettings, listLocationsSettings, getLocationSettings); } private static Builder createDefault() { Builder builder = new Builder(((ClientContext) null)); builder.setTransportChannelProvider(defaultTransportChannelProvider()); builder.setCredentialsProvider(defaultCredentialsProviderBuilder().build()); builder.setInternalHeaderProvider(defaultApiClientHeaderProviderBuilder().build()); builder.setMtlsEndpoint(getDefaultMtlsEndpoint()); builder.setSwitchToMtlsEndpointAllowed(true); return initDefaults(builder); } private static Builder createHttpJsonDefault() { Builder builder = new Builder(((ClientContext) null)); builder.setTransportChannelProvider(defaultHttpJsonTransportProviderBuilder().build()); builder.setCredentialsProvider(defaultCredentialsProviderBuilder().build()); builder.setInternalHeaderProvider(defaultHttpJsonApiClientHeaderProviderBuilder().build()); 
builder.setMtlsEndpoint(getDefaultMtlsEndpoint()); builder.setSwitchToMtlsEndpointAllowed(true); return initDefaults(builder); } private static Builder initDefaults(Builder builder) { builder .listEventsSettings() .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("retry_policy_1_codes")) .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("retry_policy_1_params")); builder .getEventSettings() .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("retry_policy_0_codes")) .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("retry_policy_0_params")); builder .listOrganizationEventsSettings() .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("retry_policy_1_codes")) .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("retry_policy_1_params")); builder .getOrganizationEventSettings() .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("retry_policy_0_codes")) .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("retry_policy_0_params")); builder .listOrganizationImpactsSettings() .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("retry_policy_1_codes")) .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("retry_policy_1_params")); builder .getOrganizationImpactSettings() .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("retry_policy_0_codes")) .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("retry_policy_0_params")); builder .listLocationsSettings() .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("no_retry_codes")) .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("no_retry_params")); builder .getLocationSettings() .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("no_retry_codes")) .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("no_retry_params")); return builder; } /** * Applies the given settings updater function to all of the unary API methods in this service. * * <p>Note: This method does not support applying settings to streaming methods. 
*/ public Builder applyToAllUnaryMethods( ApiFunction<UnaryCallSettings.Builder<?, ?>, Void> settingsUpdater) { super.applyToAllUnaryMethods(unaryMethodSettingsBuilders, settingsUpdater); return this; } public ImmutableList<UnaryCallSettings.Builder<?, ?>> unaryMethodSettingsBuilders() { return unaryMethodSettingsBuilders; } /** Returns the builder for the settings used for calls to listEvents. */ public PagedCallSettings.Builder<ListEventsRequest, ListEventsResponse, ListEventsPagedResponse> listEventsSettings() { return listEventsSettings; } /** Returns the builder for the settings used for calls to getEvent. */ public UnaryCallSettings.Builder<GetEventRequest, Event> getEventSettings() { return getEventSettings; } /** Returns the builder for the settings used for calls to listOrganizationEvents. */ public PagedCallSettings.Builder< ListOrganizationEventsRequest, ListOrganizationEventsResponse, ListOrganizationEventsPagedResponse> listOrganizationEventsSettings() { return listOrganizationEventsSettings; } /** Returns the builder for the settings used for calls to getOrganizationEvent. */ public UnaryCallSettings.Builder<GetOrganizationEventRequest, OrganizationEvent> getOrganizationEventSettings() { return getOrganizationEventSettings; } /** Returns the builder for the settings used for calls to listOrganizationImpacts. */ public PagedCallSettings.Builder< ListOrganizationImpactsRequest, ListOrganizationImpactsResponse, ListOrganizationImpactsPagedResponse> listOrganizationImpactsSettings() { return listOrganizationImpactsSettings; } /** Returns the builder for the settings used for calls to getOrganizationImpact. */ public UnaryCallSettings.Builder<GetOrganizationImpactRequest, OrganizationImpact> getOrganizationImpactSettings() { return getOrganizationImpactSettings; } /** Returns the builder for the settings used for calls to listLocations. 
*/ public PagedCallSettings.Builder< ListLocationsRequest, ListLocationsResponse, ListLocationsPagedResponse> listLocationsSettings() { return listLocationsSettings; } /** Returns the builder for the settings used for calls to getLocation. */ public UnaryCallSettings.Builder<GetLocationRequest, Location> getLocationSettings() { return getLocationSettings; } @Override public ServiceHealthStubSettings build() throws IOException { return new ServiceHealthStubSettings(this); } } }
apache/uima-uimaj
36,429
uimaj-core/src/main/java/org/apache/uima/resource/impl/ResourceManager_impl.java
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.apache.uima.resource.impl; import java.io.File; import java.io.IOException; import java.io.InputStream; import java.net.MalformedURLException; import java.net.URL; import java.util.ArrayList; import java.util.Collections; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.Set; import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.atomic.AtomicBoolean; import java.util.concurrent.atomic.AtomicInteger; import org.apache.uima.UIMAFramework; import org.apache.uima.UIMA_IllegalStateException; import org.apache.uima.analysis_engine.impl.AnalysisEngineImplBase; import org.apache.uima.analysis_engine.impl.PearAnalysisEngineWrapper; import org.apache.uima.analysis_engine.impl.PearAnalysisEngineWrapper.StringPair; import org.apache.uima.internal.util.Class_TCCL; import org.apache.uima.internal.util.Misc; import org.apache.uima.internal.util.UIMAClassLoader; import org.apache.uima.resource.CasManager; import org.apache.uima.resource.DataResource; import org.apache.uima.resource.ExternalResourceDependency; import org.apache.uima.resource.ExternalResourceDescription; import org.apache.uima.resource.ParameterizedDataResource; import 
org.apache.uima.resource.RelativePathResolver;
import org.apache.uima.resource.Resource;
import org.apache.uima.resource.ResourceAccessException;
import org.apache.uima.resource.ResourceInitializationException;
import org.apache.uima.resource.ResourceManager;
import org.apache.uima.resource.ResourceSpecifier;
import org.apache.uima.resource.SharedResourceObject;
import org.apache.uima.resource.metadata.ExternalResourceBinding;
import org.apache.uima.resource.metadata.ResourceManagerConfiguration;
import org.apache.uima.util.Level;
import org.apache.uima.util.XMLizable;

/**
 * Reference implementation of {@link org.apache.uima.resource.ResourceManager}.
 */
public class ResourceManager_impl implements ResourceManager {

  /** Throttles the deprecation warning emitted by {@link #getImportUrlsCache()}. */
  private static final AtomicInteger IMPORT_URL_CACHE_WARNING_THROTTLE = new AtomicInteger();

  /**
   * Ties an External Resource instance to
   * <ul>
   * <li>its description
   * <ul>
   * <li>name
   * <li>textual description
   * <li>a ResourceSpecifier describing how to create it
   * <li>(optional) the String name of the Java class that implements the resource)
   * </ul>
   * <li>its defining UIMA Context
   * </ul>
   * These are used to validate multiple declarations, and to get a resource to tie it to a binding
   */
  protected static class ResourceRegistration {
    /**
     * For ParameterizedDataResources or DataResources, is the implementation object, which is an
     * arbitrary Java class implementing SharedDataResource (which has the "load" method).
     *
     * If the external resource specification omitted the implementation class, this holds the
     * Resource instance itself (e.g. a default FileResource).
     */
    Object resource;

    /** The external resource description this registration was created from. */
    ExternalResourceDescription description;

    /** Qualified context name of the component that declared this resource. */
    String definingContext;

    public ResourceRegistration(Object aResource, ExternalResourceDescription aDescription,
            String aDefiningContext) {
      resource = aResource;
      description = aDescription;
      definingContext = aDefiningContext;
    }
  }

  /**
   * resource bundle for log messages
   */
  protected static final String LOG_RESOURCE_BUNDLE = "org.apache.uima.impl.log_messages";

  /**
   * Sentinel stored in the parameterized-resource impl-class maps when no implementation class was
   * declared for a resource.
   */
  protected static final Class<Resource> EMPTY_RESOURCE_CLASS = Resource.class;

  private static final URL[] emptyURLarray = new URL[0];

  // set exactly once by destroy(); accessors fail fast afterwards.
  // Made final: it is never reassigned.
  private final AtomicBoolean isDestroyed = new AtomicBoolean(false);

  /**
   * a monitor lock for synchronizing get/set of casManager ref
   */
  private final Object casManagerMonitor = new Object();

  /**
   * Object used for resolving relative paths. This is built by parsing the data path.
   */
  private final RelativePathResolver mRelativePathResolver;

  /**
   * Map from qualified key names (declared in resource dependency XML) to either DataResource
   * objects or SharedResourceObjects that impl Resource.
   *
   * This map is many to one (multiple keys may refer to the same Resource object)
   *
   * Can't be concurrentMap because it (currently) depends on storing nulls
   *
   * Keeping the Map's 2nd argument as Object, although it could be Resource, for backwards
   * compatibility
   */
  protected final Map<String, Object> mResourceMap;

  /**
   * Internal map from resource names (declared in resource declaration XML) to
   * ResourceRegistration objects. Uses: for detecting if multiple declarations are made for the
   * same resource name; .values() for list of all resources that have been registered
   *
   * This is a one-to-one map.
   */
  protected final Map<String, ResourceRegistration> mInternalResourceRegistrationMap;

  /**
   * Map from String keys to Class objects. For ParameterizedResources only, stores the
   * implementation class (not a Resource) corresponding to each resource name.
   *
   * These class objects may or may not be Resource instances. They may be arbitrary classes,
   * except that they must implement SharedResourceObject.
   *
   * This is a many to one map; many keys may refer to the same class
   *
   * key = aQualifiedContextName + the key name in an external resource binding
   */
  protected final Map<String, Class<?>> mParameterizedResourceImplClassMap;

  /**
   * Internal map from resource names (declared in resource declaration XML) to Class objects for
   * parameterized Resource.
   *
   * These class objects may or may not be Resource instances. They may be arbitrary classes,
   * except that they must implement SharedResourceObject.
   *
   * These are "customized" when referenced, by parameter strings (such as language, for a
   * Dictionary resource). Used internally during resource initialization.
   *
   * key = external resource declared name.
   */
  protected final Map<String, Class<?>> mInternalParameterizedResourceImplClassMap;

  /**
   * Map from ParameterizedResourceKey to SharedResourceObject or DataResource instances.
   *
   * Note: SharedResourceObjects are not "Resource" instances, but rather arbitrary Java classes
   * that are the "implementations" of Resources. The only requirement on these Java classes is
   * that they implement SharedResourceObject interface.
   *
   * This map is for ParameterizedResources only, and stores the SharedResourceObjects or
   * DataResource objects that have already been instantiated and loaded.
   */
  protected final Map<List<Object>, Object> mParameterizedResourceInstanceMap;

  /**
   * UIMA extension ClassLoader. ClassLoader is created if an extension classpath is specified at
   * the ResourceManager
   *
   * volatile might be better than synch sets/gets
   */
  private volatile UIMAClassLoader uimaCL = null;

  /** CasManager - manages creation and pooling of CASes. */
  // volatile to support double-checked locking idiom
  protected volatile CasManager mCasManager = null;

  //@formatter:off
  /**
   * Cache of imported descriptors, so that parsed objects can be reused if the same URL is
   * imported more than once.
   *
   * All callers of this synchronize on the importCache object before doing a
   *   get
   *   ...
   *   put
   * sequence
   *
   * Use Case where synchronization is needed:
   *   running multiple instances on multiple threads, sharing a common resource manager,
   *   the initialization that merges typePriorities happens lazily, when using Cas Multipliers,
   *   and occurs when the first getCas call happens on a thread. Although these calls
   *   are synchronized among themselves, any other use of this map that might occur
   *   simultaneously is not.
   */
  //@formatter:on
  // leaving this as a synchronizedMap - for backwards compatibility
  // internal users do sync around get/set pairs anyways, but can't rely on
  // what external users do
  // Because internal users do a sync, only one thread at a time is using this
  // (for internal calls) anyways, so there's no advantage to the extra overhead
  // of making this a ConcurrentHashMap (March 2014)
  private Map<String, XMLizable> importCache = Collections.synchronizedMap(new HashMap<>());

  /**
   * Cache of imported descriptor URLs from which the parsed objects in importCache were created,
   * so that these URLs are not re-parsed if the same URL is imported again.
   */
  @Deprecated(since = "3.3.0")
  private Map<String, Set<String>> importUrlsCache = Collections.synchronizedMap(new HashMap<>());

  /**
   * Creates a new <code>ResourceManager_impl</code>.
   */
  public ResourceManager_impl() {
    mResourceMap = Collections.synchronizedMap(new HashMap<>());
    mInternalResourceRegistrationMap = new ConcurrentHashMap<>();
    mParameterizedResourceImplClassMap = new ConcurrentHashMap<>();
    mInternalParameterizedResourceImplClassMap = new ConcurrentHashMap<>();
    mParameterizedResourceInstanceMap = new ConcurrentHashMap<>();
    mRelativePathResolver = new RelativePathResolver_impl();
  }

  /**
   * Creates a new <code>ResourceManager_impl</code> with a custom ClassLoader to use for locating
   * resources.
   *
   * @param aClassLoader
   *          -
   */
  public ResourceManager_impl(ClassLoader aClassLoader) {
    mResourceMap = Collections.synchronizedMap(new HashMap<>());
    mInternalResourceRegistrationMap = new ConcurrentHashMap<>();
    mParameterizedResourceImplClassMap = new ConcurrentHashMap<>();
    mInternalParameterizedResourceImplClassMap = new ConcurrentHashMap<>();
    mParameterizedResourceInstanceMap = new ConcurrentHashMap<>();
    mRelativePathResolver = new RelativePathResolver_impl(aClassLoader);
  }

  /*
   * Version for Pear wrapper: shares the given maps instead of creating fresh ones.
   */
  public ResourceManager_impl(Map<String, Object> resourceMap,
          Map<String, ResourceRegistration> internalResourceRegistrationMap,
          Map<String, Class<?>> parameterizedResourceImplClassMap,
          Map<String, Class<?>> internalParameterizedResourceImplClassMap,
          Map<List<Object>, Object> parameterizedResourceInstanceMap) {
    mResourceMap = resourceMap;
    mInternalResourceRegistrationMap = internalResourceRegistrationMap;
    mParameterizedResourceImplClassMap = parameterizedResourceImplClassMap;
    mInternalParameterizedResourceImplClassMap = internalParameterizedResourceImplClassMap;
    mParameterizedResourceInstanceMap = parameterizedResourceInstanceMap;
    mRelativePathResolver = new RelativePathResolver_impl();
  }

  /**
   * Creates a shallow copy that shares all resource maps, the class loader, and the import caches
   * with this instance.
   *
   * @return the copy
   */
  public ResourceManager_impl copy() {
    ResourceManager_impl rm = new ResourceManager_impl(mResourceMap,
            mInternalResourceRegistrationMap, mParameterizedResourceImplClassMap,
            mInternalParameterizedResourceImplClassMap, mParameterizedResourceInstanceMap);
    // non-final fields init
    rm.uimaCL = uimaCL;
    rm.importCache = importCache;
    rm.importUrlsCache = importUrlsCache;
    return rm;
  }

  /**
   * Support reusing UIMA Class Loader instances to speed up things including the Component
   * Description Editor when obtaining info from CustomResourceSpecifiers
   * https://issues.apache.org/jira/browse/UIMA-1722
   *
   * @param uimaCL
   *          -
   * @param resolveResource
   *          -
   */
  public synchronized void setExtensionClassPath(UIMAClassLoader uimaCL, boolean resolveResource) {
    this.uimaCL = uimaCL;

    if (resolveResource) {
      // set UIMA extension ClassLoader also to resolve resources
      getRelativePathResolver().setPathResolverClassLoader(uimaCL);
    }
  }

  @Override
  public synchronized void setExtensionClassPath(String classpath, boolean resolveResource)
          throws MalformedURLException {
    // create UIMA extension ClassLoader with the given classpath
    uimaCL = new UIMAClassLoader(classpath, Class_TCCL.get_parent_cl());

    if (resolveResource) {
      // set UIMA extension ClassLoader also to resolve resources
      getRelativePathResolver().setPathResolverClassLoader(uimaCL);
    }
  }

  @Override
  public synchronized void setExtensionClassPath(ClassLoader parent, String classpath,
          boolean resolveResource) throws MalformedURLException {
    // create UIMA extension ClassLoader with the given classpath
    uimaCL = new UIMAClassLoader(classpath, parent);

    if (resolveResource) {
      // set UIMA extension ClassLoader also to resolve resources
      getRelativePathResolver().setPathResolverClassLoader(uimaCL);
    }
  }

  // https://issues.apache.org/jira/browse/UIMA-5553
  // https://issues.apache.org/jira/browse/UIMA-5609
  // synchronized because the other methods that set the extension class loader are.
  public synchronized void setExtensionClassLoaderImpl(ClassLoader classLoader,
          boolean resolveResource) {
    // wrap a plain ClassLoader in an (empty-classpath) UIMAClassLoader so downstream code can
    // rely on the UIMAClassLoader API
    uimaCL = (classLoader instanceof UIMAClassLoader) ? ((UIMAClassLoader) classLoader)
            : new UIMAClassLoader(emptyURLarray, classLoader);

    if (resolveResource) {
      // set UIMA extension ClassLoader also to resolve resources
      getRelativePathResolver().setPathResolverClassLoader(uimaCL);
    }
  }

  @Override
  public ClassLoader getExtensionClassLoader() {
    return uimaCL;
  }

  @Deprecated(since = "3.3.0")
  @Override
  public String getDataPath() {
    return getRelativePathResolver().getDataPath();
  }

  @Deprecated(since = "3.6.0")
  @Override
  public List<String> getDataPathElements() {
    return getRelativePathResolver().getDataPathElements();
  }

  @Override
  public List<URL> getDataPathUrls() {
    return getRelativePathResolver().getDataPathUrls();
  }

  @Deprecated(since = "3.6.0")
  @Override
  public void setDataPath(String aPath) throws MalformedURLException {
    getRelativePathResolver().setDataPath(aPath);
  }

  @Override
  public void setDataPathElements(String... aElements) throws MalformedURLException {
    getRelativePathResolver().setDataPathElements(aElements);
  }

  @Override
  public void setDataPathElements(File... aElements) throws MalformedURLException {
    getRelativePathResolver().setDataPathElements(aElements);
  }

  @Override
  public void setDataPathUrls(URL... aUrls) {
    getRelativePathResolver().setDataPathElements(aUrls);
  }

  @Override
  public URL resolveRelativePath(String aRelativePath) throws MalformedURLException {
    return getRelativePathResolver().resolveRelativePath(aRelativePath);
  }

  /** Fails fast if this resource manager has already been destroyed. */
  private void checkDestroyed() {
    if (isDestroyed.get()) {
      throw new IllegalStateException("ResourceManager is destroyed");
    }
  }

  @Override
  public Object getResource(String aName) throws ResourceAccessException {
    checkDestroyed();
    Object r = mResourceMap.get(aName);

    // if this is a ParameterizedDataResource, it is an error
    if (r instanceof ParameterizedDataResource) {
      throw new ResourceAccessException(ResourceAccessException.PARAMETERS_REQUIRED,
              new Object[] { aName });
    }
    return r;
  }

  @Override
  public Object getResource(String aName, String[] aParams) throws ResourceAccessException {
    // @formatter:off
    /* Multi-core design
     *   This may be called by user code sharing the same Resource Manager, and / or the same
     *   uima context object.
     *   Do double-checked idiom to avoid locking where resource is already available, loaded
     */
    // @formatter:on
    checkDestroyed();
    Object r = mResourceMap.get(aName);

    // if no resource found, return null
    if (r == null) {
      return null;
    }

    // if not a ParameterizedDataResource, it is an error
    if (!(r instanceof ParameterizedDataResource)) {
      throw new ResourceAccessException(ResourceAccessException.PARAMETERS_NOT_ALLOWED,
              new Object[] { aName });
    }
    ParameterizedDataResource pdr = (ParameterizedDataResource) r;

    // get a particular DataResource instance for the specified parameters
    DataResource dr;
    try {
      dr = pdr.getDataResource(aParams);
    } catch (ResourceInitializationException e) {
      throw new ResourceAccessException(e);
    }

    // see if we've already encountered this DataResource under this resource name
    List<Object> nameAndResource = new ArrayList<>(2);
    nameAndResource.add(aName);
    nameAndResource.add(dr);
    Object resourceInstance = mParameterizedResourceInstanceMap.get(nameAndResource);
    if (resourceInstance != null) {
      return resourceInstance;
    }

    synchronized (mParameterizedResourceInstanceMap) {
      // double-check idiom
      resourceInstance = mParameterizedResourceInstanceMap.get(nameAndResource);
      if (resourceInstance != null) {
        return resourceInstance;
      }

      // We haven't encountered this before. See if we need to instantiate a
      // SharedResourceObject
      Class<?> sharedResourceObjectClass = mParameterizedResourceImplClassMap.get(aName);
      if (sharedResourceObjectClass != EMPTY_RESOURCE_CLASS) {
        try {
          SharedResourceObject sro = (SharedResourceObject) sharedResourceObjectClass
                  .newInstance();
          sro.load(dr);
          mParameterizedResourceInstanceMap.put(nameAndResource, sro);
          return sro;
        } catch (InstantiationException | IllegalAccessException
                | ResourceInitializationException e) {
          // multi-catch: all three were previously wrapped identically
          throw new ResourceAccessException(e);
        }
      } else // no impl. class - just return the DataResource
      {
        mParameterizedResourceInstanceMap.put(nameAndResource, dr);
        return dr;
      }
    }
  }

  @Override
  public Class<?> getResourceClass(String aName) {
    checkDestroyed();
    Object r = mResourceMap.get(aName);
    if (r == null) // no such resource
    {
      return null;
    }

    // if this is a ParameterizedDataResource, look up its class
    if (r instanceof ParameterizedDataResource) {
      Class<?> customResourceClass = mParameterizedResourceImplClassMap.get(aName);
      if (customResourceClass == EMPTY_RESOURCE_CLASS) {
        // return the default class
        return DataResource_impl.class;
      }
      return customResourceClass;
    }

    // return r's Class (the unchecked cast to Class<? extends Resource> was unnecessary for
    // the declared Class<?> return type and has been removed)
    return r.getClass();
  }

  @Override
  public InputStream getResourceAsStream(String aKey, String... aParams)
          throws ResourceAccessException {
    return getResourceAsStreamCommon(getResource(aKey, aParams));
  }

  @Override
  public InputStream getResourceAsStream(String aKey) throws ResourceAccessException {
    return getResourceAsStreamCommon(getResource(aKey));
  }

  /** Opens a stream on the resource if it is a DataResource; otherwise returns null. */
  private InputStream getResourceAsStreamCommon(Object resource) throws ResourceAccessException {
    checkDestroyed();
    try {
      // instanceof already implies non-null; redundant null check removed
      if (resource instanceof DataResource) {
        return ((DataResource) resource).getInputStream();
      }
      return null;
    } catch (IOException e) {
      throw new ResourceAccessException(e);
    }
  }

  /** Returns the URL of the resource if it is a DataResource; otherwise returns null. */
  private URL getResourceAsStreamCommonUrl(Object resource) {
    // instanceof already implies non-null; redundant null check removed
    if (resource instanceof DataResource) {
      return ((DataResource) resource).getUrl();
    }
    return null;
  }

  @Override
  public URL getResourceURL(String aKey, String... aParams) throws ResourceAccessException {
    return getResourceAsStreamCommonUrl(getResource(aKey, aParams));
  }

  @Override
  public URL getResourceURL(String aKey) throws ResourceAccessException {
    return getResourceAsStreamCommonUrl(getResource(aKey));
  }

  //@formatter:off
  /*
   * (non-Javadoc)
   *
   * This method is called during Resource Initialization,
   *   - only for resources which are "local", that is, instances of ResourceCreationSpecifier
   *   - and therefore might have external resource declarations
   *
   * Compare with resolveAndValidateResourceDependencies, called for resource binding resolution.
   *
   * @see ResourceManager#initializeExternalResources(ResourceManagerConfiguration, String,
   * Map<String, Object>)
   */
  //@formatter:on
  @Override
  public synchronized void initializeExternalResources(ResourceManagerConfiguration aConfiguration,
          String aQualifiedContextName, Map<String, Object> aAdditionalParams)
          throws ResourceInitializationException {
    // register resources
    checkDestroyed();

    // set up aAdditionalParams to have this resource manager if not already set
    // so that External Resource instances created from this use this creating/owning
    // resource manager as the value returned from their getResourceManager call
    // see https://issues.apache.org/jira/browse/UIMA-5148
    if (null == aAdditionalParams) {
      aAdditionalParams = new HashMap<>();
    }

    if (!aAdditionalParams.containsKey(Resource.PARAM_RESOURCE_MANAGER)) {
      aAdditionalParams.put(Resource.PARAM_RESOURCE_MANAGER, this);
    }

    ExternalResourceDescription[] resources = aConfiguration.getExternalResources();
    for (ExternalResourceDescription resource : resources) {
      String name = resource.getName();

      // check for existing resource registration under this name
      ResourceRegistration registration = mInternalResourceRegistrationMap.get(name);
      if (registration == null) {
        registerResource(name, resource, aQualifiedContextName, aAdditionalParams);
      } else {
        // log a message if the resource definitions are not identical
        if (!registration.description.equals(resource)) {
          // if the resource was overridden in an enclosing aggregate, use an INFO level message.
          // if not (e.g. sibling annotators declare the same resource name), it's a WARNING.
          if (aQualifiedContextName.startsWith(registration.definingContext)) {
            UIMAFramework.getLogger().logrb(Level.CONFIG, ResourceManager_impl.class.getName(),
                    "initializeExternalResources", LOG_RESOURCE_BUNDLE,
                    "UIMA_overridden_resource__CONFIG",
                    new Object[] { name, aQualifiedContextName, registration.definingContext });
          } else {
            UIMAFramework.getLogger().logrb(Level.WARNING, ResourceManager_impl.class.getName(),
                    "initializeExternalResources", LOG_RESOURCE_BUNDLE,
                    "UIMA_duplicate_resource_name__WARNING",
                    new Object[] { name, registration.definingContext, aQualifiedContextName });
          }
        }
      }
    }

    // apply bindings
    ExternalResourceBinding[] bindings = aConfiguration.getExternalResourceBindings();
    for (ExternalResourceBinding binding : bindings) {
      ResourceRegistration registration = mInternalResourceRegistrationMap
              .get(binding.getResourceName());
      if (registration == null) {
        throw new ResourceInitializationException(
                ResourceInitializationException.UNKNOWN_RESOURCE_NAME,
                new Object[] { binding.getResourceName(), binding.getSourceUrlString() });
      }
      mResourceMap.put(aQualifiedContextName + binding.getKey(), registration.resource);
      // record the link from key to resource class (for parameterized resources only)
      Class<?> impl = mInternalParameterizedResourceImplClassMap.get(binding.getResourceName());
      mParameterizedResourceImplClassMap.put(aQualifiedContextName + binding.getKey(),
              (impl == null) ? EMPTY_RESOURCE_CLASS : impl);
    }
  }

  /*
   * (non-Javadoc)
   *
   * Called during resource initialization, when the resource has external resource bindings, to
   * resolve those bindings
   *
   * @see ResourceManager#resolveAndValidateResourceDependencies(ExternalResourceDependency[],
   * String)
   *
   * Multi-threaded. Partial avoidance of re-resolving, but if a resource fails to resolve, it
   * will be reattempted on every call
   */
  @Override
  public synchronized void resolveAndValidateResourceDependencies(
          ExternalResourceDependency[] aDependencies, String aQualifiedContextName)
          throws ResourceInitializationException {
    checkDestroyed();
    for (ExternalResourceDependency dependency : aDependencies) {
      // get resource
      String qname = aQualifiedContextName + dependency.getKey();
      // may or may not implement Resource, may implement SharedResourceObject
      Object resourceImpl = mResourceMap.get(qname);

      if (resourceImpl == null) {
        // no resource found - try to look up in classpath/datapath
        var relativeUrl = dependency.getKey();
        var absUrl = getRelativePathResolver().resolveRelativePath(relativeUrl);

        if (absUrl != null) {
          // found - create a DataResource object and store it in the mResourceMap
          var spec = new FileResourceSpecifier_impl();
          spec.setFileUrl(absUrl.toString());
          // produces an instance of DataResourceImpl
          resourceImpl = UIMAFramework.produceResource(spec, null);
          mResourceMap.put(qname, resourceImpl);
        }
      }

      if (resourceImpl == null) {
        // still no resource found - throw exception if required
        if (!dependency.isOptional()) {
          throw new ResourceInitializationException(
                  ResourceInitializationException.RESOURCE_DEPENDENCY_NOT_SATISFIED,
                  new Object[] { dependency.getKey(), dependency.getSourceUrlString() });
        }
      } else { // resource not null
        // make sure resource exists and implements the correct interface
        try {
          String name = dependency.getInterfaceName();
          if (name != null && name.length() > 0) {
            Class<?> theInterface = loadUserClass(name);
            Class<?> resourceClass = getResourceClass(qname);
            if (!theInterface.isAssignableFrom(resourceClass)) {
              throw new ResourceInitializationException(
                      ResourceInitializationException.RESOURCE_DOES_NOT_IMPLEMENT_INTERFACE,
                      new Object[] { qname, dependency.getInterfaceName(),
                          dependency.getSourceUrlString() });
            }
          }
        } catch (ClassNotFoundException e) {
          // preserve the cause (was previously dropped; registerResource keeps it - now
          // consistent)
          throw new ResourceInitializationException(
                  ResourceInitializationException.CLASS_NOT_FOUND,
                  new Object[] { dependency.getInterfaceName(), dependency.getSourceUrlString() },
                  e);
        }
      }
    }
  }

  /**
   * Instantiates a resource and inserts it in the internal resource map.
   */
  private void registerResource(String aName, ExternalResourceDescription aResourceDescription,
          String aDefiningContext, Map<String, Object> aResourceInitParams)
          throws ResourceInitializationException {
    // add the relative path resolver to the resource init. params
    Map<String, Object> initParams = (aResourceInitParams == null) ? new HashMap<>()
            : new HashMap<>(aResourceInitParams);
    initParams.put(DataResource.PARAM_RELATIVE_PATH_RESOLVER, getRelativePathResolver());

    // determine if verification mode is on. If so, we don't want to load the resource data
    boolean verificationMode = initParams
            .containsKey(AnalysisEngineImplBase.PARAM_VERIFICATION_MODE);

    // create the initial resource using the resource factory
    Resource r = UIMAFramework.produceResource(aResourceDescription.getResourceSpecifier(),
            initParams);

    // load implementation class (if any) and ensure that it implements
    // SharedResourceObject
    String implementationName = aResourceDescription.getImplementationName();
    Class<?> implClass = null; // might or might not impl Resource
    Object implInstance = r; // what will be registered, might be the Resource, or its
                             // implementation
    if (implementationName != null && implementationName.length() > 0) {
      try {
        implClass = loadUserClass(implementationName);
      } catch (ClassNotFoundException e) {
        throw new ResourceInitializationException(ResourceInitializationException.CLASS_NOT_FOUND,
                new Object[] { implementationName, aResourceDescription.getSourceUrlString() }, e);
      }

      if (!SharedResourceObject.class.isAssignableFrom(implClass)) {
        throw new ResourceInitializationException(
                ResourceInitializationException.NOT_A_SHARED_RESOURCE_OBJECT,
                new Object[] { implementationName, aResourceDescription.getSourceUrlString() });
      }
    }

    // is this a DataResource?
    if (r instanceof DataResource) {
      // instantiate and load the resource object if there is one
      if (implClass != null) {
        try {
          SharedResourceObject sro = (SharedResourceObject) implClass.newInstance();
          if (!verificationMode) {
            sro.load((DataResource) r);
          }
          implInstance = sro; // so the implementation is registered, for DataResources
        } catch (InstantiationException | IllegalAccessException e) {
          // multi-catch: both were previously wrapped identically
          throw new ResourceInitializationException(
                  ResourceInitializationException.COULD_NOT_INSTANTIATE,
                  new Object[] { implClass.getName(), aResourceDescription.getSourceUrlString() },
                  e);
        }
      }
    }
    // is it a ParameterizedDataResource?
    else if (r instanceof ParameterizedDataResource) {
      // we can't load the SharedResourceObject now, but we need to remember
      // which class it is for later when we get a request with parameters
      mInternalParameterizedResourceImplClassMap.put(aName,
              (null == implClass) ? EMPTY_RESOURCE_CLASS : implClass);
    } else { // it is some other type of Resource
      // it is an error to specify an implementation class in this case
      if (implClass != null) {
        throw new ResourceInitializationException(
                ResourceInitializationException.NOT_A_DATA_RESOURCE, new Object[] {
                    implClass.getName(), aName, r.getClass().getName(),
                    aResourceDescription.getSourceUrlString() });
      }
    }

    // put resource or its implementation (for DataResources) in internal map for later retrieval
    ResourceRegistration registration = new ResourceRegistration(implInstance,
            aResourceDescription, aDefiningContext);
    mInternalResourceRegistrationMap.put(aName, registration);
  }

  @Override
  public CasManager getCasManager() {
    // Optimization for case where mCasManager already created
    // Some sync contention was observed - this makes it less. UIMA-4012
    if (mCasManager != null) {
      return mCasManager;
    }
    synchronized (casManagerMonitor) {
      if (mCasManager == null) {
        mCasManager = new CasManager_impl(this);
      }
      return mCasManager;
    }
  }

  @Override
  public void setCasManager(CasManager aCasManager) {
    synchronized (casManagerMonitor) {
      if (mCasManager == null) {
        mCasManager = aCasManager;
      } else {
        throw new UIMA_IllegalStateException(UIMA_IllegalStateException.CANNOT_SET_CAS_MANAGER,
                new Object[0]);
      }
    }
  }

  // This method overridden by subclass for pear wrapper
  protected RelativePathResolver getRelativePathResolver() {
    return mRelativePathResolver;
  }

  @Deprecated(since = "3.3.0")
  @Override
  public Map<String, XMLizable> getImportCache() {
    return importCache;
  }

  /**
   * Was used during import resolving until UIMA 3.2.0. Is no longer used since import resolving
   * has been migrated to the {@code org.apache.uima.resource.metadata.impl.ImportResolver}.
   *
   * @deprecated No longer used. Scheduled for removal in UIMA 4.0.
   */
  @Deprecated(since = "3.3.0")
  public Map<String, Set<String>> getImportUrlsCache() {
    Misc.decreasingWithTrace(IMPORT_URL_CACHE_WARNING_THROTTLE,
            "ResourceManager_impl.getImportUrlsCache() should not be called. It is no longer "
                    + "filled during import resolving. The method will be removed in a future UIMA "
                    + "version.",
            UIMAFramework.getLogger());
    return importUrlsCache;
  }

  @Override
  public <N> Class<N> loadUserClass(String name) throws ClassNotFoundException {
    return Class_TCCL.forName(name, this, true);
  }

  public static Class<?> loadUserClass(String name, ResourceManager rm)
          throws ClassNotFoundException {
    return Class_TCCL.forName(name, rm, true);
  }

  public static Class<?> loadUserClassOrThrow(String name, ResourceManager rm,
          ResourceSpecifier aSpecifier) throws ResourceInitializationException {
    try {
      return Class_TCCL.forName(name, rm, true);
    } catch (ClassNotFoundException e) {
      throw new ResourceInitializationException(ResourceInitializationException.CLASS_NOT_FOUND,
              new Object[] { name, aSpecifier.getSourceUrlString() }, e);
    }
  }

  @Override
  public void destroy() {
    boolean alreadyDestroyed = isDestroyed.getAndSet(true);
    if (alreadyDestroyed) {
      return;
    }

    for (ResourceRegistration rr : mInternalResourceRegistrationMap.values()) {
      if (rr.resource instanceof Resource) {
        ((Resource) rr.resource).destroy();
      }
    }
    for (Object r : mParameterizedResourceInstanceMap.values()) {
      if (r instanceof Resource) {
        ((Resource) r).destroy();
      }
    }

    if (uimaCL != null) {
      try {
        uimaCL.close();
      } catch (IOException e) {
        UIMAFramework.getLogger().logrb(Level.WARNING, ResourceManager_impl.class.getName(),
                "destroy", LOG_RESOURCE_BUNDLE, "UIMA_Classloader_close_exception", e);
      }
    }

    // https://issues.apache.org/jira/browse/UIMA-5935
    Map<ResourceManager, Map<PearAnalysisEngineWrapper.StringPair, ResourceManager>> cachedResourceManagers = PearAnalysisEngineWrapper
            .getCachedResourceManagers();
    synchronized (cachedResourceManagers) {
      Map<StringPair, ResourceManager> c1 = cachedResourceManagers.get(this);
      if (c1 != null) {
        for (ResourceManager rm : c1.values()) {
          rm.destroy();
        }
      }
    }

    // not clearing mResourceMap, mInternalResourceRegistrationMap,
    // mParameterizedResourceImplClassMap,
    // mInternalParameterizedResourceImplClassMap, mParameterizedResourceInstanceMap
    // because these could be shared with other resource managers

    // not clearing importCache, importUrlsCache - might be in used by other Resource Managers
    // (shared)

    // no destroy of caspool at this time
  }

  @Override
  public List<Object> getExternalResources() {
    List<Object> rs = new ArrayList<>();
    for (ResourceRegistration r : mInternalResourceRegistrationMap.values()) {
      // BUG FIX: the previous check tested the ResourceRegistration wrapper itself
      // ("r instanceof ParameterizedDataResource"), which is never true, so parameterized
      // resources were not filtered out as intended. Test the registered instance instead;
      // instances of parameterized resources are contributed by the second loop below.
      if (!(r.resource instanceof ParameterizedDataResource)) {
        rs.add(r.resource);
      }
    }

    rs.addAll(mParameterizedResourceInstanceMap.values());

    return rs;
  }
}
apache/druid
36,264
indexing-service/src/main/java/org/apache/druid/indexing/overlord/ForkingTaskRunner.java
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.apache.druid.indexing.overlord; import com.fasterxml.jackson.core.type.TypeReference; import com.fasterxml.jackson.databind.ObjectMapper; import com.google.common.annotations.VisibleForTesting; import com.google.common.base.Joiner; import com.google.common.base.Optional; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; import com.google.common.collect.ImmutableSet; import com.google.common.collect.Sets; import com.google.common.io.ByteSink; import com.google.common.io.ByteStreams; import com.google.common.io.FileWriteMode; import com.google.common.io.Files; import com.google.common.math.IntMath; import com.google.common.util.concurrent.ListenableFuture; import com.google.common.util.concurrent.ListeningExecutorService; import com.google.common.util.concurrent.MoreExecutors; import com.google.inject.Inject; import org.apache.druid.guice.annotations.Self; import org.apache.druid.indexer.RunnerTaskState; import org.apache.druid.indexer.TaskLocation; import org.apache.druid.indexer.TaskStatus; import org.apache.druid.indexing.common.TaskStorageDirTracker; import org.apache.druid.indexing.common.config.TaskConfig; import 
org.apache.druid.indexing.common.task.Task; import org.apache.druid.indexing.common.tasklogs.ConsoleLoggingEnforcementConfigurationFactory; import org.apache.druid.indexing.common.tasklogs.LogUtils; import org.apache.druid.indexing.overlord.autoscaling.ScalingStats; import org.apache.druid.indexing.overlord.config.ForkingTaskRunnerConfig; import org.apache.druid.indexing.worker.config.WorkerConfig; import org.apache.druid.java.util.common.DateTimes; import org.apache.druid.java.util.common.FileUtils; import org.apache.druid.java.util.common.ISE; import org.apache.druid.java.util.common.StringUtils; import org.apache.druid.java.util.common.concurrent.Execs; import org.apache.druid.java.util.common.io.Closer; import org.apache.druid.java.util.common.lifecycle.LifecycleStart; import org.apache.druid.java.util.common.lifecycle.LifecycleStop; import org.apache.druid.java.util.emitter.EmittingLogger; import org.apache.druid.query.DruidMetrics; import org.apache.druid.server.DruidNode; import org.apache.druid.server.log.StartupLoggingConfig; import org.apache.druid.server.metrics.MonitorsConfig; import org.apache.druid.server.metrics.WorkerTaskCountStatsProvider; import org.apache.druid.tasklogs.TaskLogPusher; import org.apache.druid.tasklogs.TaskLogStreamer; import org.apache.druid.utils.JvmUtils; import org.joda.time.DateTime; import org.joda.time.Interval; import javax.annotation.Nullable; import java.io.File; import java.io.IOException; import java.io.InputStream; import java.io.OutputStream; import java.math.RoundingMode; import java.nio.file.Paths; import java.util.ArrayList; import java.util.Arrays; import java.util.Collection; import java.util.Iterator; import java.util.List; import java.util.Map; import java.util.Properties; import java.util.Set; import java.util.concurrent.Callable; import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicLong; /** * Runs tasks in separate processes using the "internal peon" verb. 
*/ public class ForkingTaskRunner extends BaseRestorableTaskRunner<ForkingTaskRunner.ForkingTaskRunnerWorkItem> implements TaskLogStreamer, WorkerTaskCountStatsProvider { private static final EmittingLogger LOGGER = new EmittingLogger(ForkingTaskRunner.class); private static final String CHILD_PROPERTY_PREFIX = "druid.indexer.fork.property."; /** * Properties to add on Java 11+. When updating this list, update all four: * 1) ForkingTaskRunner#STRONG_ENCAPSULATION_PROPERTIES (here) --> * 2) docs/operations/java.md, "Strong encapsulation" section --> * 3) pom.xml, jdk.strong.encapsulation.argLine --> * 4) examples/bin/run-java script */ private static final List<String> STRONG_ENCAPSULATION_PROPERTIES = ImmutableList.of( "--add-exports=java.base/jdk.internal.misc=ALL-UNNAMED", "--add-exports=java.base/jdk.internal.ref=ALL-UNNAMED", "--add-opens=java.base/java.nio=ALL-UNNAMED", "--add-opens=java.base/sun.nio.ch=ALL-UNNAMED", "--add-opens=java.base/jdk.internal.ref=ALL-UNNAMED", "--add-opens=java.base/java.io=ALL-UNNAMED", "--add-opens=java.base/java.lang=ALL-UNNAMED", "--add-opens=jdk.management/com.sun.management.internal=ALL-UNNAMED" ); private final ForkingTaskRunnerConfig config; private final Properties props; private final TaskLogPusher taskLogPusher; private final DruidNode node; private final ListeningExecutorService exec; private final PortFinder portFinder; private final StartupLoggingConfig startupLoggingConfig; private final WorkerConfig workerConfig; private volatile int numProcessorsPerTask = -1; private volatile boolean stopping = false; private final AtomicLong lastReportedFailedTaskCount = new AtomicLong(); private final AtomicLong failedTaskCount = new AtomicLong(); private final AtomicLong successfulTaskCount = new AtomicLong(); private final AtomicLong lastReportedSuccessfulTaskCount = new AtomicLong(); @Inject public ForkingTaskRunner( ForkingTaskRunnerConfig config, TaskConfig taskConfig, WorkerConfig workerConfig, Properties props, TaskLogPusher 
taskLogPusher, ObjectMapper jsonMapper, @Self DruidNode node, StartupLoggingConfig startupLoggingConfig, TaskStorageDirTracker dirTracker ) { super(jsonMapper, taskConfig, dirTracker); this.config = config; this.props = props; this.taskLogPusher = taskLogPusher; this.node = node; this.portFinder = new PortFinder(config.getStartPort(), config.getEndPort(), config.getPorts()); this.startupLoggingConfig = startupLoggingConfig; this.workerConfig = workerConfig; this.exec = MoreExecutors.listeningDecorator( Execs.multiThreaded(workerConfig.getCapacity(), "forking-task-runner-%d") ); } @Override public ListenableFuture<TaskStatus> run(final Task task) { synchronized (tasks) { tasks.computeIfAbsent( task.getId(), k -> new ForkingTaskRunnerWorkItem( task, exec.submit( new Callable<>() { @Override public TaskStatus call() { final TaskStorageDirTracker.StorageSlot storageSlot; try { storageSlot = getTracker().pickStorageSlot(task.getId()); } catch (RuntimeException e) { LOG.warn(e, "Failed to get storage slot for task [%s], cannot schedule.", task.getId()); return TaskStatus.failure( task.getId(), StringUtils.format("Failed to get storage slot due to error [%s]", e.getMessage()) ); } final File taskDir = new File(storageSlot.getDirectory(), task.getId()); final String attemptId = String.valueOf(getNextAttemptID(taskDir)); final File attemptDir = Paths.get(taskDir.getAbsolutePath(), "attempt", attemptId).toFile(); final ProcessHolder processHolder; final String childHost = node.getHost(); int childPort = -1; int tlsChildPort = -1; if (node.isEnablePlaintextPort()) { childPort = portFinder.findUnusedPort(); } if (node.isEnableTlsPort()) { tlsChildPort = portFinder.findUnusedPort(); } final TaskLocation taskLocation = TaskLocation.create(childHost, childPort, tlsChildPort); try { final Closer closer = Closer.create(); try { final File taskFile = new File(taskDir, "task.json"); final File statusFile = new File(attemptDir, "status.json"); final File logFile = new File(taskDir, 
"log"); final File reportsFile = new File(attemptDir, "report.json"); // time to adjust process holders synchronized (tasks) { final ForkingTaskRunnerWorkItem taskWorkItem = tasks.get(task.getId()); if (taskWorkItem == null) { LOGGER.makeAlert("TaskInfo disappeared!").addData("task", task.getId()).emit(); throw new ISE("TaskInfo disappeared for task[%s]!", task.getId()); } if (taskWorkItem.shutdown) { throw new IllegalStateException("Task has been shut down!"); } if (taskWorkItem.processHolder != null) { LOGGER.makeAlert("TaskInfo already has a processHolder") .addData("task", task.getId()) .emit(); throw new ISE("TaskInfo already has processHolder for task[%s]!", task.getId()); } final CommandListBuilder command = new CommandListBuilder(); final String taskClasspath; if (task.getClasspathPrefix() != null && !task.getClasspathPrefix().isEmpty()) { taskClasspath = Joiner.on(File.pathSeparator).join( task.getClasspathPrefix(), config.getClasspath() ); } else { taskClasspath = config.getClasspath(); } command.add(config.getJavaCommand()); if (JvmUtils.majorVersion() >= 11) { command.addAll(STRONG_ENCAPSULATION_PROPERTIES); } command.add("-cp"); command.add(taskClasspath); if (numProcessorsPerTask < 1) { // numProcessorsPerTask is set by start() throw new ISE("Not started"); } command.add(StringUtils.format("-XX:ActiveProcessorCount=%d", numProcessorsPerTask)); command.addAll(new QuotableWhiteSpaceSplitter(config.getJavaOpts())); command.addAll(config.getJavaOptsArray()); // Override task specific javaOpts Object taskJavaOpts = task.getContextValue( ForkingTaskRunnerConfig.JAVA_OPTS_PROPERTY ); if (taskJavaOpts != null) { command.addAll(new QuotableWhiteSpaceSplitter((String) taskJavaOpts)); } // Override task specific javaOptsArray try { List<String> taskJavaOptsArray = jsonMapper.convertValue( task.getContextValue(ForkingTaskRunnerConfig.JAVA_OPTS_ARRAY_PROPERTY), new TypeReference<>() {} ); if (taskJavaOptsArray != null) { command.addAll(taskJavaOptsArray); } } 
catch (Exception e) { throw new IllegalArgumentException( ForkingTaskRunnerConfig.JAVA_OPTS_ARRAY_PROPERTY + " in context of task: " + task.getId() + " must be an array of strings.", e ); } for (String propName : props.stringPropertyNames()) { for (String allowedPrefix : config.getAllowedPrefixes()) { // See https://github.com/apache/druid/issues/1841 if (propName.startsWith(allowedPrefix) && !ForkingTaskRunnerConfig.JAVA_OPTS_PROPERTY.equals(propName) && !ForkingTaskRunnerConfig.JAVA_OPTS_ARRAY_PROPERTY.equals(propName) ) { command.addSystemProperty(propName, props.getProperty(propName)); } } } // Override child JVM specific properties for (String propName : props.stringPropertyNames()) { if (propName.startsWith(CHILD_PROPERTY_PREFIX)) { command.addSystemProperty( propName.substring(CHILD_PROPERTY_PREFIX.length()), props.getProperty(propName) ); } } // Override task specific properties final Map<String, Object> context = task.getContext(); if (context != null) { for (String propName : context.keySet()) { if (propName.startsWith(CHILD_PROPERTY_PREFIX)) { Object contextValue = task.getContextValue(propName); if (contextValue != null) { command.addSystemProperty( propName.substring(CHILD_PROPERTY_PREFIX.length()), String.valueOf(contextValue) ); } } } } // add the attemptId as a system property command.addSystemProperty("attemptId", "1"); // Add dataSource, taskId and taskType for metrics or logging command.addSystemProperty( MonitorsConfig.METRIC_DIMENSION_PREFIX + DruidMetrics.DATASOURCE, task.getDataSource() ); command.addSystemProperty( MonitorsConfig.METRIC_DIMENSION_PREFIX + DruidMetrics.TASK_ID, task.getId() ); command.addSystemProperty( MonitorsConfig.METRIC_DIMENSION_PREFIX + DruidMetrics.TASK_TYPE, task.getType() ); command.addSystemProperty( MonitorsConfig.METRIC_DIMENSION_PREFIX + DruidMetrics.GROUP_ID, task.getGroupId() ); command.addSystemProperty("druid.host", childHost); command.addSystemProperty("druid.plaintextPort", childPort); 
command.addSystemProperty("druid.tlsPort", tlsChildPort); // Let tasks know where they are running on. // This information is used in native parallel indexing with shuffle. command.addSystemProperty("druid.task.executor.service", node.getServiceName()); command.addSystemProperty("druid.task.executor.host", node.getHost()); command.addSystemProperty("druid.task.executor.plaintextPort", node.getPlaintextPort()); command.addSystemProperty("druid.task.executor.enablePlaintextPort", node.isEnablePlaintextPort()); command.addSystemProperty("druid.task.executor.tlsPort", node.getTlsPort()); command.addSystemProperty("druid.task.executor.enableTlsPort", node.isEnableTlsPort()); command.addSystemProperty("log4j2.configurationFactory", ConsoleLoggingEnforcementConfigurationFactory.class.getName()); command.addSystemProperty("druid.indexer.task.baseTaskDir", storageSlot.getDirectory().getAbsolutePath()); command.addSystemProperty("druid.indexer.task.tmpStorageBytesPerTask", storageSlot.getNumBytes()); command.add("org.apache.druid.cli.Main"); command.add("internal"); command.add("peon"); command.add(taskDir.toString()); command.add(attemptId); String nodeType = task.getNodeType(); if (nodeType != null) { command.add("--nodeType"); command.add(nodeType); } // If the task type is queryable, we need to load broadcast segments on the peon, used for // join queries. This is replaced by --loadBroadcastDatasourceMode option, but is preserved here // for backwards compatibility and can be removed in a future release. 
if (task.supportsQueries()) { command.add("--loadBroadcastSegments"); command.add("true"); } command.add("--loadBroadcastDatasourceMode"); command.add(task.getBroadcastDatasourceLoadingSpec().getMode().toString()); if (!taskFile.exists()) { jsonMapper.writeValue(taskFile, task); } LOGGER.info( "Running command[%s]", getMaskedCommand(startupLoggingConfig.getMaskProperties(), command.getCommandList()) ); taskWorkItem.processHolder = runTaskProcess(command.getCommandList(), logFile, taskLocation); processHolder = taskWorkItem.processHolder; processHolder.registerWithCloser(closer); } TaskRunnerUtils.notifyLocationChanged(listeners, task.getId(), taskLocation); TaskRunnerUtils.notifyStatusChanged( listeners, task.getId(), TaskStatus.running(task.getId()) ); LOGGER.info("Logging output of task[%s] to file[%s].", task.getId(), logFile); final int exitCode = waitForTaskProcessToComplete(task, processHolder, logFile, reportsFile); final TaskStatus status; if (exitCode == 0) { LOGGER.info("Process exited successfully for task[%s]", task.getId()); // Process exited successfully status = jsonMapper.readValue(statusFile, TaskStatus.class); } else { LOGGER.error("Process exited with code[%d] for task[%s]", exitCode, task.getId()); // Process exited unsuccessfully status = TaskStatus.failure( task.getId(), StringUtils.format( "Task execution process exited unsuccessfully with code[%s]. 
" + "See middleManager logs for more details.", exitCode ) ); } if (status.isSuccess()) { successfulTaskCount.incrementAndGet(); } else { failedTaskCount.incrementAndGet(); } TaskRunnerUtils.notifyStatusChanged(listeners, task.getId(), status); return status; } catch (Throwable t) { throw closer.rethrow(t); } finally { closer.close(); } } catch (Throwable t) { LOGGER.info(t, "Exception caught during execution"); throw new RuntimeException(t); } finally { try { synchronized (tasks) { final ForkingTaskRunnerWorkItem taskWorkItem = tasks.remove(task.getId()); if (taskWorkItem != null && taskWorkItem.processHolder != null) { taskWorkItem.processHolder.shutdown(); } if (!stopping) { saveRunningTasks(); } } if (node.isEnablePlaintextPort()) { portFinder.markPortUnused(childPort); } if (node.isEnableTlsPort()) { portFinder.markPortUnused(tlsChildPort); } getTracker().returnStorageSlot(storageSlot); try { if (!stopping && taskDir.exists()) { FileUtils.deleteDirectory(taskDir); LOGGER.info("Removing task directory: %s", taskDir); } } catch (Exception e) { LOGGER.makeAlert(e, "Failed to delete task directory") .addData("taskDir", taskDir.toString()) .addData("task", task.getId()) .emit(); } } catch (Exception e) { LOGGER.error(e, "Suppressing exception caught while cleaning up task"); } } } } ) ) ); saveRunningTasks(); return tasks.get(task.getId()).getResult(); } } @VisibleForTesting ProcessHolder runTaskProcess(List<String> command, File logFile, TaskLocation taskLocation) throws IOException { return new ProcessHolder( new ProcessBuilder(ImmutableList.copyOf(command)).redirectErrorStream(true).start(), logFile, taskLocation ); } @VisibleForTesting int waitForTaskProcessToComplete(Task task, ProcessHolder processHolder, File logFile, File reportsFile) throws IOException, InterruptedException { final ByteSink logSink = Files.asByteSink(logFile, FileWriteMode.APPEND); // This will block for a while. 
So we append the thread information with more details final String priorThreadName = Thread.currentThread().getName(); Thread.currentThread().setName(StringUtils.format("%s-[%s]", priorThreadName, task.getId())); try (final OutputStream toLogfile = logSink.openStream()) { ByteStreams.copy(processHolder.process.getInputStream(), toLogfile); return processHolder.process.waitFor(); } finally { Thread.currentThread().setName(priorThreadName); // Upload task logs try { taskLogPusher.pushTaskLog(task.getId(), logFile); } catch (IOException e) { LOGGER.error("Task[%s] failed to push task logs to [%s]: Exception[%s]", task.getId(), logFile.getName(), e.getMessage()); } if (reportsFile.exists()) { try { taskLogPusher.pushTaskReports(task.getId(), reportsFile); } catch (IOException e) { LOGGER.error("Task[%s] failed to push task reports to [%s]: Exception[%s]", task.getId(), reportsFile.getName(), e.getMessage()); } } } } @Override @LifecycleStop public void stop() { stopping = true; exec.shutdown(); synchronized (tasks) { for (ForkingTaskRunnerWorkItem taskWorkItem : tasks.values()) { shutdownTaskProcess(taskWorkItem); } } final DateTime start = DateTimes.nowUtc(); final long timeout = new Interval(start, taskConfig.getGracefulShutdownTimeout()).toDurationMillis(); // Things should be terminating now. Wait for it to happen so logs can be uploaded and all that good stuff. 
LOGGER.info("Waiting up to %,dms for shutdown.", timeout); if (timeout > 0) { try { final boolean terminated = exec.awaitTermination(timeout, TimeUnit.MILLISECONDS); final long elapsed = System.currentTimeMillis() - start.getMillis(); if (terminated) { LOGGER.info("Finished stopping in %,dms.", elapsed); } else { final Set<String> stillRunning; synchronized (tasks) { stillRunning = ImmutableSet.copyOf(tasks.keySet()); } LOGGER.makeAlert("Failed to stop forked tasks") .addData("stillRunning", stillRunning) .addData("elapsed", elapsed) .emit(); LOGGER.warn( "Executor failed to stop after %,dms, not waiting for it! Tasks still running: [%s]", elapsed, Joiner.on("; ").join(stillRunning) ); } } catch (InterruptedException e) { LOGGER.warn(e, "Interrupted while waiting for executor to finish."); Thread.currentThread().interrupt(); } } else { LOGGER.warn("Ran out of time, not waiting for executor to finish!"); } } @Override public void shutdown(final String taskid, String reason) { LOGGER.info("Shutdown [%s] because: [%s]", taskid, reason); final ForkingTaskRunnerWorkItem taskInfo; synchronized (tasks) { taskInfo = tasks.get(taskid); if (taskInfo == null) { LOGGER.info("Ignoring request to cancel unknown task: %s", taskid); return; } taskInfo.shutdown = true; shutdownTaskProcess(taskInfo); } } @Override public Collection<TaskRunnerWorkItem> getRunningTasks() { synchronized (tasks) { final List<TaskRunnerWorkItem> ret = new ArrayList<>(); for (final ForkingTaskRunnerWorkItem taskWorkItem : tasks.values()) { if (taskWorkItem.processHolder != null) { ret.add(taskWorkItem); } } return ret; } } @Override public Collection<TaskRunnerWorkItem> getPendingTasks() { synchronized (tasks) { final List<TaskRunnerWorkItem> ret = new ArrayList<>(); for (final ForkingTaskRunnerWorkItem taskWorkItem : tasks.values()) { if (taskWorkItem.processHolder == null) { ret.add(taskWorkItem); } } return ret; } } @Nullable @Override public RunnerTaskState getRunnerTaskState(String taskId) { final 
ForkingTaskRunnerWorkItem workItem = tasks.get(taskId); if (workItem == null) { return null; } else { if (workItem.processHolder == null) { return RunnerTaskState.PENDING; } else if (workItem.processHolder.process.isAlive()) { return RunnerTaskState.RUNNING; } else { return RunnerTaskState.NONE; } } } @Override public Optional<ScalingStats> getScalingStats() { return Optional.absent(); } @Override @LifecycleStart public void start() { setNumProcessorsPerTask(); } @Override public Optional<InputStream> streamTaskLog(final String taskid, final long offset) throws IOException { final ProcessHolder processHolder; synchronized (tasks) { final ForkingTaskRunnerWorkItem taskWorkItem = tasks.get(taskid); if (taskWorkItem != null && taskWorkItem.processHolder != null) { processHolder = taskWorkItem.processHolder; } else { return Optional.absent(); } } return Optional.of(LogUtils.streamFile(processHolder.logFile, offset)); } /** * Close task output stream (input stream of process) sending EOF telling process to terminate, destroying the process * if an exception is encountered. */ private void shutdownTaskProcess(ForkingTaskRunnerWorkItem taskInfo) { if (taskInfo.processHolder != null) { // Will trigger normal failure mechanisms due to process exit LOGGER.info("Closing output stream to task[%s].", taskInfo.getTask().getId()); try { taskInfo.processHolder.process.getOutputStream().close(); } catch (Exception e) { LOGGER.warn(e, "Failed to close stdout to task[%s]. 
Destroying task.", taskInfo.getTask().getId()); taskInfo.processHolder.process.destroy(); } } } public static String getMaskedCommand(List<String> maskedProperties, List<String> command) { final Set<String> maskedPropertiesSet = Sets.newHashSet(maskedProperties); final Iterator<String> maskedIterator = command.stream().map(element -> { String[] splits = element.split("=", 2); if (splits.length == 2) { for (String masked : maskedPropertiesSet) { if (splits[0].contains(masked)) { return StringUtils.format("%s=%s", splits[0], "<masked>"); } } } return element; }).iterator(); return Joiner.on(" ").join(maskedIterator); } @Override public Map<String, Long> getTotalTaskSlotCount() { return Map.of(workerConfig.getCategory(), getWorkerTotalTaskSlotCount()); } @Override public Map<String, Long> getIdleTaskSlotCount() { return Map.of( workerConfig.getCategory(), Math.max(getWorkerTotalTaskSlotCount() - getWorkerUsedTaskSlotCount(), 0) ); } @Override public Map<String, Long> getUsedTaskSlotCount() { return Map.of(workerConfig.getCategory(), getWorkerUsedTaskSlotCount()); } @Override public Map<String, Long> getLazyTaskSlotCount() { return ImmutableMap.of(workerConfig.getCategory(), 0L); } @Override public Map<String, Long> getBlacklistedTaskSlotCount() { return ImmutableMap.of(workerConfig.getCategory(), 0L); } @Override public Long getWorkerFailedTaskCount() { long failedTaskCount = this.failedTaskCount.get(); long lastReportedFailedTaskCount = this.lastReportedFailedTaskCount.get(); this.lastReportedFailedTaskCount.set(failedTaskCount); return failedTaskCount - lastReportedFailedTaskCount; } @Override public Long getWorkerIdleTaskSlotCount() { return Math.max(getWorkerTotalTaskSlotCount() - getWorkerUsedTaskSlotCount(), 0); } @Override public Long getWorkerUsedTaskSlotCount() { return getTracker().getNumUsedSlots(); } @Override public Long getWorkerTotalTaskSlotCount() { return (long) workerConfig.getCapacity(); } @Override public String getWorkerCategory() { return 
workerConfig.getCategory(); } @Override public String getWorkerVersion() { return workerConfig.getVersion(); } @Override public Long getWorkerSuccessfulTaskCount() { long successfulTaskCount = this.successfulTaskCount.get(); long lastReportedSuccessfulTaskCount = this.lastReportedSuccessfulTaskCount.get(); this.lastReportedSuccessfulTaskCount.set(successfulTaskCount); return successfulTaskCount - lastReportedSuccessfulTaskCount; } @VisibleForTesting void setNumProcessorsPerTask() { // Divide number of available processors by the number of tasks. // This prevents various automatically-sized thread pools from being unreasonably large (we don't want each // task to size its pools as if it is the only thing on the entire machine). final int availableProcessors = JvmUtils.getRuntimeInfo().getAvailableProcessors(); numProcessorsPerTask = Math.max( 1, IntMath.divide(availableProcessors, workerConfig.getCapacity(), RoundingMode.CEILING) ); } protected static class ForkingTaskRunnerWorkItem extends TaskRunnerWorkItem { private final Task task; private volatile boolean shutdown = false; private volatile ProcessHolder processHolder = null; private ForkingTaskRunnerWorkItem( Task task, ListenableFuture<TaskStatus> statusFuture ) { super(task.getId(), statusFuture); this.task = task; } public Task getTask() { return task; } @Override public TaskLocation getLocation() { if (processHolder == null) { return TaskLocation.unknown(); } else { return processHolder.location; } } @Override public String getTaskType() { return task.getType(); } @Override public String getDataSource() { return task.getDataSource(); } } public static class ProcessHolder { private final Process process; private final File logFile; private final TaskLocation location; public ProcessHolder(Process process, File logFile, TaskLocation location) { this.process = process; this.logFile = logFile; this.location = location; } private void registerWithCloser(Closer closer) { closer.register(process.getInputStream()); 
closer.register(process.getOutputStream()); } private void shutdown() { process.destroy(); } } @VisibleForTesting static int getNextAttemptID(File taskDir) { File attemptDir = new File(taskDir, "attempt"); try { FileUtils.mkdirp(attemptDir); } catch (IOException e) { throw new ISE("Error creating directory", e); } int maxAttempt = Arrays.stream(attemptDir.listFiles(File::isDirectory)) .mapToInt(x -> Integer.parseInt(x.getName())) .max().orElse(0); // now make the directory File attempt = new File(attemptDir, String.valueOf(maxAttempt + 1)); try { FileUtils.mkdirp(attempt); } catch (IOException e) { throw new ISE("Error creating directory", e); } return maxAttempt + 1; } public static class CommandListBuilder { ArrayList<String> commandList = new ArrayList<>(); public CommandListBuilder add(String arg) { commandList.add(arg); return this; } public CommandListBuilder addSystemProperty(String property, int value) { return addSystemProperty(property, String.valueOf(value)); } public CommandListBuilder addSystemProperty(String property, long value) { return addSystemProperty(property, String.valueOf(value)); } public CommandListBuilder addSystemProperty(String property, boolean value) { return addSystemProperty(property, String.valueOf(value)); } public CommandListBuilder addSystemProperty(String property, String value) { return add(StringUtils.format("-D%s=%s", property, value)); } public CommandListBuilder addAll(Iterable<String> args) { for (String arg : args) { add(arg); } return this; } public ArrayList<String> getCommandList() { return commandList; } } }
googleapis/google-cloud-java
36,175
java-cloudsupport/proto-google-cloud-cloudsupport-v2/src/main/java/com/google/cloud/support/v2/ListCommentsResponse.java
/* * Copyright 2025 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ // Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/cloud/support/v2/comment_service.proto // Protobuf Java Version: 3.25.8 package com.google.cloud.support.v2; /** * * * <pre> * The response message for the ListComments endpoint. * </pre> * * Protobuf type {@code google.cloud.support.v2.ListCommentsResponse} */ public final class ListCommentsResponse extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.cloud.support.v2.ListCommentsResponse) ListCommentsResponseOrBuilder { private static final long serialVersionUID = 0L; // Use ListCommentsResponse.newBuilder() to construct. 
private ListCommentsResponse(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private ListCommentsResponse() { comments_ = java.util.Collections.emptyList(); nextPageToken_ = ""; } @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance(UnusedPrivateParameter unused) { return new ListCommentsResponse(); } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.support.v2.CommentServiceProto .internal_static_google_cloud_support_v2_ListCommentsResponse_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.support.v2.CommentServiceProto .internal_static_google_cloud_support_v2_ListCommentsResponse_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.support.v2.ListCommentsResponse.class, com.google.cloud.support.v2.ListCommentsResponse.Builder.class); } public static final int COMMENTS_FIELD_NUMBER = 1; @SuppressWarnings("serial") private java.util.List<com.google.cloud.support.v2.Comment> comments_; /** * * * <pre> * List of the comments associated with the case. * </pre> * * <code>repeated .google.cloud.support.v2.Comment comments = 1;</code> */ @java.lang.Override public java.util.List<com.google.cloud.support.v2.Comment> getCommentsList() { return comments_; } /** * * * <pre> * List of the comments associated with the case. * </pre> * * <code>repeated .google.cloud.support.v2.Comment comments = 1;</code> */ @java.lang.Override public java.util.List<? extends com.google.cloud.support.v2.CommentOrBuilder> getCommentsOrBuilderList() { return comments_; } /** * * * <pre> * List of the comments associated with the case. 
* </pre> * * <code>repeated .google.cloud.support.v2.Comment comments = 1;</code> */ @java.lang.Override public int getCommentsCount() { return comments_.size(); } /** * * * <pre> * List of the comments associated with the case. * </pre> * * <code>repeated .google.cloud.support.v2.Comment comments = 1;</code> */ @java.lang.Override public com.google.cloud.support.v2.Comment getComments(int index) { return comments_.get(index); } /** * * * <pre> * List of the comments associated with the case. * </pre> * * <code>repeated .google.cloud.support.v2.Comment comments = 1;</code> */ @java.lang.Override public com.google.cloud.support.v2.CommentOrBuilder getCommentsOrBuilder(int index) { return comments_.get(index); } public static final int NEXT_PAGE_TOKEN_FIELD_NUMBER = 2; @SuppressWarnings("serial") private volatile java.lang.Object nextPageToken_ = ""; /** * * * <pre> * A token to retrieve the next page of results. Set this in the `page_token` * field of subsequent `cases.comments.list` requests. If unspecified, there * are no more results to retrieve. * </pre> * * <code>string next_page_token = 2;</code> * * @return The nextPageToken. */ @java.lang.Override public java.lang.String getNextPageToken() { java.lang.Object ref = nextPageToken_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); nextPageToken_ = s; return s; } } /** * * * <pre> * A token to retrieve the next page of results. Set this in the `page_token` * field of subsequent `cases.comments.list` requests. If unspecified, there * are no more results to retrieve. * </pre> * * <code>string next_page_token = 2;</code> * * @return The bytes for nextPageToken. 
*/ @java.lang.Override public com.google.protobuf.ByteString getNextPageTokenBytes() { java.lang.Object ref = nextPageToken_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); nextPageToken_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { for (int i = 0; i < comments_.size(); i++) { output.writeMessage(1, comments_.get(i)); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 2, nextPageToken_); } getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; for (int i = 0; i < comments_.size(); i++) { size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, comments_.get(i)); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, nextPageToken_); } size += getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof com.google.cloud.support.v2.ListCommentsResponse)) { return super.equals(obj); } com.google.cloud.support.v2.ListCommentsResponse other = (com.google.cloud.support.v2.ListCommentsResponse) obj; if (!getCommentsList().equals(other.getCommentsList())) return false; if (!getNextPageToken().equals(other.getNextPageToken())) return false; if 
(!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); if (getCommentsCount() > 0) { hash = (37 * hash) + COMMENTS_FIELD_NUMBER; hash = (53 * hash) + getCommentsList().hashCode(); } hash = (37 * hash) + NEXT_PAGE_TOKEN_FIELD_NUMBER; hash = (53 * hash) + getNextPageToken().hashCode(); hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static com.google.cloud.support.v2.ListCommentsResponse parseFrom(java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.support.v2.ListCommentsResponse parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.support.v2.ListCommentsResponse parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.support.v2.ListCommentsResponse parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.support.v2.ListCommentsResponse parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.support.v2.ListCommentsResponse parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static 
com.google.cloud.support.v2.ListCommentsResponse parseFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.support.v2.ListCommentsResponse parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.support.v2.ListCommentsResponse parseDelimitedFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); } public static com.google.cloud.support.v2.ListCommentsResponse parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.support.v2.ListCommentsResponse parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.support.v2.ListCommentsResponse parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder(com.google.cloud.support.v2.ListCommentsResponse prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? 
new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * * * <pre> * The response message for the ListComments endpoint. * </pre> * * Protobuf type {@code google.cloud.support.v2.ListCommentsResponse} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:google.cloud.support.v2.ListCommentsResponse) com.google.cloud.support.v2.ListCommentsResponseOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.support.v2.CommentServiceProto .internal_static_google_cloud_support_v2_ListCommentsResponse_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.support.v2.CommentServiceProto .internal_static_google_cloud_support_v2_ListCommentsResponse_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.support.v2.ListCommentsResponse.class, com.google.cloud.support.v2.ListCommentsResponse.Builder.class); } // Construct using com.google.cloud.support.v2.ListCommentsResponse.newBuilder() private Builder() {} private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); } @java.lang.Override public Builder clear() { super.clear(); bitField0_ = 0; if (commentsBuilder_ == null) { comments_ = java.util.Collections.emptyList(); } else { comments_ = null; commentsBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000001); nextPageToken_ = ""; return this; } @java.lang.Override public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return com.google.cloud.support.v2.CommentServiceProto .internal_static_google_cloud_support_v2_ListCommentsResponse_descriptor; } @java.lang.Override 
public com.google.cloud.support.v2.ListCommentsResponse getDefaultInstanceForType() { return com.google.cloud.support.v2.ListCommentsResponse.getDefaultInstance(); } @java.lang.Override public com.google.cloud.support.v2.ListCommentsResponse build() { com.google.cloud.support.v2.ListCommentsResponse result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public com.google.cloud.support.v2.ListCommentsResponse buildPartial() { com.google.cloud.support.v2.ListCommentsResponse result = new com.google.cloud.support.v2.ListCommentsResponse(this); buildPartialRepeatedFields(result); if (bitField0_ != 0) { buildPartial0(result); } onBuilt(); return result; } private void buildPartialRepeatedFields( com.google.cloud.support.v2.ListCommentsResponse result) { if (commentsBuilder_ == null) { if (((bitField0_ & 0x00000001) != 0)) { comments_ = java.util.Collections.unmodifiableList(comments_); bitField0_ = (bitField0_ & ~0x00000001); } result.comments_ = comments_; } else { result.comments_ = commentsBuilder_.build(); } } private void buildPartial0(com.google.cloud.support.v2.ListCommentsResponse result) { int from_bitField0_ = bitField0_; if (((from_bitField0_ & 0x00000002) != 0)) { result.nextPageToken_ = nextPageToken_; } } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return 
super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof com.google.cloud.support.v2.ListCommentsResponse) { return mergeFrom((com.google.cloud.support.v2.ListCommentsResponse) other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(com.google.cloud.support.v2.ListCommentsResponse other) { if (other == com.google.cloud.support.v2.ListCommentsResponse.getDefaultInstance()) return this; if (commentsBuilder_ == null) { if (!other.comments_.isEmpty()) { if (comments_.isEmpty()) { comments_ = other.comments_; bitField0_ = (bitField0_ & ~0x00000001); } else { ensureCommentsIsMutable(); comments_.addAll(other.comments_); } onChanged(); } } else { if (!other.comments_.isEmpty()) { if (commentsBuilder_.isEmpty()) { commentsBuilder_.dispose(); commentsBuilder_ = null; comments_ = other.comments_; bitField0_ = (bitField0_ & ~0x00000001); commentsBuilder_ = com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ? 
getCommentsFieldBuilder() : null; } else { commentsBuilder_.addAllMessages(other.comments_); } } } if (!other.getNextPageToken().isEmpty()) { nextPageToken_ = other.nextPageToken_; bitField0_ |= 0x00000002; onChanged(); } this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { com.google.cloud.support.v2.Comment m = input.readMessage( com.google.cloud.support.v2.Comment.parser(), extensionRegistry); if (commentsBuilder_ == null) { ensureCommentsIsMutable(); comments_.add(m); } else { commentsBuilder_.addMessage(m); } break; } // case 10 case 18: { nextPageToken_ = input.readStringRequireUtf8(); bitField0_ |= 0x00000002; break; } // case 18 default: { if (!super.parseUnknownField(input, extensionRegistry, tag)) { done = true; // was an endgroup tag } break; } // default: } // switch (tag) } // while (!done) } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.unwrapIOException(); } finally { onChanged(); } // finally return this; } private int bitField0_; private java.util.List<com.google.cloud.support.v2.Comment> comments_ = java.util.Collections.emptyList(); private void ensureCommentsIsMutable() { if (!((bitField0_ & 0x00000001) != 0)) { comments_ = new java.util.ArrayList<com.google.cloud.support.v2.Comment>(comments_); bitField0_ |= 0x00000001; } } private com.google.protobuf.RepeatedFieldBuilderV3< com.google.cloud.support.v2.Comment, com.google.cloud.support.v2.Comment.Builder, com.google.cloud.support.v2.CommentOrBuilder> commentsBuilder_; /** * * * <pre> * List of 
the comments associated with the case. * </pre> * * <code>repeated .google.cloud.support.v2.Comment comments = 1;</code> */ public java.util.List<com.google.cloud.support.v2.Comment> getCommentsList() { if (commentsBuilder_ == null) { return java.util.Collections.unmodifiableList(comments_); } else { return commentsBuilder_.getMessageList(); } } /** * * * <pre> * List of the comments associated with the case. * </pre> * * <code>repeated .google.cloud.support.v2.Comment comments = 1;</code> */ public int getCommentsCount() { if (commentsBuilder_ == null) { return comments_.size(); } else { return commentsBuilder_.getCount(); } } /** * * * <pre> * List of the comments associated with the case. * </pre> * * <code>repeated .google.cloud.support.v2.Comment comments = 1;</code> */ public com.google.cloud.support.v2.Comment getComments(int index) { if (commentsBuilder_ == null) { return comments_.get(index); } else { return commentsBuilder_.getMessage(index); } } /** * * * <pre> * List of the comments associated with the case. * </pre> * * <code>repeated .google.cloud.support.v2.Comment comments = 1;</code> */ public Builder setComments(int index, com.google.cloud.support.v2.Comment value) { if (commentsBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureCommentsIsMutable(); comments_.set(index, value); onChanged(); } else { commentsBuilder_.setMessage(index, value); } return this; } /** * * * <pre> * List of the comments associated with the case. * </pre> * * <code>repeated .google.cloud.support.v2.Comment comments = 1;</code> */ public Builder setComments( int index, com.google.cloud.support.v2.Comment.Builder builderForValue) { if (commentsBuilder_ == null) { ensureCommentsIsMutable(); comments_.set(index, builderForValue.build()); onChanged(); } else { commentsBuilder_.setMessage(index, builderForValue.build()); } return this; } /** * * * <pre> * List of the comments associated with the case. 
* </pre> * * <code>repeated .google.cloud.support.v2.Comment comments = 1;</code> */ public Builder addComments(com.google.cloud.support.v2.Comment value) { if (commentsBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureCommentsIsMutable(); comments_.add(value); onChanged(); } else { commentsBuilder_.addMessage(value); } return this; } /** * * * <pre> * List of the comments associated with the case. * </pre> * * <code>repeated .google.cloud.support.v2.Comment comments = 1;</code> */ public Builder addComments(int index, com.google.cloud.support.v2.Comment value) { if (commentsBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureCommentsIsMutable(); comments_.add(index, value); onChanged(); } else { commentsBuilder_.addMessage(index, value); } return this; } /** * * * <pre> * List of the comments associated with the case. * </pre> * * <code>repeated .google.cloud.support.v2.Comment comments = 1;</code> */ public Builder addComments(com.google.cloud.support.v2.Comment.Builder builderForValue) { if (commentsBuilder_ == null) { ensureCommentsIsMutable(); comments_.add(builderForValue.build()); onChanged(); } else { commentsBuilder_.addMessage(builderForValue.build()); } return this; } /** * * * <pre> * List of the comments associated with the case. * </pre> * * <code>repeated .google.cloud.support.v2.Comment comments = 1;</code> */ public Builder addComments( int index, com.google.cloud.support.v2.Comment.Builder builderForValue) { if (commentsBuilder_ == null) { ensureCommentsIsMutable(); comments_.add(index, builderForValue.build()); onChanged(); } else { commentsBuilder_.addMessage(index, builderForValue.build()); } return this; } /** * * * <pre> * List of the comments associated with the case. * </pre> * * <code>repeated .google.cloud.support.v2.Comment comments = 1;</code> */ public Builder addAllComments( java.lang.Iterable<? 
extends com.google.cloud.support.v2.Comment> values) { if (commentsBuilder_ == null) { ensureCommentsIsMutable(); com.google.protobuf.AbstractMessageLite.Builder.addAll(values, comments_); onChanged(); } else { commentsBuilder_.addAllMessages(values); } return this; } /** * * * <pre> * List of the comments associated with the case. * </pre> * * <code>repeated .google.cloud.support.v2.Comment comments = 1;</code> */ public Builder clearComments() { if (commentsBuilder_ == null) { comments_ = java.util.Collections.emptyList(); bitField0_ = (bitField0_ & ~0x00000001); onChanged(); } else { commentsBuilder_.clear(); } return this; } /** * * * <pre> * List of the comments associated with the case. * </pre> * * <code>repeated .google.cloud.support.v2.Comment comments = 1;</code> */ public Builder removeComments(int index) { if (commentsBuilder_ == null) { ensureCommentsIsMutable(); comments_.remove(index); onChanged(); } else { commentsBuilder_.remove(index); } return this; } /** * * * <pre> * List of the comments associated with the case. * </pre> * * <code>repeated .google.cloud.support.v2.Comment comments = 1;</code> */ public com.google.cloud.support.v2.Comment.Builder getCommentsBuilder(int index) { return getCommentsFieldBuilder().getBuilder(index); } /** * * * <pre> * List of the comments associated with the case. * </pre> * * <code>repeated .google.cloud.support.v2.Comment comments = 1;</code> */ public com.google.cloud.support.v2.CommentOrBuilder getCommentsOrBuilder(int index) { if (commentsBuilder_ == null) { return comments_.get(index); } else { return commentsBuilder_.getMessageOrBuilder(index); } } /** * * * <pre> * List of the comments associated with the case. * </pre> * * <code>repeated .google.cloud.support.v2.Comment comments = 1;</code> */ public java.util.List<? 
extends com.google.cloud.support.v2.CommentOrBuilder> getCommentsOrBuilderList() { if (commentsBuilder_ != null) { return commentsBuilder_.getMessageOrBuilderList(); } else { return java.util.Collections.unmodifiableList(comments_); } } /** * * * <pre> * List of the comments associated with the case. * </pre> * * <code>repeated .google.cloud.support.v2.Comment comments = 1;</code> */ public com.google.cloud.support.v2.Comment.Builder addCommentsBuilder() { return getCommentsFieldBuilder() .addBuilder(com.google.cloud.support.v2.Comment.getDefaultInstance()); } /** * * * <pre> * List of the comments associated with the case. * </pre> * * <code>repeated .google.cloud.support.v2.Comment comments = 1;</code> */ public com.google.cloud.support.v2.Comment.Builder addCommentsBuilder(int index) { return getCommentsFieldBuilder() .addBuilder(index, com.google.cloud.support.v2.Comment.getDefaultInstance()); } /** * * * <pre> * List of the comments associated with the case. * </pre> * * <code>repeated .google.cloud.support.v2.Comment comments = 1;</code> */ public java.util.List<com.google.cloud.support.v2.Comment.Builder> getCommentsBuilderList() { return getCommentsFieldBuilder().getBuilderList(); } private com.google.protobuf.RepeatedFieldBuilderV3< com.google.cloud.support.v2.Comment, com.google.cloud.support.v2.Comment.Builder, com.google.cloud.support.v2.CommentOrBuilder> getCommentsFieldBuilder() { if (commentsBuilder_ == null) { commentsBuilder_ = new com.google.protobuf.RepeatedFieldBuilderV3< com.google.cloud.support.v2.Comment, com.google.cloud.support.v2.Comment.Builder, com.google.cloud.support.v2.CommentOrBuilder>( comments_, ((bitField0_ & 0x00000001) != 0), getParentForChildren(), isClean()); comments_ = null; } return commentsBuilder_; } private java.lang.Object nextPageToken_ = ""; /** * * * <pre> * A token to retrieve the next page of results. Set this in the `page_token` * field of subsequent `cases.comments.list` requests. 
If unspecified, there * are no more results to retrieve. * </pre> * * <code>string next_page_token = 2;</code> * * @return The nextPageToken. */ public java.lang.String getNextPageToken() { java.lang.Object ref = nextPageToken_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); nextPageToken_ = s; return s; } else { return (java.lang.String) ref; } } /** * * * <pre> * A token to retrieve the next page of results. Set this in the `page_token` * field of subsequent `cases.comments.list` requests. If unspecified, there * are no more results to retrieve. * </pre> * * <code>string next_page_token = 2;</code> * * @return The bytes for nextPageToken. */ public com.google.protobuf.ByteString getNextPageTokenBytes() { java.lang.Object ref = nextPageToken_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); nextPageToken_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * * * <pre> * A token to retrieve the next page of results. Set this in the `page_token` * field of subsequent `cases.comments.list` requests. If unspecified, there * are no more results to retrieve. * </pre> * * <code>string next_page_token = 2;</code> * * @param value The nextPageToken to set. * @return This builder for chaining. */ public Builder setNextPageToken(java.lang.String value) { if (value == null) { throw new NullPointerException(); } nextPageToken_ = value; bitField0_ |= 0x00000002; onChanged(); return this; } /** * * * <pre> * A token to retrieve the next page of results. Set this in the `page_token` * field of subsequent `cases.comments.list` requests. If unspecified, there * are no more results to retrieve. * </pre> * * <code>string next_page_token = 2;</code> * * @return This builder for chaining. 
*/ public Builder clearNextPageToken() { nextPageToken_ = getDefaultInstance().getNextPageToken(); bitField0_ = (bitField0_ & ~0x00000002); onChanged(); return this; } /** * * * <pre> * A token to retrieve the next page of results. Set this in the `page_token` * field of subsequent `cases.comments.list` requests. If unspecified, there * are no more results to retrieve. * </pre> * * <code>string next_page_token = 2;</code> * * @param value The bytes for nextPageToken to set. * @return This builder for chaining. */ public Builder setNextPageTokenBytes(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); nextPageToken_ = value; bitField0_ |= 0x00000002; onChanged(); return this; } @java.lang.Override public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:google.cloud.support.v2.ListCommentsResponse) } // @@protoc_insertion_point(class_scope:google.cloud.support.v2.ListCommentsResponse) private static final com.google.cloud.support.v2.ListCommentsResponse DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new com.google.cloud.support.v2.ListCommentsResponse(); } public static com.google.cloud.support.v2.ListCommentsResponse getDefaultInstance() { return DEFAULT_INSTANCE; } private static final com.google.protobuf.Parser<ListCommentsResponse> PARSER = new com.google.protobuf.AbstractParser<ListCommentsResponse>() { @java.lang.Override public ListCommentsResponse parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { Builder builder = newBuilder(); try { builder.mergeFrom(input, 
extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(builder.buildPartial()); } catch (com.google.protobuf.UninitializedMessageException e) { throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException(e) .setUnfinishedMessage(builder.buildPartial()); } return builder.buildPartial(); } }; public static com.google.protobuf.Parser<ListCommentsResponse> parser() { return PARSER; } @java.lang.Override public com.google.protobuf.Parser<ListCommentsResponse> getParserForType() { return PARSER; } @java.lang.Override public com.google.cloud.support.v2.ListCommentsResponse getDefaultInstanceForType() { return DEFAULT_INSTANCE; } }
openjdk/jdk8
36,435
langtools/src/share/classes/com/sun/tools/javac/api/JavacTrees.java
/* * Copyright (c) 2005, 2013, Oracle and/or its affiliates. All rights reserved. * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER. * * This code is free software; you can redistribute it and/or modify it * under the terms of the GNU General Public License version 2 only, as * published by the Free Software Foundation. Oracle designates this * particular file as subject to the "Classpath" exception as provided * by Oracle in the LICENSE file that accompanied this code. * * This code is distributed in the hope that it will be useful, but WITHOUT * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License * version 2 for more details (a copy is included in the LICENSE file that * accompanied this code). * * You should have received a copy of the GNU General Public License version * 2 along with this work; if not, write to the Free Software Foundation, * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA. * * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA * or visit www.oracle.com if you need additional information or have any * questions. 
*/ package com.sun.tools.javac.api; import java.io.IOException; import java.util.HashSet; import java.util.Set; import javax.annotation.processing.ProcessingEnvironment; import javax.lang.model.element.AnnotationMirror; import javax.lang.model.element.AnnotationValue; import javax.lang.model.element.Element; import javax.lang.model.element.ElementKind; import javax.lang.model.element.ExecutableElement; import javax.lang.model.element.TypeElement; import javax.lang.model.type.DeclaredType; import javax.lang.model.type.TypeKind; import javax.lang.model.type.TypeMirror; import javax.tools.Diagnostic; import javax.tools.JavaCompiler; import javax.tools.JavaFileObject; import com.sun.source.doctree.DocCommentTree; import com.sun.source.doctree.DocTree; import com.sun.source.tree.CatchTree; import com.sun.source.tree.CompilationUnitTree; import com.sun.source.tree.Scope; import com.sun.source.tree.Tree; import com.sun.source.util.DocSourcePositions; import com.sun.source.util.DocTreePath; import com.sun.source.util.DocTreeScanner; import com.sun.source.util.DocTrees; import com.sun.source.util.JavacTask; import com.sun.source.util.TreePath; import com.sun.tools.javac.code.Flags; import com.sun.tools.javac.code.Kinds; import com.sun.tools.javac.code.Symbol; import com.sun.tools.javac.code.Symbol.ClassSymbol; import com.sun.tools.javac.code.Symbol.MethodSymbol; import com.sun.tools.javac.code.Symbol.PackageSymbol; import com.sun.tools.javac.code.Symbol.TypeSymbol; import com.sun.tools.javac.code.Symbol.VarSymbol; import com.sun.tools.javac.code.Type; import com.sun.tools.javac.code.Type.ArrayType; import com.sun.tools.javac.code.Type.ClassType; import com.sun.tools.javac.code.Type.ErrorType; import com.sun.tools.javac.code.Type.UnionClassType; import com.sun.tools.javac.code.Types; import com.sun.tools.javac.code.Types.TypeRelation; import com.sun.tools.javac.comp.Attr; import com.sun.tools.javac.comp.AttrContext; import com.sun.tools.javac.comp.Enter; import 
com.sun.tools.javac.comp.Env; import com.sun.tools.javac.comp.MemberEnter; import com.sun.tools.javac.comp.Resolve; import com.sun.tools.javac.model.JavacElements; import com.sun.tools.javac.processing.JavacProcessingEnvironment; import com.sun.tools.javac.tree.DCTree; import com.sun.tools.javac.tree.DCTree.DCBlockTag; import com.sun.tools.javac.tree.DCTree.DCDocComment; import com.sun.tools.javac.tree.DCTree.DCEndPosTree; import com.sun.tools.javac.tree.DCTree.DCErroneous; import com.sun.tools.javac.tree.DCTree.DCIdentifier; import com.sun.tools.javac.tree.DCTree.DCParam; import com.sun.tools.javac.tree.DCTree.DCReference; import com.sun.tools.javac.tree.DCTree.DCText; import com.sun.tools.javac.tree.EndPosTable; import com.sun.tools.javac.tree.JCTree; import com.sun.tools.javac.tree.JCTree.*; import com.sun.tools.javac.tree.TreeCopier; import com.sun.tools.javac.tree.TreeInfo; import com.sun.tools.javac.tree.TreeMaker; import com.sun.tools.javac.util.Abort; import com.sun.tools.javac.util.Assert; import com.sun.tools.javac.util.Context; import com.sun.tools.javac.util.JCDiagnostic; import com.sun.tools.javac.util.List; import com.sun.tools.javac.util.ListBuffer; import com.sun.tools.javac.util.Log; import com.sun.tools.javac.util.Name; import com.sun.tools.javac.util.Names; import com.sun.tools.javac.util.Pair; import com.sun.tools.javac.util.Position; import static com.sun.tools.javac.code.TypeTag.*; /** * Provides an implementation of Trees. * * <p><b>This is NOT part of any supported API. * If you write code that depends on this, you do so at your own * risk. 
This code and its internal interfaces are subject to change
 * or deletion without notice.</b></p>
 *
 * @author Peter von der Ah&eacute;
 */
public class JavacTrees extends DocTrees {

    // in a world of a single context per compilation, these would all be final
    private Resolve resolve;
    private Enter enter;
    private Log log;
    private MemberEnter memberEnter;
    private Attr attr;
    private TreeMaker treeMaker;
    private JavacElements elements;
    private JavacTaskImpl javacTaskImpl;
    private Names names;
    private Types types;

    // called reflectively from Trees.instance(CompilationTask task)
    public static JavacTrees instance(JavaCompiler.CompilationTask task) {
        if (!(task instanceof BasicJavacTask))
            throw new IllegalArgumentException();
        return instance(((BasicJavacTask)task).getContext());
    }

    // called reflectively from Trees.instance(ProcessingEnvironment env)
    public static JavacTrees instance(ProcessingEnvironment env) {
        if (!(env instanceof JavacProcessingEnvironment))
            throw new IllegalArgumentException();
        return instance(((JavacProcessingEnvironment)env).getContext());
    }

    /**
     * Returns the JavacTrees instance registered in the given compiler context,
     * creating and registering one if none exists yet (one instance per context).
     */
    public static JavacTrees instance(Context context) {
        JavacTrees instance = context.get(JavacTrees.class);
        if (instance == null)
            instance = new JavacTrees(context);
        return instance;
    }

    // Registers this instance in the context, then resolves all collaborators from it.
    protected JavacTrees(Context context) {
        context.put(JavacTrees.class, this);
        init(context);
    }

    /** Re-resolves all collaborators from a (possibly new) context. */
    public void updateContext(Context context) {
        init(context);
    }

    // Looks up every per-context singleton this class depends on. Called from the
    // constructor and from updateContext.
    private void init(Context context) {
        attr = Attr.instance(context);
        enter = Enter.instance(context);
        elements = JavacElements.instance(context);
        log = Log.instance(context);
        resolve = Resolve.instance(context);
        treeMaker = TreeMaker.instance(context);
        memberEnter = MemberEnter.instance(context);
        names = Names.instance(context);
        types = Types.instance(context);

        JavacTask t = context.get(JavacTask.class);
        if (t instanceof JavacTaskImpl)
            javacTaskImpl = (JavacTaskImpl) t;
    }

    /**
     * Returns a DocSourcePositions that maps both ordinary trees and doc-comment
     * trees to character offsets in the source file. A fresh anonymous instance is
     * created on every call.
     */
    public DocSourcePositions getSourcePositions() {
        return new DocSourcePositions() {
                public long getStartPosition(CompilationUnitTree file, Tree tree) {
                    return TreeInfo.getStartPos((JCTree) tree);
                }

                public long getEndPosition(CompilationUnitTree file, Tree tree) {
                    // End positions are recorded per compilation unit in its end-pos table.
                    EndPosTable endPosTable = ((JCCompilationUnit) file).endPositions;
                    return TreeInfo.getEndPos((JCTree) tree, endPosTable);
                }

                public long getStartPosition(CompilationUnitTree file, DocCommentTree comment, DocTree tree) {
                    return ((DCTree) tree).getSourcePosition((DCDocComment) comment);
                }

                // The PARAM case intentionally falls through into the block-tag cases:
                // it only computes a position correction, then shares their logic.
                @SuppressWarnings("fallthrough")
                public long getEndPosition(CompilationUnitTree file, DocCommentTree comment, DocTree tree) {
                    DCDocComment dcComment = (DCDocComment) comment;
                    // Prefer an explicitly recorded end position when the node has one.
                    if (tree instanceof DCEndPosTree) {
                        int endPos = ((DCEndPosTree) tree).getEndPos(dcComment);

                        if (endPos != Position.NOPOS) {
                            return endPos;
                        }
                    }
                    int correction = 0;
                    switch (tree.getKind()) {
                        case TEXT:
                            DCText text = (DCText) tree;

                            return dcComment.comment.getSourcePos(text.pos + text.text.length());
                        case ERRONEOUS:
                            DCErroneous err = (DCErroneous) tree;

                            return dcComment.comment.getSourcePos(err.pos + err.body.length());
                        case IDENTIFIER:
                            DCIdentifier ident = (DCIdentifier) tree;

                            // An error-named identifier contributes no length of its own.
                            return dcComment.comment.getSourcePos(ident.pos + (ident.name != names.error ? ident.name.length() : 0));
                        case PARAM:
                            DCParam param = (DCParam) tree;

                            // For "@param <T>" with no description, account for the closing '>'.
                            if (param.isTypeParameter && param.getDescription().isEmpty()) {
                                correction = 1;
                            }
                        case AUTHOR: case DEPRECATED: case RETURN: case SEE: case SERIAL:
                        case SERIAL_DATA: case SERIAL_FIELD: case SINCE: case THROWS:
                        case UNKNOWN_BLOCK_TAG: case VERSION: {
                            // A block tag ends where its last child ends; with no children,
                            // it ends just after "@tagname".
                            DocTree last = getLastChild(tree);

                            if (last != null) {
                                return getEndPosition(file, comment, last) + correction;
                            }

                            DCBlockTag block = (DCBlockTag) tree;

                            return dcComment.comment.getSourcePos(block.pos + block.getTagName().length() + 1);
                        }
                        default:
                            DocTree last = getLastChild(tree);

                            if (last != null) {
                                return getEndPosition(file, comment, last);
                            }
                            break;
                    }

                    return Position.NOPOS;
                }
            };
    }

    // Returns the last child of a doc tree node (the last node visited by a full
    // scan), or null if it has no children.
    private DocTree getLastChild(DocTree tree) {
        final DocTree[] last = new DocTree[] {null};

        tree.accept(new DocTreeScanner<Void, Void>() {
            @Override public Void scan(DocTree node, Void p) {
                if (node != null) last[0] = node;
                return null;
            }
        }, null);

        return last[0];
    }

    /** Returns the declaration tree of a class/interface element, or null. */
    public JCClassDecl getTree(TypeElement element) {
        return (JCClassDecl) getTree((Element) element);
    }

    /** Returns the declaration tree of a method/constructor element, or null. */
    public JCMethodDecl getTree(ExecutableElement method) {
        return (JCMethodDecl) getTree((Element) method);
    }

    /**
     * Returns the declaration tree for an element by looking up the enclosing
     * class's environment and scanning that class and its members for a tree
     * whose symbol is identical to the element. Returns null if the class was
     * not entered from source or no matching tree is found.
     */
    public JCTree getTree(Element element) {
        Symbol symbol = (Symbol) element;

        TypeSymbol enclosing = symbol.enclClass();
        Env<AttrContext> env = enter.getEnv(enclosing);
        if (env == null)
            return null;
        JCClassDecl classNode = env.enclClass;
        if (classNode != null) {
            if (TreeInfo.symbolFor(classNode) == element)
                return classNode;
            for (JCTree node : classNode.getMembers())
                if (TreeInfo.symbolFor(node) == element)
                    return node;
        }
        return null;
    }

    public JCTree getTree(Element e, AnnotationMirror a) {
        return getTree(e, a, null);
    }

    // Delegates to JavacElements to find the tree for (element, annotation, value).
    public JCTree getTree(Element e, AnnotationMirror a, AnnotationValue v) {
        Pair<JCTree, JCCompilationUnit> treeTopLevel = elements.getTreeAndTopLevel(e, a, v);
        if (treeTopLevel == null)
            return null;
        return treeTopLevel.fst;
    }

    public TreePath getPath(CompilationUnitTree unit, Tree node) {
        return TreePath.getPath(unit, node);
    }

    public TreePath getPath(Element e) {
        return getPath(e, null, null);
    }

    public TreePath getPath(Element e, AnnotationMirror a) {
        return getPath(e, a, null);
    }

    // Builds a TreePath from the element's top-level compilation unit down to its tree.
    public TreePath getPath(Element e, AnnotationMirror a, AnnotationValue v) {
        final Pair<JCTree, JCCompilationUnit> treeTopLevel = elements.getTreeAndTopLevel(e, a, v);
        if (treeTopLevel == null)
            return null;
        return TreePath.getPath(treeTopLevel.snd, treeTopLevel.fst);
    }

    /**
     * Returns the symbol for the leaf of the path. If the leaf is a declaration
     * whose symbol is not yet available because the enclosing class has not been
     * attributed, the nearest enclosing class on the path is attributed first and
     * the lookup is retried.
     */
    public Symbol getElement(TreePath path) {
        JCTree tree = (JCTree) path.getLeaf();
        Symbol sym = TreeInfo.symbolFor(tree);
        if (sym == null) {
            if (TreeInfo.isDeclaration(tree)) {
                for (TreePath p = path; p != null; p = p.getParentPath()) {
                    JCTree t = (JCTree) p.getLeaf();
                    if (t.hasTag(JCTree.Tag.CLASSDEF)) {
                        JCClassDecl ct = (JCClassDecl) t;
                        if (ct.sym != null) {
                            // Force attribution once, then retry the symbol lookup.
                            if ((ct.sym.flags_field & Flags.UNATTRIBUTED) != 0) {
                                attr.attribClass(ct.pos(), ct.sym);
                                sym = TreeInfo.symbolFor(tree);
                            }
                            break;
                        }
                    }
                }
            }
        }
        return sym;
    }

    /**
     * Resolves the element referenced by a doc-comment path: either a
     * {@code @see}/{@code {@link}}-style reference, or the identifier of a
     * {@code @param} tag. Returns null for anything else.
     */
    @Override
    public Element getElement(DocTreePath path) {
        DocTree forTree = path.getLeaf();
        if (forTree instanceof DCReference)
            return attributeDocReference(path.getTreePath(), ((DCReference) forTree));
        if (forTree instanceof DCIdentifier) {
            if (path.getParentPath().getLeaf() instanceof DCParam) {
                return attributeParamIdentifier(path.getTreePath(), (DCParam) path.getParentPath().getLeaf());
            }
        }
        return null;
    }

    /**
     * Resolves a javadoc reference ("Type#member(params)") against the scope at
     * {@code path}. Diagnostics produced while attributing the reference are
     * deferred and discarded, so resolution never reports errors to the user.
     * Returns the referenced package, type, field, method, or constructor symbol,
     * or null if the reference cannot be resolved.
     */
    private Symbol attributeDocReference(TreePath path, DCReference ref) {
        Env<AttrContext> env = getAttrContext(path);

        // Capture (and later drop) any diagnostics generated during attribution.
        Log.DeferredDiagnosticHandler deferredDiagnosticHandler =
                new Log.DeferredDiagnosticHandler(log);
        try {
            final TypeSymbol tsym;
            final Name memberName;
            if (ref.qualifierExpression == null) {
                // Unqualified reference: resolve against the enclosing class.
                tsym = env.enclClass.sym;
                memberName = ref.memberName;
            } else {
                // See if the qualifierExpression is a type or package name.
                // javac does not provide the exact method required, so
                // we first check if qualifierExpression identifies a type,
                // and if not, then we check to see if it identifies a package.
                Type t = attr.attribType(ref.qualifierExpression, env);
                if (t.isErroneous()) {
                    if (ref.memberName == null) {
                        // Attr/Resolve assume packages exist and create symbols as needed
                        // so use getPackageElement to restrict search to existing packages
                        PackageSymbol pck = elements.getPackageElement(ref.qualifierExpression.toString());
                        if (pck != null) {
                            return pck;
                        } else if (ref.qualifierExpression.hasTag(JCTree.Tag.IDENT)) {
                            // fixup: allow "identifier" instead of "#identifier"
                            // for compatibility with javadoc
                            tsym = env.enclClass.sym;
                            memberName = ((JCIdent) ref.qualifierExpression).name;
                        } else
                            return null;
                    } else {
                        return null;
                    }
                } else {
                    tsym = t.tsym;
                    memberName = ref.memberName;
                }
            }

            // No "#member" part: the reference denotes the type (or package) itself.
            if (memberName == null)
                return tsym;

            // Attribute each parameter type of an explicit "(T1, T2, ...)" list.
            final List<Type> paramTypes;
            if (ref.paramTypes == null)
                paramTypes = null;
            else {
                ListBuffer<Type> lb = new ListBuffer<Type>();
                for (List<JCTree> l = ref.paramTypes; l.nonEmpty(); l = l.tail) {
                    JCTree tree = l.head;
                    Type t = attr.attribType(tree, env);
                    lb.add(t);
                }
                paramTypes = lb.toList();
            }

            ClassSymbol sym = (ClassSymbol) types.upperBound(tsym.type).tsym;

            // A member named like its class is a constructor reference.
            Symbol msym = (memberName == sym.name)
                    ? findConstructor(sym, paramTypes)
                    : findMethod(sym, memberName, paramTypes);
            if (paramTypes != null) {
                // explicit (possibly empty) arg list given, so cannot be a field
                return msym;
            }

            VarSymbol vsym = (ref.paramTypes != null) ? null : findField(sym, memberName);
            // prefer a field over a method with no parameters
            if (vsym != null &&
                    (msym == null ||
                        types.isSubtypeUnchecked(vsym.enclClass().asType(), msym.enclClass().asType()))) {
                return vsym;
            } else {
                return msym;
            }
        } catch (Abort e) { // may be thrown by Check.completionError in case of bad class file
            return null;
        } finally {
            // Discard the deferred diagnostics along with the handler.
            log.popDiagnosticHandler(deferredDiagnosticHandler);
        }
    }

    /**
     * Resolves the identifier of a {@code @param} tag to the corresponding
     * (type) parameter symbol of the documented method, constructor, class,
     * or interface. Returns null when no parameter of that name exists.
     */
    private Symbol attributeParamIdentifier(TreePath path, DCParam ptag) {
        Symbol javadocSymbol = getElement(path);
        if (javadocSymbol == null)
            return null;
        ElementKind kind = javadocSymbol.getKind();
        List<? extends Symbol> params = List.nil();
        if (kind == ElementKind.METHOD || kind == ElementKind.CONSTRUCTOR) {
            MethodSymbol ee = (MethodSymbol) javadocSymbol;
            // "@param <T>" documents a type parameter; "@param x" a value parameter.
            params = ptag.isTypeParameter()
                    ? ee.getTypeParameters()
                    : ee.getParameters();
        } else if (kind.isClass() || kind.isInterface()) {
            ClassSymbol te = (ClassSymbol) javadocSymbol;
            params = te.getTypeParameters();
        }

        for (Symbol param : params) {
            if (param.getSimpleName() == ptag.getName().getName()) {
                return param;
            }
        }
        return null;
    }

    /** @see com.sun.tools.javadoc.ClassDocImpl#findField */
    private VarSymbol findField(ClassSymbol tsym, Name fieldName) {
        return searchField(tsym, fieldName, new HashSet<ClassSymbol>());
    }

    /**
     * Searches for a field by name in the class itself, then its enclosing
     * classes, superclasses, and interfaces. The {@code searched} set guards
     * against revisiting a class (and hence against cycles).
     * @see com.sun.tools.javadoc.ClassDocImpl#searchField
     */
    private VarSymbol searchField(ClassSymbol tsym, Name fieldName, Set<ClassSymbol> searched) {
        if (searched.contains(tsym)) {
            return null;
        }
        searched.add(tsym);

        for (com.sun.tools.javac.code.Scope.Entry e = tsym.members().lookup(fieldName);
                e.scope != null; e = e.next()) {
            if (e.sym.kind == Kinds.VAR) {
                return (VarSymbol)e.sym;
            }
        }

        //### If we found a VarSymbol above, but which did not pass
        //### the modifier filter, we should return failure here!

        // search enclosing class
        ClassSymbol encl = tsym.owner.enclClass();
        if (encl != null) {
            VarSymbol vsym = searchField(encl, fieldName, searched);
            if (vsym != null) {
                return vsym;
            }
        }

        // search superclass
        Type superclass = tsym.getSuperclass();
        if (superclass.tsym != null) {
            VarSymbol vsym = searchField((ClassSymbol) superclass.tsym, fieldName, searched);
            if (vsym != null) {
                return vsym;
            }
        }

        // search interfaces
        List<Type> intfs = tsym.getInterfaces();
        for (List<Type> l = intfs; l.nonEmpty(); l = l.tail) {
            Type intf = l.head;
            if (intf.isErroneous()) continue;
            VarSymbol vsym = searchField((ClassSymbol) intf.tsym, fieldName, searched);
            if (vsym != null) {
                return vsym;
            }
        }

        return null;
    }

    /**
     * Finds a constructor of {@code tsym} matching the given parameter types
     * (any constructor matches when {@code paramTypes} is null).
     * @see com.sun.tools.javadoc.ClassDocImpl#findConstructor
     */
    MethodSymbol findConstructor(ClassSymbol tsym, List<Type> paramTypes) {
        for (com.sun.tools.javac.code.Scope.Entry e = tsym.members().lookup(names.init);
                e.scope != null; e = e.next()) {
            if (e.sym.kind == Kinds.MTH) {
                if (hasParameterTypes((MethodSymbol) e.sym, paramTypes)) {
                    return (MethodSymbol) e.sym;
                }
            }
        }
        return null;
    }

    /** @see com.sun.tools.javadoc.ClassDocImpl#findMethod */
    private MethodSymbol findMethod(ClassSymbol tsym, Name methodName, List<Type> paramTypes) {
        return searchMethod(tsym, methodName, paramTypes, new HashSet<ClassSymbol>());
    }

    /**
     * Searches for a method by name (and optionally parameter types) in the
     * class, its superclasses, interfaces, and enclosing classes — emulating
     * the legacy javadoc lookup rather than real compiler resolution.
     * @see com.sun.tools.javadoc.ClassDocImpl#searchMethod
     */
    private MethodSymbol searchMethod(ClassSymbol tsym, Name methodName,
                                       List<Type> paramTypes, Set<ClassSymbol> searched) {
        //### Note that this search is not necessarily what the compiler would do!

        // do not match constructors
        if (methodName == names.init)
            return null;

        if (searched.contains(tsym))
            return null;
        searched.add(tsym);

        // search current class
        com.sun.tools.javac.code.Scope.Entry e = tsym.members().lookup(methodName);

        //### Using modifier filter here isn't really correct,
        //### but emulates the old behavior.  Instead, we should
        //### apply the normal rules of visibility and inheritance.

        if (paramTypes == null) {
            // If no parameters specified, we are allowed to return
            // any method with a matching name.  In practice, the old
            // code returned the first method, which is now the last!
            // In order to provide textually identical results, we
            // attempt to emulate the old behavior.
            MethodSymbol lastFound = null;
            for (; e.scope != null; e = e.next()) {
                if (e.sym.kind == Kinds.MTH) {
                    if (e.sym.name == methodName) {
                        lastFound = (MethodSymbol)e.sym;
                    }
                }
            }
            if (lastFound != null) {
                return lastFound;
            }
        } else {
            for (; e.scope != null; e = e.next()) {
                if (e.sym != null &&
                        e.sym.kind == Kinds.MTH) {
                    if (hasParameterTypes((MethodSymbol) e.sym, paramTypes)) {
                        return (MethodSymbol) e.sym;
                    }
                }
            }
        }

        //### If we found a MethodSymbol above, but which did not pass
        //### the modifier filter, we should return failure here!

        // search superclass
        Type superclass = tsym.getSuperclass();
        if (superclass.tsym != null) {
            MethodSymbol msym = searchMethod((ClassSymbol) superclass.tsym, methodName, paramTypes, searched);
            if (msym != null) {
                return msym;
            }
        }

        // search interfaces
        List<Type> intfs = tsym.getInterfaces();
        for (List<Type> l = intfs; l.nonEmpty(); l = l.tail) {
            Type intf = l.head;
            if (intf.isErroneous()) continue;
            MethodSymbol msym = searchMethod((ClassSymbol) intf.tsym, methodName, paramTypes, searched);
            if (msym != null) {
                return msym;
            }
        }

        // search enclosing class
        ClassSymbol encl = tsym.owner.enclClass();
        if (encl != null) {
            MethodSymbol msym = searchMethod(encl, methodName, paramTypes, searched);
            if (msym != null) {
                return msym;
            }
        }

        return null;
    }

    /**
     * Tests whether a method's (erased) parameter types match the given list.
     * A null list matches any method; lists containing erroneous types are
     * compared with the lenient {@link #fuzzyMatch} instead of exact equality.
     * @see com.sun.tools.javadoc.ClassDocImpl
     */
    private boolean hasParameterTypes(MethodSymbol method, List<Type> paramTypes) {
        if (paramTypes == null)
            return true;

        if (method.params().size() != paramTypes.size())
            return false;

        List<Type> methodParamTypes = types.erasureRecursive(method.asType()).getParameterTypes();

        return (Type.isErroneous(paramTypes))
            ? fuzzyMatch(paramTypes, methodParamTypes)
            : types.isSameTypes(paramTypes, methodParamTypes);
    }

    // Pairwise fuzzy comparison; assumes both lists have the same length
    // (callers have already checked sizes).
    boolean fuzzyMatch(List<Type> paramTypes, List<Type> methodParamTypes) {
        List<Type> l1 = paramTypes;
        List<Type> l2 = methodParamTypes;
        while (l1.nonEmpty()) {
            if (!fuzzyMatch(l1.head, l2.head))
                return false;
            l1 = l1.tail;
            l2 = l2.tail;
        }
        return true;
    }

    boolean fuzzyMatch(Type paramType, Type methodParamType) {
        Boolean b = fuzzyMatcher.visit(paramType, methodParamType);
        return (b == Boolean.TRUE);
    }

    // Lenient structural type comparison used when a doc reference contains
    // erroneous (unresolvable) types: error types match any class of the same
    // simple name; class types match by symbol; primitives match by tag.
    TypeRelation fuzzyMatcher = new TypeRelation() {
        @Override
        public Boolean visitType(Type t, Type s) {
            if (t == s)
                return true;

            // Partial (e.g. error) types know how to compare themselves: flip the visit.
            if (s.isPartial())
                return visit(s, t);

            switch (t.getTag()) {
            case BYTE: case CHAR: case SHORT: case INT: case LONG: case FLOAT:
            case DOUBLE: case BOOLEAN: case VOID: case BOT: case NONE:
                return t.hasTag(s.getTag());
            default:
                throw new AssertionError("fuzzyMatcher " + t.getTag());
            }
        }

        @Override
        public Boolean visitArrayType(ArrayType t, Type s) {
            if (t == s)
                return true;

            if (s.isPartial())
                return visit(s, t);

            return s.hasTag(ARRAY)
                && visit(t.elemtype, types.elemtype(s));
        }

        @Override
        public Boolean visitClassType(ClassType t, Type s) {
            if (t == s)
                return true;

            if (s.isPartial())
                return visit(s, t);

            return t.tsym == s.tsym;
        }

        @Override
        public Boolean visitErrorType(ErrorType t, Type s) {
            // An error type matches a class type with the same simple name.
            return s.hasTag(CLASS)
                && t.tsym.name == ((ClassType) s).tsym.name;
        }
    };

    /** Returns the type recorded on the leaf tree, which may be null if unattributed. */
    public TypeMirror getTypeMirror(TreePath path) {
        Tree t = path.getLeaf();
        return ((JCTree)t).type;
    }

    /** Returns a scope describing what is in scope at the leaf of the path. */
    public JavacScope getScope(TreePath path) {
        return new JavacScope(getAttrContext(path));
    }

    /**
     * Returns the text of the doc comment attached to the leaf of the path,
     * or null if there is none or the comment table is unavailable.
     */
    public String getDocComment(TreePath path) {
        CompilationUnitTree t = path.getCompilationUnit();
        Tree leaf = path.getLeaf();
        if (t instanceof JCTree.JCCompilationUnit && leaf instanceof JCTree) {
            JCCompilationUnit cu = (JCCompilationUnit) t;
            if (cu.docComments != null) {
                return cu.docComments.getCommentText((JCTree) leaf);
            }
        }
        return null;
    }

    /**
     * Returns the parsed doc-comment tree attached to the leaf of the path,
     * or null if there is none or the comment table is unavailable.
     */
    public DocCommentTree getDocCommentTree(TreePath path) {
        CompilationUnitTree t = path.getCompilationUnit();
        Tree leaf = path.getLeaf();
        if (t instanceof JCTree.JCCompilationUnit && leaf instanceof JCTree) {
            JCCompilationUnit cu = (JCCompilationUnit) t;
            if (cu.docComments != null) {
                return cu.docComments.getCommentTree((JCTree) leaf);
            }
        }
        return null;
    }

    /**
     * Tests whether the given type is accessible in the given scope.
     * Returns false (rather than throwing) for foreign Scope/TypeElement
     * implementations.
     */
    public boolean isAccessible(Scope scope, TypeElement type) {
        if (scope instanceof JavacScope && type instanceof ClassSymbol) {
            Env<AttrContext> env = ((JavacScope) scope).env;
            return resolve.isAccessible(env, (ClassSymbol)type, true);
        } else
            return false;
    }

    /**
     * Tests whether the given member, viewed as a member of the given type,
     * is accessible in the given scope. Returns false for foreign implementations.
     */
    public boolean isAccessible(Scope scope, Element member, DeclaredType type) {
        if (scope instanceof JavacScope
                && member instanceof Symbol
                && type instanceof com.sun.tools.javac.code.Type) {
            Env<AttrContext> env = ((JavacScope) scope).env;
            return resolve.isAccessible(env, (com.sun.tools.javac.code.Type)type, (Symbol)member, true);
        } else
            return false;
    }

    /**
     * Builds an attribution environment for the leaf of the path by walking from
     * the compilation unit down through classes, methods, and fields. Statements
     * and expressions are attributed on a COPY of the tree (see {@link Copier})
    * so that attribution here does not disturb the original trees.
     */
    private Env<AttrContext> getAttrContext(TreePath path) {
        if (!(path.getLeaf() instanceof JCTree))  // implicit null-check
            throw new IllegalArgumentException();

        // if we're being invoked from a Tree API client via parse/enter/analyze,
        // we need to make sure all the classes have been entered;
        // if we're being invoked from JSR 199 or JSR 269, then the classes
        // will already have been entered.
        if (javacTaskImpl != null) {
            try {
                javacTaskImpl.enter(null);
            } catch (IOException e) {
                throw new Error("unexpected error while entering symbols: " + e);
            }
        }

        JCCompilationUnit unit = (JCCompilationUnit) path.getCompilationUnit();
        Copier copier = createCopier(treeMaker.forToplevel(unit));

        Env<AttrContext> env = null;
        JCMethodDecl method = null;
        JCVariableDecl field = null;

        // Reverse the path so we can walk root-to-leaf.
        List<Tree> l = List.nil();
        TreePath p = path;
        while (p != null) {
            l = l.prepend(p.getLeaf());
            p = p.getParentPath();
        }

        for ( ; l.nonEmpty(); l = l.tail) {
            Tree tree = l.head;
            switch (tree.getKind()) {
                case COMPILATION_UNIT:
//                    System.err.println("COMP: " + ((JCCompilationUnit)tree).sourcefile);
                    env = enter.getTopLevelEnv((JCCompilationUnit)tree);
                    break;
                case ANNOTATION_TYPE:
                case CLASS:
                case ENUM:
                case INTERFACE:
//                    System.err.println("CLASS: " + ((JCClassDecl)tree).sym.getSimpleName());
                    env = enter.getClassEnv(((JCClassDecl)tree).sym);
                    break;
                case METHOD:
//                    System.err.println("METHOD: " + ((JCMethodDecl)tree).sym.getSimpleName());
                    method = (JCMethodDecl)tree;
                    env = memberEnter.getMethodEnv(method, env);
                    break;
                case VARIABLE:
//                    System.err.println("FIELD: " + ((JCVariableDecl)tree).sym.getSimpleName());
                    field = (JCVariableDecl)tree;
                    break;
                case BLOCK: {
//                    System.err.println("BLOCK: ");
                    if (method != null) {
                        // Temporarily splice a copied body into the method so attribution
                        // works on the copy; the original body is always restored.
                        try {
                            Assert.check(method.body == tree);
                            method.body = copier.copy((JCBlock)tree, (JCTree) path.getLeaf());
                            env = attribStatToTree(method.body, env, copier.leafCopy);
                        } finally {
                            method.body = (JCBlock) tree;
                        }
                    } else {
                        // e.g. an initializer block: attribute a copy directly.
                        JCBlock body = copier.copy((JCBlock)tree, (JCTree) path.getLeaf());
                        env = attribStatToTree(body, env, copier.leafCopy);
                    }
                    return env;
                }
                default:
//                    System.err.println("DEFAULT: " + tree.getKind());
                    if (field != null && field.getInitializer() == tree) {
                        // Attribute a copy of the field initializer expression.
                        env = memberEnter.getInitEnv(field, env);
                        JCExpression expr = copier.copy((JCExpression)tree, (JCTree) path.getLeaf());
                        env = attribExprToTree(expr, env, copier.leafCopy);
                        return env;
                    }
            }
        }
        return (field != null) ? memberEnter.getInitEnv(field, env) : env;
    }

    // Attributes a statement up to the given target tree, with the log's source
    // file temporarily switched to the environment's source file.
    private Env<AttrContext> attribStatToTree(JCTree stat, Env<AttrContext>env, JCTree tree) {
        JavaFileObject prev = log.useSource(env.toplevel.sourcefile);
        try {
            return attr.attribStatToTree(stat, env, tree);
        } finally {
            log.useSource(prev);
        }
    }

    // Expression counterpart of attribStatToTree; same source-file swap discipline.
    private Env<AttrContext> attribExprToTree(JCExpression expr, Env<AttrContext>env, JCTree tree) {
        JavaFileObject prev = log.useSource(env.toplevel.sourcefile);
        try {
            return attr.attribExprToTree(expr, env, tree);
        } finally {
            log.useSource(prev);
        }
    }

    /**
     * Makes a copy of a tree, noting the value resulting from copying a particular leaf.
     **/
    protected static class Copier extends TreeCopier<JCTree> {
        // The copy of the tree passed as "leaf" to copy(); null until that leaf is seen.
        JCTree leafCopy = null;

        protected Copier(TreeMaker M) {
            super(M);
        }

        @Override
        public <T extends JCTree> T copy(T t, JCTree leaf) {
            T t2 = super.copy(t, leaf);
            if (t == leaf)
                leafCopy = t2;
            return t2;
        }
    }

    // Factory hook so subclasses can substitute their own Copier.
    protected Copier createCopier(TreeMaker maker) {
        return new Copier(maker);
    }

    /**
     * Gets the original type from the ErrorType object.
     * @param errorType The errorType for which we want to get the original type.
     * @returns TypeMirror corresponding to the original type, replaced by the ErrorType.
     *          noType (type.tag == NONE) is returned if there is no original type.
     */
    public TypeMirror getOriginalType(javax.lang.model.type.ErrorType errorType) {
        if (errorType instanceof com.sun.tools.javac.code.Type.ErrorType) {
            return ((com.sun.tools.javac.code.Type.ErrorType)errorType).getOriginalType();
        }

        return com.sun.tools.javac.code.Type.noType;
    }

    /**
     * Prints a message of the specified kind at the location of the
     * tree within the provided compilation unit
     *
     * @param kind the kind of message
     * @param msg  the message, or an empty string if none
     * @param t    the tree to use as a position hint
     * @param root the compilation unit that contains tree
     */
    public void printMessage(Diagnostic.Kind kind, CharSequence msg,
            com.sun.source.tree.Tree t,
            com.sun.source.tree.CompilationUnitTree root) {
        printMessage(kind, msg, ((JCTree) t).pos(), root);
    }

    /**
     * Prints a message of the specified kind at the location of the doc tree
     * within the given doc comment and compilation unit.
     */
    public void printMessage(Diagnostic.Kind kind, CharSequence msg,
            com.sun.source.doctree.DocTree t,
            com.sun.source.doctree.DocCommentTree c,
            com.sun.source.tree.CompilationUnitTree root) {
        printMessage(kind, msg, ((DCTree) t).pos((DCDocComment) c), root);
    }

    // Shared implementation: temporarily switches the log to the unit's source
    // file (dropping the position if there is no source file), then routes the
    // message through the log under the "proc.messager" diagnostic key.
    private void printMessage(Diagnostic.Kind kind, CharSequence msg,
            JCDiagnostic.DiagnosticPosition pos,
            com.sun.source.tree.CompilationUnitTree root) {
        JavaFileObject oldSource = null;
        JavaFileObject newSource = null;

        newSource = root.getSourceFile();
        if (newSource == null) {
            pos = null;
        } else {
            oldSource = log.useSource(newSource);
        }

        try {
            switch (kind) {
            case ERROR:
                // Preserve the log's multiple-errors flag around the report.
                boolean prev = log.multipleErrors;
                try {
                    log.error(pos, "proc.messager", msg.toString());
                } finally {
                    log.multipleErrors = prev;
                }
                break;

            case WARNING:
                log.warning(pos, "proc.messager", msg.toString());
                break;

            case MANDATORY_WARNING:
                log.mandatoryWarning(pos, "proc.messager", msg.toString());
                break;

            default:
                log.note(pos, "proc.messager", msg.toString());
            }
        } finally {
            if (oldSource != null)
                log.useSource(oldSource);
        }
    }

    /**
     * Returns the least upper bound of the exception types of a multi-catch
     * clause, or the declared type itself for a single-type catch parameter.
     */
    @Override
    public TypeMirror getLub(CatchTree tree) {
        JCCatch ct = (JCCatch) tree;
        JCVariableDecl v = ct.param;
        if (v.type != null && v.type.getKind() == TypeKind.UNION) {
            UnionClassType ut = (UnionClassType) v.type;
            return ut.getLub();
        } else {
            return v.type;
        }
    }
}
openjdk/jdk8
36,455
jdk/src/share/classes/java/util/jar/Pack200.java
/* * Copyright (c) 2003, 2013, Oracle and/or its affiliates. All rights reserved. * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER. * * This code is free software; you can redistribute it and/or modify it * under the terms of the GNU General Public License version 2 only, as * published by the Free Software Foundation. Oracle designates this * particular file as subject to the "Classpath" exception as provided * by Oracle in the LICENSE file that accompanied this code. * * This code is distributed in the hope that it will be useful, but WITHOUT * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License * version 2 for more details (a copy is included in the LICENSE file that * accompanied this code). * * You should have received a copy of the GNU General Public License version * 2 along with this work; if not, write to the Free Software Foundation, * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA. * * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA * or visit www.oracle.com if you need additional information or have any * questions. */ package java.util.jar; import java.util.SortedMap; import java.io.InputStream; import java.io.OutputStream; import java.io.File; import java.io.IOException; import java.beans.PropertyChangeListener; /** * Transforms a JAR file to or from a packed stream in Pack200 format. * Please refer to Network Transfer Format JSR 200 Specification at * <a href=http://jcp.org/aboutJava/communityprocess/review/jsr200/index.html>http://jcp.org/aboutJava/communityprocess/review/jsr200/index.html</a> * <p> * Typically the packer engine is used by application developers * to deploy or host JAR files on a website. * The unpacker engine is used by deployment applications to * transform the byte-stream back to JAR format. 
* <p> * Here is an example using packer and unpacker: * <pre>{@code * import java.util.jar.Pack200; * import java.util.jar.Pack200.*; * ... * // Create the Packer object * Packer packer = Pack200.newPacker(); * * // Initialize the state by setting the desired properties * Map p = packer.properties(); * // take more time choosing codings for better compression * p.put(Packer.EFFORT, "7"); // default is "5" * // use largest-possible archive segments (>10% better compression). * p.put(Packer.SEGMENT_LIMIT, "-1"); * // reorder files for better compression. * p.put(Packer.KEEP_FILE_ORDER, Packer.FALSE); * // smear modification times to a single value. * p.put(Packer.MODIFICATION_TIME, Packer.LATEST); * // ignore all JAR deflation requests, * // transmitting a single request to use "store" mode. * p.put(Packer.DEFLATE_HINT, Packer.FALSE); * // discard debug attributes * p.put(Packer.CODE_ATTRIBUTE_PFX+"LineNumberTable", Packer.STRIP); * // throw an error if an attribute is unrecognized * p.put(Packer.UNKNOWN_ATTRIBUTE, Packer.ERROR); * // pass one class file uncompressed: * p.put(Packer.PASS_FILE_PFX+0, "mutants/Rogue.class"); * try { * JarFile jarFile = new JarFile("/tmp/testref.jar"); * FileOutputStream fos = new FileOutputStream("/tmp/test.pack"); * // Call the packer * packer.pack(jarFile, fos); * jarFile.close(); * fos.close(); * * File f = new File("/tmp/test.pack"); * FileOutputStream fostream = new FileOutputStream("/tmp/test.jar"); * JarOutputStream jostream = new JarOutputStream(fostream); * Unpacker unpacker = Pack200.newUnpacker(); * // Call the unpacker * unpacker.unpack(f, jostream); * // Must explicitly close the output. * jostream.close(); * } catch (IOException ioe) { * ioe.printStackTrace(); * } * }</pre> * <p> * A Pack200 file compressed with gzip can be hosted on HTTP/1.1 web servers. * The deployment applications can use "Accept-Encoding=pack200-gzip". 
This * indicates to the server that the client application desires a version of * the file encoded with Pack200 and further compressed with gzip. Please * refer to <a href="{@docRoot}/../technotes/guides/deployment/deployment-guide/pack200.html">Java Deployment Guide</a> for more details and * techniques. * <p> * Unless otherwise noted, passing a <tt>null</tt> argument to a constructor or * method in this class will cause a {@link NullPointerException} to be thrown. * * @author John Rose * @author Kumar Srinivasan * @since 1.5 */ public abstract class Pack200 { private Pack200() {} //prevent instantiation // Static methods of the Pack200 class. /** * Obtain new instance of a class that implements Packer. * <ul> * <li><p>If the system property <tt>java.util.jar.Pack200.Packer</tt> * is defined, then the value is taken to be the fully-qualified name * of a concrete implementation class, which must implement Packer. * This class is loaded and instantiated. If this process fails * then an unspecified error is thrown.</p></li> * * <li><p>If an implementation has not been specified with the system * property, then the system-default implementation class is instantiated, * and the result is returned.</p></li> * </ul> * * <p>Note: The returned object is not guaranteed to operate * correctly if multiple threads use it at the same time. * A multi-threaded application should either allocate multiple * packer engines, or else serialize use of one engine with a lock. * * @return A newly allocated Packer engine. */ public synchronized static Packer newPacker() { return (Packer) newInstance(PACK_PROVIDER); } /** * Obtain new instance of a class that implements Unpacker. * <ul> * <li><p>If the system property <tt>java.util.jar.Pack200.Unpacker</tt> * is defined, then the value is taken to be the fully-qualified * name of a concrete implementation class, which must implement Unpacker. * The class is loaded and instantiated. 
If this process fails * then an unspecified error is thrown.</p></li> * * <li><p>If an implementation has not been specified with the * system property, then the system-default implementation class * is instantiated, and the result is returned.</p></li> * </ul> * * <p>Note: The returned object is not guaranteed to operate * correctly if multiple threads use it at the same time. * A multi-threaded application should either allocate multiple * unpacker engines, or else serialize use of one engine with a lock. * * @return A newly allocated Unpacker engine. */ public static Unpacker newUnpacker() { return (Unpacker) newInstance(UNPACK_PROVIDER); } // Interfaces /** * The packer engine applies various transformations to the input JAR file, * making the pack stream highly compressible by a compressor such as * gzip or zip. An instance of the engine can be obtained * using {@link #newPacker}. * The high degree of compression is achieved * by using a number of techniques described in the JSR 200 specification. * Some of the techniques are sorting, re-ordering and co-location of the * constant pool. * <p> * The pack engine is initialized to an initial state as described * by their properties below. * The initial state can be manipulated by getting the * engine properties (using {@link #properties}) and storing * the modified properties on the map. * The resource files will be passed through with no changes at all. * The class files will not contain identical bytes, since the unpacker * is free to change minor class file features such as constant pool order. * However, the class files will be semantically identical, * as specified in * <cite>The Java&trade; Virtual Machine Specification</cite>. * <p> * By default, the packer does not change the order of JAR elements. * Also, the modification time and deflation hint of each * JAR element is passed unchanged. * (Any other ZIP-archive information, such as extra attributes * giving Unix file permissions, are lost.) 
* <p> * Note that packing and unpacking a JAR will in general alter the * bytewise contents of classfiles in the JAR. This means that packing * and unpacking will in general invalidate any digital signatures * which rely on bytewise images of JAR elements. In order both to sign * and to pack a JAR, you must first pack and unpack the JAR to * "normalize" it, then compute signatures on the unpacked JAR elements, * and finally repack the signed JAR. * Both packing steps should * use precisely the same options, and the segment limit may also * need to be set to "-1", to prevent accidental variation of segment * boundaries as class file sizes change slightly. * <p> * (Here's why this works: Any reordering the packer does * of any classfile structures is idempotent, so the second packing * does not change the orderings produced by the first packing. * Also, the unpacker is guaranteed by the JSR 200 specification * to produce a specific bytewise image for any given transmission * ordering of archive elements.) * <p> * In order to maintain backward compatibility, the pack file's version is * set to accommodate the class files present in the input JAR file. In * other words, the pack file version will be the latest, if the class files * are the latest and conversely the pack file version will be the oldest * if the class file versions are also the oldest. For intermediate class * file versions the corresponding pack file version will be used. * For example: * If the input JAR-files are solely comprised of 1.5 (or lesser) * class files, a 1.5 compatible pack file is produced. This will also be * the case for archives that have no class files. * If the input JAR-files contains a 1.6 class file, then the pack file * version will be set to 1.6. * <p> * Note: Unless otherwise noted, passing a <tt>null</tt> argument to a * constructor or method in this class will cause a {@link NullPointerException} * to be thrown. 
* <p> * @since 1.5 */ public interface Packer { /** * This property is a numeral giving the estimated target size N * (in bytes) of each archive segment. * If a single input file requires more than N bytes, * it will be given its own archive segment. * <p> * As a special case, a value of -1 will produce a single large * segment with all input files, while a value of 0 will * produce one segment for each class. * Larger archive segments result in less fragmentation and * better compression, but processing them requires more memory. * <p> * The size of each segment is estimated by counting the size of each * input file to be transmitted in the segment, along with the size * of its name and other transmitted properties. * <p> * The default is -1, which means the packer will always create a single * segment output file. In cases where extremely large output files are * generated, users are strongly encouraged to use segmenting or break * up the input file into smaller JARs. * <p> * A 10Mb JAR packed without this limit will * typically pack about 10% smaller, but the packer may require * a larger Java heap (about ten times the segment limit). */ String SEGMENT_LIMIT = "pack.segment.limit"; /** * If this property is set to {@link #TRUE}, the packer will transmit * all elements in their original order within the source archive. * <p> * If it is set to {@link #FALSE}, the packer may reorder elements, * and also remove JAR directory entries, which carry no useful * information for Java applications. * (Typically this enables better compression.) * <p> * The default is {@link #TRUE}, which preserves the input information, * but may cause the transmitted archive to be larger than necessary. */ String KEEP_FILE_ORDER = "pack.keep.file.order"; /** * If this property is set to a single decimal digit, the packer will * use the indicated amount of effort in compressing the archive. 
* Level 1 may produce somewhat larger size and faster compression speed, * while level 9 will take much longer but may produce better compression. * <p> * The special value 0 instructs the packer to copy through the * original JAR file directly, with no compression. The JSR 200 * standard requires any unpacker to understand this special case * as a pass-through of the entire archive. * <p> * The default is 5, investing a modest amount of time to * produce reasonable compression. */ String EFFORT = "pack.effort"; /** * If this property is set to {@link #TRUE} or {@link #FALSE}, the packer * will set the deflation hint accordingly in the output archive, and * will not transmit the individual deflation hints of archive elements. * <p> * If this property is set to the special string {@link #KEEP}, the packer * will attempt to determine an independent deflation hint for each * available element of the input archive, and transmit this hint separately. * <p> * The default is {@link #KEEP}, which preserves the input information, * but may cause the transmitted archive to be larger than necessary. * <p> * It is up to the unpacker implementation * to take action upon the hint to suitably compress the elements of * the resulting unpacked jar. * <p> * The deflation hint of a ZIP or JAR element indicates * whether the element was deflated or stored directly. */ String DEFLATE_HINT = "pack.deflate.hint"; /** * If this property is set to the special string {@link #LATEST}, * the packer will attempt to determine the latest modification time, * among all the available entries in the original archive or the latest * modification time of all the available entries in each segment. * This single value will be transmitted as part of the segment and applied * to all the entries in each segment, {@link #SEGMENT_LIMIT}. * <p> * This can marginally decrease the transmitted size of the * archive, at the expense of setting all installed files to a single * date. 
* <p> * If this property is set to the special string {@link #KEEP}, * the packer transmits a separate modification time for each input * element. * <p> * The default is {@link #KEEP}, which preserves the input information, * but may cause the transmitted archive to be larger than necessary. * <p> * It is up to the unpacker implementation to take action to suitably * set the modification time of each element of its output file. * @see #SEGMENT_LIMIT */ String MODIFICATION_TIME = "pack.modification.time"; /** * Indicates that a file should be passed through bytewise, with no * compression. Multiple files may be specified by specifying * additional properties with distinct strings appended, to * make a family of properties with the common prefix. * <p> * There is no pathname transformation, except * that the system file separator is replaced by the JAR file * separator '/'. * <p> * The resulting file names must match exactly as strings with their * occurrences in the JAR file. * <p> * If a property value is a directory name, all files under that * directory will be passed also. * <p> * Examples: * <pre>{@code * Map p = packer.properties(); * p.put(PASS_FILE_PFX+0, "mutants/Rogue.class"); * p.put(PASS_FILE_PFX+1, "mutants/Wolverine.class"); * p.put(PASS_FILE_PFX+2, "mutants/Storm.class"); * # Pass all files in an entire directory hierarchy: * p.put(PASS_FILE_PFX+3, "police/"); * }</pre> */ String PASS_FILE_PFX = "pack.pass.file."; /// Attribute control. /** * Indicates the action to take when a class-file containing an unknown * attribute is encountered. Possible values are the strings {@link #ERROR}, * {@link #STRIP}, and {@link #PASS}. * <p> * The string {@link #ERROR} means that the pack operation * as a whole will fail, with an exception of type <code>IOException</code>. * The string * {@link #STRIP} means that the attribute will be dropped. 
* The string * {@link #PASS} means that the whole class-file will be passed through * (as if it were a resource file) without compression, with a suitable warning. * This is the default value for this property. * <p> * Examples: * <pre>{@code * Map p = pack200.getProperties(); * p.put(UNKNOWN_ATTRIBUTE, ERROR); * p.put(UNKNOWN_ATTRIBUTE, STRIP); * p.put(UNKNOWN_ATTRIBUTE, PASS); * }</pre> */ String UNKNOWN_ATTRIBUTE = "pack.unknown.attribute"; /** * When concatenated with a class attribute name, * indicates the format of that attribute, * using the layout language specified in the JSR 200 specification. * <p> * For example, the effect of this option is built in: * <code>pack.class.attribute.SourceFile=RUH</code>. * <p> * The special strings {@link #ERROR}, {@link #STRIP}, and {@link #PASS} are * also allowed, with the same meaning as {@link #UNKNOWN_ATTRIBUTE}. * This provides a way for users to request that specific attributes be * refused, stripped, or passed bitwise (with no class compression). * <p> * Code like this might be used to support attributes for JCOV: * <pre><code> * Map p = packer.properties(); * p.put(CODE_ATTRIBUTE_PFX+"CoverageTable", "NH[PHHII]"); * p.put(CODE_ATTRIBUTE_PFX+"CharacterRangeTable", "NH[PHPOHIIH]"); * p.put(CLASS_ATTRIBUTE_PFX+"SourceID", "RUH"); * p.put(CLASS_ATTRIBUTE_PFX+"CompilationID", "RUH"); * </code></pre> * <p> * Code like this might be used to strip debugging attributes: * <pre><code> * Map p = packer.properties(); * p.put(CODE_ATTRIBUTE_PFX+"LineNumberTable", STRIP); * p.put(CODE_ATTRIBUTE_PFX+"LocalVariableTable", STRIP); * p.put(CLASS_ATTRIBUTE_PFX+"SourceFile", STRIP); * </code></pre> */ String CLASS_ATTRIBUTE_PFX = "pack.class.attribute."; /** * When concatenated with a field attribute name, * indicates the format of that attribute. * For example, the effect of this option is built in: * <code>pack.field.attribute.Deprecated=</code>. 
* The special strings {@link #ERROR}, {@link #STRIP}, and * {@link #PASS} are also allowed. * @see #CLASS_ATTRIBUTE_PFX */ String FIELD_ATTRIBUTE_PFX = "pack.field.attribute."; /** * When concatenated with a method attribute name, * indicates the format of that attribute. * For example, the effect of this option is built in: * <code>pack.method.attribute.Exceptions=NH[RCH]</code>. * The special strings {@link #ERROR}, {@link #STRIP}, and {@link #PASS} * are also allowed. * @see #CLASS_ATTRIBUTE_PFX */ String METHOD_ATTRIBUTE_PFX = "pack.method.attribute."; /** * When concatenated with a code attribute name, * indicates the format of that attribute. * For example, the effect of this option is built in: * <code>pack.code.attribute.LocalVariableTable=NH[PHOHRUHRSHH]</code>. * The special strings {@link #ERROR}, {@link #STRIP}, and {@link #PASS} * are also allowed. * @see #CLASS_ATTRIBUTE_PFX */ String CODE_ATTRIBUTE_PFX = "pack.code.attribute."; /** * The unpacker's progress as a percentage, as periodically * updated by the unpacker. * Values of 0 - 100 are normal, and -1 indicates a stall. * Progress can be monitored by polling the value of this * property. * <p> * At a minimum, the unpacker must set progress to 0 * at the beginning of a packing operation, and to 100 * at the end. */ String PROGRESS = "pack.progress"; /** The string "keep", a possible value for certain properties. * @see #DEFLATE_HINT * @see #MODIFICATION_TIME */ String KEEP = "keep"; /** The string "pass", a possible value for certain properties. * @see #UNKNOWN_ATTRIBUTE * @see #CLASS_ATTRIBUTE_PFX * @see #FIELD_ATTRIBUTE_PFX * @see #METHOD_ATTRIBUTE_PFX * @see #CODE_ATTRIBUTE_PFX */ String PASS = "pass"; /** The string "strip", a possible value for certain properties. * @see #UNKNOWN_ATTRIBUTE * @see #CLASS_ATTRIBUTE_PFX * @see #FIELD_ATTRIBUTE_PFX * @see #METHOD_ATTRIBUTE_PFX * @see #CODE_ATTRIBUTE_PFX */ String STRIP = "strip"; /** The string "error", a possible value for certain properties. 
* @see #UNKNOWN_ATTRIBUTE * @see #CLASS_ATTRIBUTE_PFX * @see #FIELD_ATTRIBUTE_PFX * @see #METHOD_ATTRIBUTE_PFX * @see #CODE_ATTRIBUTE_PFX */ String ERROR = "error"; /** The string "true", a possible value for certain properties. * @see #KEEP_FILE_ORDER * @see #DEFLATE_HINT */ String TRUE = "true"; /** The string "false", a possible value for certain properties. * @see #KEEP_FILE_ORDER * @see #DEFLATE_HINT */ String FALSE = "false"; /** The string "latest", a possible value for certain properties. * @see #MODIFICATION_TIME */ String LATEST = "latest"; /** * Get the set of this engine's properties. * This set is a "live view", so that changing its * contents immediately affects the Packer engine, and * changes from the engine (such as progress indications) * are immediately visible in the map. * * <p>The property map may contain pre-defined implementation * specific and default properties. Users are encouraged to * read the information and fully understand the implications, * before modifying pre-existing properties. * <p> * Implementation specific properties are prefixed with a * package name associated with the implementor, beginning * with <tt>com.</tt> or a similar prefix. * All property names beginning with <tt>pack.</tt> and * <tt>unpack.</tt> are reserved for use by this API. * <p> * Unknown properties may be ignored or rejected with an * unspecified error, and invalid entries may cause an * unspecified error to be thrown. * * <p> * The returned map implements all optional {@link SortedMap} operations * @return A sorted association of property key strings to property * values. */ SortedMap<String,String> properties(); /** * Takes a JarFile and converts it into a Pack200 archive. * <p> * Closes its input but not its output. (Pack200 archives are appendable.) * @param in a JarFile * @param out an OutputStream * @exception IOException if an error is encountered. 
*/ void pack(JarFile in, OutputStream out) throws IOException ; /** * Takes a JarInputStream and converts it into a Pack200 archive. * <p> * Closes its input but not its output. (Pack200 archives are appendable.) * <p> * The modification time and deflation hint attributes are not available, * for the JAR manifest file and its containing directory. * * @see #MODIFICATION_TIME * @see #DEFLATE_HINT * @param in a JarInputStream * @param out an OutputStream * @exception IOException if an error is encountered. */ void pack(JarInputStream in, OutputStream out) throws IOException ; /** * Registers a listener for PropertyChange events on the properties map. * This is typically used by applications to update a progress bar. * * <p> The default implementation of this method does nothing and has * no side-effects.</p> * * <p><b>WARNING:</b> This method is omitted from the interface * declaration in all subset Profiles of Java SE that do not include * the {@code java.beans} package. </p> * @see #properties * @see #PROGRESS * @param listener An object to be invoked when a property is changed. * @deprecated The dependency on {@code PropertyChangeListener} creates * a significant impediment to future modularization of the * Java platform. This method will be removed in a future * release. * Applications that need to monitor progress of the packer * can poll the value of the {@link #PROGRESS PROGRESS} * property instead. */ @Deprecated default void addPropertyChangeListener(PropertyChangeListener listener) { } /** * Remove a listener for PropertyChange events, added by * the {@link #addPropertyChangeListener}. * * <p> The default implementation of this method does nothing and has * no side-effects.</p> * * <p><b>WARNING:</b> This method is omitted from the interface * declaration in all subset Profiles of Java SE that do not include * the {@code java.beans} package. </p> * * @see #addPropertyChangeListener * @param listener The PropertyChange listener to be removed. 
* @deprecated The dependency on {@code PropertyChangeListener} creates * a significant impediment to future modularization of the * Java platform. This method will be removed in a future * release. */ @Deprecated default void removePropertyChangeListener(PropertyChangeListener listener) { } } /** * The unpacker engine converts the packed stream to a JAR file. * An instance of the engine can be obtained * using {@link #newUnpacker}. * <p> * Every JAR file produced by this engine will include the string * "<tt>PACK200</tt>" as a zip file comment. * This allows a deployer to detect if a JAR archive was packed and unpacked. * <p> * Note: Unless otherwise noted, passing a <tt>null</tt> argument to a * constructor or method in this class will cause a {@link NullPointerException} * to be thrown. * <p> * This version of the unpacker is compatible with all previous versions. * @since 1.5 */ public interface Unpacker { /** The string "keep", a possible value for certain properties. * @see #DEFLATE_HINT */ String KEEP = "keep"; /** The string "true", a possible value for certain properties. * @see #DEFLATE_HINT */ String TRUE = "true"; /** The string "false", a possible value for certain properties. * @see #DEFLATE_HINT */ String FALSE = "false"; /** * Property indicating that the unpacker should * ignore all transmitted values for DEFLATE_HINT, * replacing them by the given value, {@link #TRUE} or {@link #FALSE}. * The default value is the special string {@link #KEEP}, * which asks the unpacker to preserve all transmitted * deflation hints. */ String DEFLATE_HINT = "unpack.deflate.hint"; /** * The unpacker's progress as a percentage, as periodically * updated by the unpacker. * Values of 0 - 100 are normal, and -1 indicates a stall. * Progress can be monitored by polling the value of this * property. * <p> * At a minimum, the unpacker must set progress to 0 * at the beginning of a packing operation, and to 100 * at the end. 
*/ String PROGRESS = "unpack.progress"; /** * Get the set of this engine's properties. This set is * a "live view", so that changing its * contents immediately affects the Packer engine, and * changes from the engine (such as progress indications) * are immediately visible in the map. * * <p>The property map may contain pre-defined implementation * specific and default properties. Users are encouraged to * read the information and fully understand the implications, * before modifying pre-existing properties. * <p> * Implementation specific properties are prefixed with a * package name associated with the implementor, beginning * with <tt>com.</tt> or a similar prefix. * All property names beginning with <tt>pack.</tt> and * <tt>unpack.</tt> are reserved for use by this API. * <p> * Unknown properties may be ignored or rejected with an * unspecified error, and invalid entries may cause an * unspecified error to be thrown. * * @return A sorted association of option key strings to option values. */ SortedMap<String,String> properties(); /** * Read a Pack200 archive, and write the encoded JAR to * a JarOutputStream. * The entire contents of the input stream will be read. * It may be more efficient to read the Pack200 archive * to a file and pass the File object, using the alternate * method described below. * <p> * Closes its input but not its output. (The output can accumulate more elements.) * @param in an InputStream. * @param out a JarOutputStream. * @exception IOException if an error is encountered. */ void unpack(InputStream in, JarOutputStream out) throws IOException; /** * Read a Pack200 archive, and write the encoded JAR to * a JarOutputStream. * <p> * Does not close its output. (The output can accumulate more elements.) * @param in a File. * @param out a JarOutputStream. * @exception IOException if an error is encountered. 
*/ void unpack(File in, JarOutputStream out) throws IOException; /** * Registers a listener for PropertyChange events on the properties map. * This is typically used by applications to update a progress bar. * * <p> The default implementation of this method does nothing and has * no side-effects.</p> * * <p><b>WARNING:</b> This method is omitted from the interface * declaration in all subset Profiles of Java SE that do not include * the {@code java.beans} package. </p> * * @see #properties * @see #PROGRESS * @param listener An object to be invoked when a property is changed. * @deprecated The dependency on {@code PropertyChangeListener} creates * a significant impediment to future modularization of the * Java platform. This method will be removed in a future * release. * Applications that need to monitor progress of the * unpacker can poll the value of the {@link #PROGRESS * PROGRESS} property instead. */ @Deprecated default void addPropertyChangeListener(PropertyChangeListener listener) { } /** * Remove a listener for PropertyChange events, added by * the {@link #addPropertyChangeListener}. * * <p> The default implementation of this method does nothing and has * no side-effects.</p> * * <p><b>WARNING:</b> This method is omitted from the interface * declaration in all subset Profiles of Java SE that do not include * the {@code java.beans} package. </p> * * @see #addPropertyChangeListener * @param listener The PropertyChange listener to be removed. * @deprecated The dependency on {@code PropertyChangeListener} creates * a significant impediment to future modularization of the * Java platform. This method will be removed in a future * release. */ @Deprecated default void removePropertyChangeListener(PropertyChangeListener listener) { } } // Private stuff.... 
private static final String PACK_PROVIDER = "java.util.jar.Pack200.Packer"; private static final String UNPACK_PROVIDER = "java.util.jar.Pack200.Unpacker"; private static Class<?> packerImpl; private static Class<?> unpackerImpl; private synchronized static Object newInstance(String prop) { String implName = "(unknown)"; try { Class<?> impl = (PACK_PROVIDER.equals(prop))? packerImpl: unpackerImpl; if (impl == null) { // The first time, we must decide which class to use. implName = java.security.AccessController.doPrivileged( new sun.security.action.GetPropertyAction(prop,"")); if (implName != null && !implName.equals("")) impl = Class.forName(implName); else if (PACK_PROVIDER.equals(prop)) impl = com.sun.java.util.jar.pack.PackerImpl.class; else impl = com.sun.java.util.jar.pack.UnpackerImpl.class; } // We have a class. Now instantiate it. return impl.newInstance(); } catch (ClassNotFoundException e) { throw new Error("Class not found: " + implName + ":\ncheck property " + prop + " in your properties file.", e); } catch (InstantiationException e) { throw new Error("Could not instantiate: " + implName + ":\ncheck property " + prop + " in your properties file.", e); } catch (IllegalAccessException e) { throw new Error("Cannot access class: " + implName + ":\ncheck property " + prop + " in your properties file.", e); } } }
apache/trafodion
36,141
dcs/src/main/java/org/trafodion/dcs/util/SqlUtils.java
/* * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.trafodion.dcs.util; import java.math.BigDecimal; import java.math.BigInteger; import java.sql.SQLException; import java.sql.Types; import java.nio.ByteBuffer; import java.nio.ByteOrder; import java.util.Hashtable; import java.util.Arrays; import java.io.UnsupportedEncodingException; import org.trafodion.dcs.Constants; import org.trafodion.dcs.servermt.ServerConstants; import org.trafodion.dcs.servermt.serverDriverInputOutput.Descriptor2; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; public class SqlUtils { private static final Log LOG = LogFactory.getLog(SqlUtils.class); private SqlUtils() { } static private Hashtable valueToCharset; static { valueToCharset = new Hashtable(11); valueToCharset.put(new Integer(1), "ISO8859_1"); // ISO valueToCharset.put(new Integer(10), "MS932"); // SJIS valueToCharset.put(new Integer(11), "UTF-16BE"); // UCS2 valueToCharset.put(new Integer(12), "EUCJP"); // EUCJP valueToCharset.put(new Integer(13), "MS950"); // BIG5 valueToCharset.put(new Integer(14), "GB18030"); // GB18030 valueToCharset.put(new Integer(15), "UTF-8"); // UTF8 valueToCharset.put(new Integer(16), "MS949"); // MB_KSC5601 valueToCharset.put(new Integer(17), "GB2312"); // GB2312 } static private Hashtable charsetToValue; static { charsetToValue = new Hashtable(11); charsetToValue.put("ISO8859_1", new Integer(1)); // ISO charsetToValue.put("MS932", new Integer(10)); // SJIS 
charsetToValue.put("UTF-16BE", new Integer(11)); // UCS2 charsetToValue.put("EUCJP", new Integer(12)); // EUCJP charsetToValue.put("MS950", new Integer(13)); // BIG5 charsetToValue.put("GB18030", new Integer(14)); // GB18030 charsetToValue.put("UTF-8", new Integer(15)); // UTF8 charsetToValue.put("MS949", new Integer(16)); // MB_KSC5601 charsetToValue.put("GB2312", new Integer(17)); // GB2312 } static final int SQLCHARSETCODE_UNKNOWN = 0; static final String SQLCHARSET_UNKNOWN = "UNKNOWN"; // these are the only real column types static public final int SQLCHARSETCODE_ISO88591 = 1; static public final String SQLCHARSET_ISO88591 = "ISO88591"; static public final int SQLCHARSETCODE_UNICODE = 11; static public final String SQLCHARSET_UNICODE = "UCS2"; // ISO_MAPPING values static public final int SQLCHARSETCODE_SJIS = 10; static public final int SQLCHARSETCODE_UTF8 = 15; static public String getCharsetName(int charset) { String ret = (String) valueToCharset.get(new Integer(charset)); if (ret == null) ret = SQLCHARSET_UNKNOWN; return ret; } static public int getCharsetValue(String charset) { Integer i = (Integer) charsetToValue.get(charset); int ret; if (i == null) ret = SQLCHARSETCODE_UNKNOWN; else ret = i.intValue(); return ret; } static public String getComponentId(int componentId){ String str = "UNKNOWN Component [" + componentId + "]"; switch (componentId){ case ServerConstants.DCS_MASTER_COMPONENT: str = "DCS_MASTER_COMPONENT [" + componentId + "]"; break; case ServerConstants.SQL_COMPONENT: str = "SQL_COMPONENT [" + componentId + "]"; break; case ServerConstants.ODBC_SRVR_COMPONENT: str = "ODBC_SRVR_COMPONENT [" + componentId + "]"; break; case ServerConstants.DRVR_COMPONENT: str = "DRVR_COMPONENT [" + componentId + "]"; break; case ServerConstants.APP_COMPONENT: str = "APP_COMPONENT [" + componentId + "]"; break; case ServerConstants.JDBC_DRVR_COMPONENT: str = "JDBC_DRVR_COMPONENT [" + componentId + "]"; break; case ServerConstants.LINUX_DRVR_COMPONENT: str = 
"LINUX_DRVR_COMPONENT [" + componentId + "]"; break; case ServerConstants.DOT_NET_DRVR_COMPONENT: str = "DOT_NET_DRVR_COMPONENT [" + componentId + "]"; break; case ServerConstants.WIN_UNICODE_DRVR_COMPONENT: str = "WIN_UNICODE_DRVR_COMPONENT [" + componentId + "]"; break; case ServerConstants.LINUX_UNICODE_DRVR_COMPONENT: str = "LINUX_UNICODE_DRVR_COMPONENT [" + componentId + "]"; break; default: } return str; } static public int getSqlStmtType(int stmtType) { int retQueryType; switch(stmtType){ case ServerConstants.TYPE_SELECT: retQueryType = ServerConstants.SQL_SELECT_NON_UNIQUE; break; case ServerConstants.TYPE_UPDATE: retQueryType = ServerConstants.SQL_UPDATE_NON_UNIQUE; break; case ServerConstants.TYPE_DELETE: retQueryType = ServerConstants.SQL_DELETE_NON_UNIQUE; break; case ServerConstants.TYPE_INSERT: retQueryType = ServerConstants.SQL_INSERT_NON_UNIQUE; break; default: retQueryType = stmtType; } return retQueryType; } static public short getSqlStmtType(String str) { // // Kludge to determin if the type of statement. // String tokens[] = str.split("[^a-zA-Z]+", 3); short rt1 = ServerConstants.TYPE_UNKNOWN; String str3 = ""; // // If there are no separators (i.e. no spaces, {, =, etc.) in front of // the // first token, then the first token is the key word we are looking for. // Else, the first token is an empty string (i.e. split thinks the first // token is the empty string followed by a separator), and the second // token is the key word we are looking for. 
// if (tokens[0].length() > 0) { str3 = tokens[0].toUpperCase(); } else { str3 = tokens[1].toUpperCase(); } if ((str3.equals("SELECT")) || (str3.equals("SHOWSHAPE")) || (str3.equals("INVOKE")) || (str3.equals("SHOWCONTROL")) || (str3.equals("SHOWPLAN"))) { rt1 = ServerConstants.SQL_SELECT_NON_UNIQUE; // rt1 = ServerConstants.TYPE_SELECT; } else if (str3.equals("UPDATE")) { rt1 = ServerConstants.SQL_UPDATE_NON_UNIQUE; // rt1 = ServerConstants.TYPE_UPDATE; } else if (str3.equals("DELETE")) { rt1 = ServerConstants.SQL_DELETE_NON_UNIQUE; // rt1 = ServerConstants.TYPE_DELETE; } else if (str3.equals("INSERT") || (str.equals("UPSERT"))) { if (str.indexOf('?') == -1) { rt1 = ServerConstants.SQL_INSERT_NON_UNIQUE; // rt1 = ServerConstants.TYPE_INSERT; } else { rt1 = ServerConstants.TYPE_INSERT_PARAM; } } else if (str3.equals("EXPLAIN")) { rt1 = ServerConstants.TYPE_EXPLAIN; } else if (str3.equals("CREATE")) { rt1 = ServerConstants.TYPE_CREATE; } else if (str3.equals("GRANT")) { rt1 = ServerConstants.TYPE_GRANT; } else if (str3.equals("DROP")) { rt1 = ServerConstants.TYPE_DROP; } else if (str3.equals("CALL")) { rt1 = ServerConstants.TYPE_CALL; } else if (str3.equals("EXPLAIN")) { rt1 = ServerConstants.TYPE_EXPLAIN; } else if (str3.equals("CONTROL")) { rt1 = ServerConstants.TYPE_CONTROL; } else { rt1 = ServerConstants.TYPE_UNKNOWN; } return rt1; } static public String getSqlCharsetName(int code) { String str = ServerConstants.sqlCharsetSTRING_UNKNOWN; switch (code){ case ServerConstants.sqlCharsetCODE_ISO88591: return ServerConstants.sqlCharsetSTRING_ISO88591; case ServerConstants.sqlCharsetCODE_KANJI: return ServerConstants.sqlCharsetSTRING_KANJI; case ServerConstants.sqlCharsetCODE_KSC5601: return ServerConstants.sqlCharsetSTRING_KSC5601; case ServerConstants.sqlCharsetCODE_SJIS: return ServerConstants.sqlCharsetSTRING_SJIS; case ServerConstants.sqlCharsetCODE_UCS2: return ServerConstants.sqlCharsetSTRING_UNICODE; default: } return str; } static public String 
getSqlError(int retcode) { String rc; // SQL_NO_DATA_FOUND can be defined as SQL_NO_DATA if (retcode==ServerConstants.SQL_NO_DATA_FOUND) { if (ServerConstants.SQL_NO_DATA_FOUND==ServerConstants.SQL_NO_DATA) return "SQL_NO_DATA_FOUND|SQL_NO_DATA" ; return("SQL_NO_DATA_FOUND"); } switch (retcode) { case ServerConstants.SQL_SUCCESS: return("SQL_SUCCESS"); case ServerConstants.SQL_SUCCESS_WITH_INFO: return("SQL_SUCCESS_WITH_INFO"); case ServerConstants.SQL_NO_DATA: return("SQL_NO_DATA"); case ServerConstants.SQL_ERROR: return("SQL_ERROR"); /* case ServerConstants.SQL_INVALID_HANDLE: return("SQL_INVALID_HANDLE"); case ServerConstants.SQL_STILL_EXECUTING: return("SQL_STILL_EXECUTING"); case ServerConstants.SQL_NEED_DATA: return("SQL_NEED_DATA"); case ServerConstants.STMT_ID_MISMATCH_ERROR: return("STMT_ID_MISMATCH_ERROR"); case ServerConstants.DIALOGUE_ID_NULL_ERROR: return("DIALOGUE_ID_NULL_ERROR"); case ServerConstants.STMT_ID_NULL_ERROR: return("STMT_ID_NULL_ERROR"); case ServerConstants.NOWAIT_PENDING: return("NOWAIT_PENDING"); case ServerConstants.STMT_ALREADY_EXISTS: return("STMT_ALREADY_EXISTS"); case ServerConstants.STMT_DOES_NOT_EXIST: return("STMT_DOES_NOT_EXIST"); case ServerConstants.STMT_IS_NOT_CALL: return("STMT_IS_NOT_CALL"); case ServerConstants.RS_INDEX_OUT_OF_RANGE: return("RS_INDEX_OUT_OF_RANGE"); case ServerConstants.RS_ALREADY_EXISTS: return("RS_ALREADY_EXISTS"); case ServerConstants.RS_ALLOC_ERROR: return("RS_ALLOC_ERROR"); case ServerConstants.RS_DOES_NOT_EXIST: return("RS_DOES_NOT_EXIST"); case ServerConstants.PROGRAM_ERROR: return("PROGRAM_ERROR"); case ServerConstants.ODBC_SERVER_ERROR: return("ODBC_SERVER_ERROR"); case ServerConstants.ODBC_RG_ERROR: return("ODBC_RG_ERROR"); case ServerConstants.ODBC_RG_WARNING: return("ODBC_RG_WARNING"); case ServerConstants.SQL_RETRY_COMPILE_AGAIN: return("SQL_RETRY_COMPILE_AGAIN"); case SQL_QUERY_CANCELLED: return("SQL_QUERY_CANCELLED"); case ServerConstants.CANCEL_NOT_POSSIBLE: return("CANCEL_NOT_POSSIBLE"); 
case ServerConstants.NOWAIT_ERROR: return("NOWAIT_ERROR"); */ } rc = "Unknown SQL Error (" + retcode + ")"; return rc; } static public String getSqlStatementType(short stmtType) { String rc; if (stmtType == ServerConstants.TYPE_UNKNOWN) return "TYPE_UNKNOWN"; rc = ""; if ((stmtType & ServerConstants.TYPE_SELECT) != 0) rc = rc + "|TYPE_SELECT"; if ((stmtType & ServerConstants.TYPE_UPDATE) != 0) rc = rc + "|TYPE_UPDATE"; if ((stmtType & ServerConstants.TYPE_DELETE) != 0) rc = rc + "|TYPE_DELETE"; if ((stmtType & ServerConstants.TYPE_INSERT) != 0) rc = rc + "|TYPE_INSERT"; if ((stmtType & ServerConstants.TYPE_EXPLAIN) != 0) rc = rc + "|TYPE_EXPLAIN"; if ((stmtType & ServerConstants.TYPE_CREATE) != 0) rc = rc + "|TYPE_CREATE"; if ((stmtType & ServerConstants.TYPE_GRANT) != 0) rc = rc + "|TYPE_GRANT"; if ((stmtType & ServerConstants.TYPE_DROP) != 0) rc = rc + "|TYPE_DROP"; if ((stmtType & ServerConstants.TYPE_CALL) != 0) rc = rc + "|TYPE_CALL"; if (rc.length()==0) rc = "UNKNOWN(" + stmtType +")"; return rc; } static public String getSqlQueryStatementType(int stmtType) { switch (stmtType) { case ServerConstants.INVALID_SQL_QUERY_STMT_TYPE: return("INVALID_SQL_QUERY_STMT_TYPE"); case ServerConstants.SQL_OTHER: return("SQL_OTHER"); case ServerConstants.SQL_UNKNOWN: return("SQL_UNKNOWN"); case ServerConstants.SQL_SELECT_UNIQUE: return("SQL_SELECT_UNIQUE"); case ServerConstants.SQL_SELECT_NON_UNIQUE: return("SQL_SELECT_NON_UNIQUE"); case ServerConstants.SQL_INSERT_UNIQUE: return("SQL_INSERT_UNIQUE"); case ServerConstants.SQL_INSERT_NON_UNIQUE: return("SQL_INSERT_NON_UNIQUE"); case ServerConstants.SQL_UPDATE_UNIQUE: return("SQL_UPDATE_UNIQUE"); case ServerConstants.SQL_UPDATE_NON_UNIQUE: return("SQL_UPDATE_NON_UNIQUE"); case ServerConstants.SQL_DELETE_UNIQUE: return("SQL_DELETE_UNIQUE"); case ServerConstants.SQL_DELETE_NON_UNIQUE: return("SQL_DELETE_NON_UNIQUE"); case ServerConstants.SQL_CONTROL: return("SQL_CONTROL"); case ServerConstants.SQL_SET_TRANSACTION: 
return("SQL_SET_TRANSACTION"); case ServerConstants.SQL_SET_CATALOG: return("SQL_SET_CATALOG"); case ServerConstants.SQL_SET_SCHEMA: return("SQL_SET_SCHEMA"); case ServerConstants.SQL_CALL_NO_RESULT_SETS: return("SQL_CALL_NO_RESULT_SETS"); case ServerConstants.SQL_CALL_WITH_RESULT_SETS: return("SQL_CALL_WITH_RESULT_SETS"); case ServerConstants.SQL_SP_RESULT_SET: return("SQL_SP_RESULT_SET"); } return "Unknown (" + stmtType + ")"; }; static public String getSqlAttrType(int code) { String rc; switch (code) { case ServerConstants.SQL_ATTR_CURSOR_HOLDABLE: return("SQL_ATTR_CURSOR_HOLDABLE"); case ServerConstants.SQL_ATTR_INPUT_ARRAY_MAXSIZE: return("SQL_ATTR_INPUT_ARRAY_MAXSIZE"); case ServerConstants.SQL_ATTR_QUERY_TYPE: return("SQL_ATTR_QUERY_TYPE"); case ServerConstants.SQL_ATTR_MAX_RESULT_SETS: return("SQL_ATTR_MAX_RESULT_SETS"); } rc = "Unknown (" + code + ")"; return rc; } static public String getSqlDataType(int code) { String rc; switch (code) { case ServerConstants.SQLTYPECODE_CHAR: return("SQLTYPECODE_CHAR"); case ServerConstants.SQLTYPECODE_NUMERIC: return("SQLTYPECODE_NUMERIC"); case ServerConstants.SQLTYPECODE_NUMERIC_UNSIGNED: return("SQLTYPECODE_NUMERIC_UNSIGNED"); case ServerConstants.SQLTYPECODE_DECIMAL: return("SQLTYPECODE_DECIMAL"); case ServerConstants.SQLTYPECODE_DECIMAL_UNSIGNED: return("SQLTYPECODE_DECIMAL_UNSIGNED"); case ServerConstants.SQLTYPECODE_DECIMAL_LARGE: return("SQLTYPECODE_DECIMAL_LARGE"); case ServerConstants.SQLTYPECODE_DECIMAL_LARGE_UNSIGNED: return("SQLTYPECODE_DECIMAL_LARGE_UNSIGNED"); case ServerConstants.SQLTYPECODE_INTEGER: return("SQLTYPECODE_INTEGER"); case ServerConstants.SQLTYPECODE_INTEGER_UNSIGNED: return("SQLTYPECODE_INTEGER_UNSIGNED"); case ServerConstants.SQLTYPECODE_LARGEINT: return("SQLTYPECODE_LARGEINT"); case ServerConstants.SQLTYPECODE_SMALLINT: return("SQLTYPECODE_SMALLINT"); case ServerConstants.SQLTYPECODE_SMALLINT_UNSIGNED: return("SQLTYPECODE_SMALLINT_UNSIGNED"); case 
ServerConstants.SQLTYPECODE_BPINT_UNSIGNED: return("SQLTYPECODE_BPINT_UNSIGNED"); case ServerConstants.SQLTYPECODE_TDM_FLOAT: return("SQLTYPECODE_TDM_FLOAT"); case ServerConstants.SQLTYPECODE_IEEE_FLOAT: return("SQLTYPECODE_IEEE_FLOAT"); case ServerConstants.SQLTYPECODE_TDM_REAL: return("SQLTYPECODE_TDM_REAL"); case ServerConstants.SQLTYPECODE_IEEE_REAL: return("SQLTYPECODE_IEEE_REAL"); case ServerConstants.SQLTYPECODE_TDM_DOUBLE: return("SQLTYPECODE_TDM_DOUBLE"); case ServerConstants.SQLTYPECODE_IEEE_DOUBLE: return("SQLTYPECODE_IEEE_DOUBLE"); case ServerConstants.SQLTYPECODE_DATETIME: return("SQLTYPECODE_DATETIME"); case ServerConstants.SQLTYPECODE_INTERVAL: return("SQLTYPECODE_INTERVAL"); case ServerConstants.SQLTYPECODE_VARCHAR: return("SQLTYPECODE_VARCHAR"); case ServerConstants.SQLTYPECODE_VARCHAR_WITH_LENGTH: return("SQLTYPECODE_VARCHAR_WITH_LENGTH"); case ServerConstants.SQLTYPECODE_VARCHAR_LONG: return("SQLTYPECODE_VARCHAR_LONG"); case ServerConstants.SQLTYPECODE_BIT: return("SQLTYPECODE_BIT"); case ServerConstants.SQLTYPECODE_BITVAR: return("SQLTYPECODE_BITVAR"); }; rc = "UNKNOWN (" + code + ")"; return rc; } static public String getDataType(int dataType) { switch (dataType) { case Types.SMALLINT: return "SMALLINT"; case Types.INTEGER: return "INTEGER"; case Types.BIGINT: return "BIGINT"; case Types.REAL: return "REAL"; case Types.FLOAT: return "FLOAT"; case Types.DOUBLE: return "DOUBLE PRECISION"; case Types.NUMERIC: return "NUMERIC"; case Types.DECIMAL: return "DECIMAL"; case Types.CHAR: return "CHAR"; case Types.VARCHAR: return "VARCHAR"; case Types.LONGVARCHAR: return "LONG VARCHAR"; case Types.DATE: return "DATE"; case Types.TIME: return "TIME"; case Types.TIMESTAMP: return "TIMESTAMP"; case Types.BLOB: return "BLOB"; case Types.CLOB: return "CLOB"; case Types.OTHER: return "OTHER"; case Types.BIT: return "BIT"; case Types.TINYINT: return "TINYINT"; default: return "UNKNOWN (" + dataType + ")"; } } static public BigDecimal getBigDecimalValue(Object 
paramValue) throws SQLException { BigDecimal tmpbd; if (paramValue instanceof Long) { tmpbd = BigDecimal.valueOf(((Long) paramValue).longValue()); } else if (paramValue instanceof Integer) { tmpbd = BigDecimal.valueOf(((Integer) paramValue).longValue()); } else if (paramValue instanceof BigDecimal) { tmpbd = (BigDecimal) paramValue; } else if (paramValue instanceof String) { String sVal = (String) paramValue; if (sVal.equals("true") == true) { sVal = "1"; } else if (sVal.equals("false") == true) { sVal = "0"; } tmpbd = new BigDecimal(sVal); } else if (paramValue instanceof Float) { tmpbd = new BigDecimal(paramValue.toString()); } else if (paramValue instanceof Double) { tmpbd = new BigDecimal(((Double) paramValue).toString()); } else if (paramValue instanceof Boolean) { tmpbd = BigDecimal.valueOf(((((Boolean) paramValue).booleanValue() == true) ? 1 : 0)); } else if (paramValue instanceof Byte) { tmpbd = BigDecimal.valueOf(((Byte) paramValue).longValue()); } else if (paramValue instanceof Short) { tmpbd = BigDecimal.valueOf(((Short) paramValue).longValue()); } else if (paramValue instanceof Integer) { tmpbd = BigDecimal.valueOf(((Integer) paramValue).longValue()); // For LOB Support SB: 10/25/2004 /* * else if (paramValue instanceof DataWrapper) tmpbd = * BigDecimal.valueOf(((DataWrapper)paramValue).longValue); */ } else { throw new SQLException("object_type_not_supported"); } return tmpbd; } // end getBigDecimalValue static public BigDecimal convertSQLBigNumToBigDecimal(ByteBuffer sourceData, int len, int scale, boolean isUnSigned) { String strVal = ""; // our final String boolean negative = false; // we need the data in an array which can hold UNSIGNED 16 bit values // in java we dont have unsigned datatypes so 32-bit signed is the best // we can do int[] dataInShorts = new int[len / 2]; for (int i = 0; i < dataInShorts.length; i++){ dataInShorts[i] = ByteBufferUtils.extractUShort(sourceData); // copy // the // data LOG.debug("dataInShorts[" + i + "] :" + 
dataInShorts[i]); } if (isUnSigned == false){ if ((dataInShorts[dataInShorts.length - 1] & 0xFF00) > 0){ negative = ((dataInShorts[dataInShorts.length - 1] & 0x8000) > 0); dataInShorts[dataInShorts.length - 1] &= 0x7FFF; // force sign to 0, continue } else { negative = ((dataInShorts[dataInShorts.length - 1] & 0x0080) > 0); dataInShorts[dataInShorts.length - 1] &= 0xFF7F; // force sign to 0, continue } // normally } int curPos = dataInShorts.length - 1; // start at the end while (curPos >= 0 && dataInShorts[curPos] == 0) // get rid of any trailing 0's curPos--; int remainder = 0; long temp; // we need to use a LONG since we will have to hold up to // 32-bit UNSIGNED values // we now have the huge value stored in 2 bytes chunks // we will divide by 10000 many times, converting the remainder to // String // when we are left with a single chunk <10000 we will handle it using a // special case while (curPos >= 0 || dataInShorts[0] >= 10000) { // start on the right, divide the 16 bit value by 10000 // use the remainder as the upper 16 bits for the next division for (int j = curPos; j >= 0; j--) { // these operations got messy when java tried to infer what size // to store the value in // leave these as separate operations for now...always casting // back to a 64 bit value to avoid sign problems temp = remainder; temp &= 0xFFFF; temp = temp << 16; temp += dataInShorts[j]; dataInShorts[j] = (int) (temp / 10000); remainder = (int) (temp % 10000); } // if we are done with the current 16bits, move on if (dataInShorts[curPos] == 0) curPos--; // go through the remainder and add each digit to the final String for (int j = 0; j < 4; j++) { strVal = (remainder % 10) + strVal; remainder /= 10; } } // when we finish the above loop we still have 1 <10000 value to include remainder = dataInShorts[0]; for (int j = 0; j < 4; j++) { strVal = (remainder % 10) + strVal; remainder /= 10; } BigInteger bi = new BigInteger(strVal); // create a java BigInt if (negative && isUnSigned == false) 
bi = bi.negate(); return new BigDecimal(bi, scale); // create a new BigDecimal with the // descriptor's scale } static public byte[] formatSqlT4Output(Descriptor2 dsc, byte[] sqlarray, long curOutPos, byte[] outValues, ByteOrder bo) throws UnsupportedEncodingException{ ByteBuffer bb = null; byte[] dst = null; int len = 0; int offset = 0; int insNull = 0; String[] stDate = null; String[] stTime = null; String[] stTimestamp = null; String[] stNanos = null; Integer year = 0; Integer month = 0; Integer day = 0; Integer hour = 0; Integer minutes = 0; Integer seconds = 0; Integer nanos = 0; String setString = ""; short setShort = 0; int setInt = 0; long setLong = 0L; boolean setSign = false; String charSet = ""; int precision = dsc.getPrecision(); int scale = dsc.getScale(); int datetimeCode = dsc.getDatetimeCode(); int FSDataType = dsc.getFsDataType(); int OdbcDataType = dsc.getOdbcDataType(); int dataCharSet = dsc.getSqlCharset(); int length = dsc.getMaxLen(); int dataType = dsc.getDataType(); if(dataCharSet == SqlUtils.SQLCHARSETCODE_UNICODE) charSet = "UTF-16LE"; else charSet = SqlUtils.getCharsetName(dataCharSet); len = sqlarray.length; ByteBuffer tb = ByteBuffer.wrap(sqlarray).order(bo); if(LOG.isDebugEnabled()) LOG.debug("formatSqlT4Output: -----------"); switch(dataType){ case ServerConstants.SQLTYPECODE_CHAR: if(LOG.isDebugEnabled()) LOG.debug("formatSqlT4Output: SQLTYPECODE_CHAR :" + Arrays.toString(sqlarray)); break; case ServerConstants.SQLTYPECODE_VARCHAR: case ServerConstants.SQLTYPECODE_VARCHAR_LONG: if(LOG.isDebugEnabled()) LOG.debug("formatSqlT4Output: SQLTYPECODE_VARCHAR/VARCHAR_LONG :" + Arrays.toString(sqlarray)); break; case ServerConstants.SQLTYPECODE_VARCHAR_WITH_LENGTH: //-601 if( length > Short.MAX_VALUE ){ len = tb.getInt() + 4; } else { len = tb.getShort() + 2; } if(LOG.isDebugEnabled()) LOG.debug("formatSqlT4Output: SQLTYPECODE_VARCHAR :" + Arrays.toString(sqlarray)); break; case ServerConstants.SQLTYPECODE_DATETIME: 
LOG.debug("formatSqlT4Output: SQLTYPECODE_DATETIME :" + Arrays.toString(sqlarray)); switch(datetimeCode){ case ServerConstants.SQLDTCODE_DATE: case ServerConstants.SQLDTCODE_TIME: case ServerConstants.SQLDTCODE_TIMESTAMP: len = tb.getShort(); dst = new byte[len]; System.arraycopy(sqlarray, 2, dst, 0, len); setString = new String(dst,"UTF8"); tb.clear(); switch(datetimeCode){ case ServerConstants.SQLDTCODE_DATE: stDate = setString.split("-"); year = Integer.valueOf(stDate[0]); month = Integer.valueOf(stDate[1]); day = Integer.valueOf(stDate[2]); tb.putShort(year.shortValue()); tb.put(month.byteValue()); tb.put(day.byteValue()); break; case ServerConstants.SQLDTCODE_TIME: stTime = setString.split(":"); hour = Integer.valueOf(stTime[0]); minutes = Integer.valueOf(stTime[1]); seconds = Integer.valueOf(stTime[2]); tb.put(hour.byteValue()); tb.put(minutes.byteValue()); tb.put(seconds.byteValue()); break; case ServerConstants.SQLDTCODE_TIMESTAMP: stTimestamp = setString.split(" "); stDate = stTimestamp[0].split("-"); stNanos = stTimestamp[1].split("\\."); stTime = stNanos[0].split(":"); year = Integer.valueOf(stDate[0]); month = Integer.valueOf(stDate[1]); day = Integer.valueOf(stDate[2]); hour = Integer.valueOf(stTime[0]); minutes = Integer.valueOf(stTime[1]); seconds = Integer.valueOf(stTime[2]); nanos = Integer.valueOf(stNanos[1]); tb.putShort(year.shortValue()); tb.put(month.byteValue()); tb.put(day.byteValue()); tb.put(hour.byteValue()); tb.put(minutes.byteValue()); tb.put(seconds.byteValue()); tb.putInt(nanos); break; case ServerConstants.SQLDTCODE_MPDATETIME: break; } default: } break; case ServerConstants.SQLTYPECODE_INTERVAL: if(LOG.isDebugEnabled()) LOG.debug("formatSqlT4Output: SQLTYPECODE_INTERVAL :" + Arrays.toString(sqlarray)); len = tb.getShort(); dst = new byte[len]; System.arraycopy(sqlarray, 2, dst, 0, len); Arrays.fill(sqlarray, (byte)0); tb.clear(); if(dst[0] != '-'){ len++; tb.put((byte)' '); } tb.put(dst, 0, dst.length); if(LOG.isDebugEnabled()) 
LOG.debug("formatSqlT4Output: sqlarray :" + Arrays.toString(sqlarray)); break; case ServerConstants.SQLTYPECODE_INTEGER: //4 if(LOG.isDebugEnabled()) LOG.debug("formatSqlT4Output: SQLTYPECODE_INTEGER :" + Arrays.toString(sqlarray)); break; case ServerConstants.SQLTYPECODE_INTEGER_UNSIGNED: if(LOG.isDebugEnabled()) LOG.debug("formatSqlT4Output: SQLTYPECODE_INTEGER_UNSIGNED :" + Arrays.toString(sqlarray)); break; case ServerConstants.SQLTYPECODE_SMALLINT: if(LOG.isDebugEnabled()) LOG.debug("formatSqlT4Output: SQLTYPECODE_SMALLINT :" + Arrays.toString(sqlarray)); break; case ServerConstants.SQLTYPECODE_SMALLINT_UNSIGNED: if(LOG.isDebugEnabled()) LOG.debug("formatSqlT4Output: SQLTYPECODE_SMALLINT_UNSIGNED :" + Arrays.toString(sqlarray)); break; case ServerConstants.SQLTYPECODE_LARGEINT: if(LOG.isDebugEnabled()) LOG.debug("formatSqlT4Output: SQLTYPECODE_LARGEINT :" + Arrays.toString(sqlarray)); break; case ServerConstants.SQLTYPECODE_DECIMAL: case ServerConstants.SQLTYPECODE_DECIMAL_UNSIGNED: if(LOG.isDebugEnabled()) LOG.debug("formatSqlT4Output: SQLTYPECODE_DECIMAL :" + Arrays.toString(sqlarray)); break; case ServerConstants.SQLTYPECODE_IEEE_REAL: //6 if(LOG.isDebugEnabled()) LOG.debug("formatSqlT4Output: SQLTYPECODE_IEEE_REAL :" + Arrays.toString(sqlarray)); break; case ServerConstants.SQLTYPECODE_IEEE_FLOAT: //7 if(LOG.isDebugEnabled()) LOG.debug("formatSqlT4Output: SQLTYPECODE_IEEE_FLOAT :" + Arrays.toString(sqlarray)); break; case ServerConstants.SQLTYPECODE_IEEE_DOUBLE: //8 if(LOG.isDebugEnabled()) LOG.debug("formatSqlT4Output: SQLTYPECODE_IEEE_DOUBLE :" + Arrays.toString(sqlarray)); break; case ServerConstants.SQLTYPECODE_NUMERIC: case ServerConstants.SQLTYPECODE_NUMERIC_UNSIGNED: if(LOG.isDebugEnabled()) LOG.debug("formatSqlT4Output: SQLTYPECODE_NUMERIC :" + Arrays.toString(sqlarray)); switch (len) { case 2: setShort = tb.getShort(); setLong = setShort; if(setLong < 0){ setLong = -1L * setLong; setSign = true; tb.clear(); tb.put((byte) ((setLong) & 0xff)); 
tb.put((byte) (((setLong >>> 8) | 0x80) & 0xff)); } break; case 4: setInt = tb.getInt(); setLong = setInt; if(setLong < 0){ setLong = -1L * setLong; setSign = true; tb.clear(); tb.put((byte) ((setLong) & 0xff)); tb.put((byte) ((setLong >>> 8) & 0xff)); tb.put((byte) ((setLong >>> 16) & 0xff)); tb.put((byte) (((setLong >>> 24) | 0x80) & 0xff)); } break; case 8: setLong = tb.getLong(); if(setLong < 0){ setLong = -1L * setLong; setSign = true; tb.clear(); tb.put((byte) ((setLong) & 0xff)); tb.put((byte) ((setLong >>> 8) & 0xff)); tb.put((byte) ((setLong >>> 16) & 0xff)); tb.put((byte) ((setLong >>> 24) & 0xff)); tb.put((byte) ((setLong >>> 32) & 0xff)); tb.put((byte) ((setLong >>> 40) & 0xff)); tb.put((byte) ((setLong >>> 48) & 0xff)); tb.put((byte) (((setLong >>> 56) | 0x80) & 0xff)); } break; } break; case ServerConstants.SQLTYPECODE_DECIMAL_LARGE: case ServerConstants.SQLTYPECODE_DECIMAL_LARGE_UNSIGNED: case ServerConstants.SQLTYPECODE_BIT: case ServerConstants.SQLTYPECODE_BITVAR: case ServerConstants.SQLTYPECODE_BPINT_UNSIGNED: default: if(LOG.isDebugEnabled()) LOG.debug("formatSqlT4Output: default :" + Arrays.toString(sqlarray)); break; } if(LOG.isDebugEnabled()){ LOG.debug("formatSqlT4Output: offset :" + offset); LOG.debug("formatSqlT4Output: curOutPos :" + curOutPos); LOG.debug("formatSqlT4Output: len :" + len); } System.arraycopy(sqlarray, offset, outValues, (int)curOutPos, len); return outValues; } }
apache/sis
36,426
endorsed/src/org.apache.sis.metadata/main/org/apache/sis/metadata/sql/internal/shared/ScriptRunner.java
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.sis.metadata.sql.internal.shared;

import java.util.Map;
import java.util.HashMap;
import java.util.Locale;
import java.util.regex.Pattern;
import java.util.regex.Matcher;
import java.io.FileNotFoundException;
import java.io.EOFException;
import java.io.IOException;
import java.io.BufferedReader;
import java.io.LineNumberReader;
import java.io.InputStreamReader;
import java.io.InputStream;
import java.io.StringReader;
import java.nio.charset.StandardCharsets;
import java.sql.Statement;
import java.sql.Connection;
import java.sql.SQLException;
import java.sql.DatabaseMetaData;
import org.apache.sis.util.ArgumentChecks;
import org.apache.sis.util.CharSequences;
import org.apache.sis.util.internal.shared.Strings;
import org.apache.sis.util.resources.Errors;


/**
 * Run SQL scripts. The script is expected to use a standardized syntax, where the {@value #QUOTE} character
 * is used for quoting text, the {@value #IDENTIFIER_QUOTE} character is used for quoting identifier and the
 * {@value #END_OF_STATEMENT} character is used at the end for every SQL statement. Those characters will be
 * replaced on-the-fly by the characters actually used by the database engine.
 *
 * <p><strong>This class is not intended for executing arbitrary SQL scripts.</strong>
 * This class is for executing known scripts bundled with Apache SIS or in an extension
 * (for example the scripts for creating the EPSG database). We do not try to support SQL
 * functionalities other than what we need for those scripts.</p>
 *
 * @author  Martin Desruisseaux (Geomatys)
 * @author  Johann Sorel (Geomatys)
 */
public class ScriptRunner implements AutoCloseable {
    /**
     * The sequence for SQL comments. Leading lines starting by those characters will be ignored.
     */
    private static final String COMMENT = "--";

    /**
     * The quote character expected to be found in the SQL script.
     * This character shall not be a whitespace or a Unicode identifier part.
     */
    private static final char QUOTE = '\'';

    /**
     * The quote character for identifiers expected to be found in the SQL script.
     * This character shall not be a whitespace or a Unicode identifier part.
     */
    private static final char IDENTIFIER_QUOTE = '"';

    /**
     * The character at the end of statements.
     * This character shall not be a whitespace or a Unicode identifier part.
     */
    private static final char END_OF_STATEMENT = ';';

    /**
     * The presumed dialect spoken by the database.
     */
    private final Dialect dialect;

    /**
     * A mapping of words to replace. The replacements are performed only for occurrences outside identifiers or texts.
     * See {@link #addReplacement(String, String)} for more explanation.
     *
     * @see #addReplacement(String, String)
     */
    private final Map<String,String> replacements;

    /**
     * The quote character for identifiers actually used in the database,
     * as determined by {@link DatabaseMetaData#getIdentifierQuoteString()}.
     */
    protected final String identifierQuote;

    /**
     * {@code true} if the database supports enums.
     * Example:
     *
     * {@snippet lang="sql" :
     *     CREATE TYPE metadata."CI_DateTypeCode" AS ENUM ('creation', 'publication');
     *     CREATE CAST (VARCHAR AS metadata."CI_DateTypeCode") WITH INOUT AS ASSIGNMENT;
     *     }
     *
     * <p>Notes per database product:</p>
     * <ul>
     *   <li><b>PostgreSQL:</b> while enumeration were introduced in PostgreSQL 8.3,
     *       we require PostgreSQL 8.4 because we need the {@code CAST … WITH INOUT} feature.</li>
     *   <li><b>Other databases:</b> assumed not supported.</li>
     * </ul>
     *
     * @see #statementsToSkip
     */
    protected final boolean isEnumTypeSupported;

    /**
     * The maximum number of rows allowed per {@code "INSERT"} statement.
     * This is 1 if the database does not support multi-rows insertion.
     * For other database, this is set to an arbitrary "reasonable" value since attempts to insert
     * too many rows with a single statement on Derby database cause a {@link StackOverflowError}.
     */
    private final int maxRowsPerInsert;

    /**
     * The statement created from a connection to the database.
     */
    private final Statement statement;

    /**
     * If non-null, the SQL statements to skip (typically because not supported by the database).
     * The matcher is built as an alternation of many regular expressions separated by the pipe symbol.
     * The list of statements to skip depends on which {@code is*Supported} fields are set to {@code true}:
     *
     * <ul>
     *   <li>{@link #isEnumTypeSupported} for {@code "CREATE TYPE …"} or {@code "CREATE CAST …"} statements.</li>
     *   <li>{@link Dialect#supportsGrantUsageOnSchema} for {@code "GRANT USAGE ON SCHEMA …"} statements.</li>
     *   <li>{@link Dialect#supportsGrantSelectOnTable} for {@code "GRANT SELECT ON TABLE …"} statements.</li>
     *   <li>{@link Dialect#supportsComment} for {@code "COMMENT ON …"} statements.</li>
     * </ul>
     */
    private Matcher statementsToSkip;

    /**
     * The regular expression to use for building {@link #statementsToSkip}.
     * At most one of {@code regexOfStmtToSkip} and {@code statementsToSkip} shall be non-null.
     * Both fields may be null if there are no statements to skip.
     */
    private StringBuilder regexOfStmtToSkip;

    /**
     * Name of the SQL script under execution, or {@code null} if unknown.
     * This is used only for error reporting.
     */
    private String currentFile;

    /**
     * The line number of the SQL statement being executed. The first line in a file is numbered 1.
     * This is used only for error reporting.
     */
    private int currentLine;

    /**
     * The SQL statement being executed.
     * This is used only for error reporting.
     */
    private String currentSQL;

    /**
     * Creates a new runner which will execute the statements using the given connection.
     *
     * <p>Some {@code maxRowsPerInsert} parameter values of interest:</p>
     * <ul>
     *   <li>A value of 0 means to create only the schema without inserting any data in them.</li>
     *   <li>A value of 1 means to use one separated {@code INSERT INTO} statement for each row, which may be slow.</li>
     *   <li>A value of 100 is a value which have been found empirically as giving good results.</li>
     *   <li>A value of {@link Integer#MAX_VALUE} means to not perform any attempt to limit the number of rows in an
     *       {@code INSERT INTO} statement. Note that this causes {@link StackOverflowError} in some JDBC driver.</li>
     * </ul>
     *
     * The {@code schemaToCreate} argument is ignored if not supported by the database.
     *
     * @param  connection        the connection to the database.
     * @param  schemaToCreate    schema to create and set as the default schema, or {@code null} if none.
     * @param  maxRowsPerInsert  maximum number of rows per {@code "INSERT INTO"} statement.
     * @throws SQLException if an error occurred while creating a SQL statement.
     */
    public ScriptRunner(final Connection connection, final String schemaToCreate, final int maxRowsPerInsert)
            throws SQLException
    {
        ArgumentChecks.ensurePositive("maxRowsPerInsert", maxRowsPerInsert);
        final DatabaseMetaData metadata;
        this.maxRowsPerInsert = maxRowsPerInsert;
        replacements    = new HashMap<>();
        metadata        = connection.getMetaData();
        dialect         = Dialect.guess(metadata);
        identifierQuote = metadata.getIdentifierQuoteString();
        if (schemaToCreate != null && metadata.supportsSchemasInTableDefinitions()) {
            try (Statement stmt = connection.createStatement()) {
                stmt.executeUpdate("CREATE SCHEMA " + identifierQuote + schemaToCreate + identifierQuote);
                if (dialect.supportsGrantUsageOnSchema()) {
                    stmt.executeUpdate("GRANT USAGE ON SCHEMA " + identifierQuote + schemaToCreate + identifierQuote + " TO PUBLIC");
                }
            }
            connection.setSchema(schemaToCreate);       // Must be set before the next call to `createStatement()` below.
        }
        statement = connection.createStatement();
        switch (dialect) {
            default: {
                isEnumTypeSupported = false;
                break;
            }
            case POSTGRESQL: {
                final int version = metadata.getDatabaseMajorVersion();
                isEnumTypeSupported = (version == 8) ? metadata.getDatabaseMinorVersion() >= 4 : version >= 8;
                break;
            }
            case HSQL: {
                isEnumTypeSupported = false;
                /*
                 * HSQLDB stores tables in memory by default. For storing the tables on files, we have to
                 * use "CREATE CACHED TABLE" statement, which is HSQL-specific. For avoiding SQL dialect,
                 * the following statement change the default setting on current connection.
                 *
                 * Reference: http://hsqldb.org/doc/guide/dbproperties-chapt.html#dpc_db_props_url
                 */
                statement.execute("SET DATABASE DEFAULT TABLE TYPE CACHED");
                break;
            }
        }
        /*
         * Now build the list of statements to skip, depending of which features are supported by the database.
         * WARNING: do not use capturing group here, because some subclasses (e.g. EPSGInstaller) will use their
         * own capturing groups. A non-capturing group is declared by "(?:A|B)" instead of a plain "(A|B)".
         */
        if (!isEnumTypeSupported) {
            addStatementToSkip("CREATE\\s+(?:TYPE|CAST)\\s+.*");
        }
        if (!dialect.supportsAllGrants()) {
            // If schema grants are supported, only table grants need to be skipped (and vice-versa).
            addStatementToSkip("GRANT\\s+\\w+\\s+ON\\s+");
            if (dialect.supportsGrantUsageOnSchema()) {
                regexOfStmtToSkip.append("TABLE");
            } else if (dialect.supportsGrantSelectOnTable()) {
                regexOfStmtToSkip.append("SCHEMA");
            } else {
                regexOfStmtToSkip.append("(?:TABLE|SCHEMA)");
            }
            regexOfStmtToSkip.append("\\s+.*");
        }
        if (!dialect.supportsComment()) {
            addStatementToSkip("COMMENT\\s+ON\\s+.*");
        }
        if (!dialect.supportsAlterTableWithAddConstraint()) {
            addStatementToSkip("ALTER\\s+TABLE\\s+\\w+\\s+ADD\\s+CONSTRAINT\\s+.*");
        }
    }

    /**
     * Returns the connection to the database.
     *
     * @return the connection.
     * @throws SQLException if the connection cannot be obtained.
     */
    protected final Connection getConnection() throws SQLException {
        return statement.getConnection();
    }

    /**
     * Adds a statement to skip. By default {@code ScriptRunner} ignores the following statements:
     *
     * <ul>
     *   <li>{@code "CREATE TYPE …"} or {@code "CREATE CAST …"} if {@link #isEnumTypeSupported} is {@code false}.</li>
     *   <li>{@code "GRANT USAGE ON SCHEMA …"} if {@link Dialect#supportsGrantUsageOnSchema} is {@code false}.</li>
     *   <li>{@code "GRANT SELECT ON TABLE …"} if {@link Dialect#supportsGrantSelectOnTable} is {@code false}.</li>
     *   <li>{@code "COMMENT ON …"} if {@link Dialect#supportsComment} is {@code false}.</li>
     * </ul>
     *
     * This method can be invoked for ignoring some additional statements.
     *
     * @param regex  regular expression of the statement to ignore.
     */
    protected final void addStatementToSkip(final String regex) {
        // Once the matcher has been compiled (lazily, elsewhere), the regex cannot grow anymore.
        if (statementsToSkip != null) {
            throw new IllegalStateException();
        }
        if (regexOfStmtToSkip == null) {
            regexOfStmtToSkip = new StringBuilder(regex);
        } else {
            regexOfStmtToSkip.append('|').append(regex);
        }
    }

    /**
     * Declares that a word in the <abbr>SQL</abbr> script needs to be replaced by the given word.
     * The replacement is performed only for occurrences outside quoted identifiers or texts.
     * For replacement of texts or identifiers, see {@link #editText(StringBuilder, int, int)}
     * and {@link #editQuotedIdentifier(StringBuilder, int, int)} instead.
     *
     * <h4>Example</h4>
     * This is used for mapping the <abbr>EPSG</abbr> table names from the mixed-cases convention used in
     * the {@code org.apache.sis.referencing.epsg} module to the lower-cases convention which is actually
     * used if the user installed the <abbr>EPSG</abbr> database manually (there is more differences than
     * only the case).
     *
     * <p>Another example is the replacement of <abbr>SQL</abbr> keywords used in the scripts by keywords
     * understood by the database. For example, if a database does not support the {@code "TEXT"} data type,
     * this method can be used for replacing {@code "TEXT"} by {@code "LONG VARCHAR"}.</p>
     *
     * <h4>Limitation</h4>
     * The {@code inScript} word to replace must be a single word with no space.
     * If the text to replace contains two words (for example {@code "CREATE TABLE"}), then revert
     * commit {@code bceb569558bfb7e3cf1a14aaf9261e786db06856} for bringing back this functionality.
     *
     * @param  inScript     the single word in the script which need to be replaced.
     * @param  replacement  the word(s) to use instead of {@code inScript} word.
     */
    protected final void addReplacement(final String inScript, final String replacement) {
        if (replacements.put(inScript, replacement) != null) {
            throw new IllegalArgumentException(inScript);
        }
    }

    /**
     * Returns the word to use instead of the given one.
     * If there is no replacement, then {@code inScript} is returned.
     *
     * @param  inScript  the word in the script which need to be replaced.
     * @return the word to use instead.
     */
    protected final String getReplacement(final String inScript) {
        return replacements.getOrDefault(inScript, inScript);
    }

    /**
     * Runs the given SQL script.
     * Lines are read and grouped up to the terminal {@value #END_OF_STATEMENT} character, then sent to the database.
     *
     * @param  statement  the SQL statements to execute.
     * @return the number of rows added or modified as a result of the statement execution.
     * @throws IOException if an error occurred while reading the input (should never happen).
     * @throws SQLException if an error occurred while executing a SQL statement.
     */
    public final int run(final String statement) throws IOException, SQLException {
        return run(null, new LineNumberReader(new StringReader(statement)));
    }

    /**
     * Runs the SQL script from the given (filename, input stream) pair.
     * The file name is used only if an error needs to be reported.
     * The stream content is presumed encoded in UTF-8 and the stream will be closed by this method.
     * This method is intended to be invoked by code like this:
     *
     * {@snippet lang="java" :
     *     run("myFile.sql", MyClass.getResourceAsStream("myFile.sql"));
     *     }
     *
     * <h4>Rational</h4>
     * Because {@link Class#getResourceAsStream(String)} is caller-sensitive, it must be invoked
     * from the module containing the resource. Invoking {@code getResourceAsStream(…)} from this
     * {@code run(…)} method does not work even with a {@link Class} instance passed in argument.
     *
     * @param  filename  name of the SQL script being executed. This is used only for error reporting.
     * @param  in        the stream to read. It will be closed by this method.
     * @return the number of rows added or modified as a result of the statement execution.
     * @throws IOException if an error occurred while reading the input.
     * @throws SQLException if an error occurred while executing a SQL statement.
     */
    public final int run(final String filename, final InputStream in) throws IOException, SQLException {
        if (in == null) {
            throw new FileNotFoundException(Errors.format(Errors.Keys.FileNotFound_1, filename));
        }
        try (var reader = new LineNumberReader(new InputStreamReader(in, StandardCharsets.UTF_8))) {
            return run(filename, reader);
        }
    }

    /**
     * Runs the script from the given reader. Lines are read and grouped up to the
     * terminal {@value #END_OF_STATEMENT} character, then sent to the database.
     * Note that contrarily to {@link #run(String, InputStream)},
     * this method does <strong>not</strong> close the given reader.
     *
     * @param filename name of the SQL script being executed. This is used only for error reporting.
     * @param in the stream to read. It is caller's responsibility to close this reader.
     * @return the number of rows added or modified as a result of the script execution.
     * @throws IOException if an error occurred while reading the input.
     * @throws SQLException if an error occurred while executing a SQL statement.
     */
    public final int run(final String filename, final BufferedReader in) throws IOException, SQLException {
        currentFile = filename;
        currentLine = 0;
        int statementCount = 0;                     // For informative purpose only.
        int posOpeningTextQuote = -1;               // -1 if we are not inside a text.
        int posOpeningIdentifierQuote = -1;         // -1 if we are not inside a quoted identifier.
        final var buffer = new StringBuilder();
        final boolean hasReplacements = !replacements.isEmpty();
        String line;
        while ((line = in.readLine()) != null) {
            currentLine++;
            /*
             * Ignore empty lines and comment lines, but only if they appear at the beginning of the SQL statement.
             */
            if (buffer.length() == 0) {
                final int s = CharSequences.skipLeadingWhitespaces(line, 0, line.length());
                if (s >= line.length() || line.startsWith(COMMENT, s)) {
                    continue;
                }
                if (in instanceof LineNumberReader) {
                    currentLine = ((LineNumberReader) in).getLineNumber();
                }
            } else {
                buffer.append('\n');
            }
            /*
             * Copy the current line in the buffer. Then, the loop will search for words or characters to replace
             * (for example replacements of IDENTIFIER_QUOTE character by the database-specific quote character).
             * Replacements (if any) will be performed in-place in the buffer. Consequently the buffer length may
             * vary during the loop execution.
             */
            int pos = buffer.length();
            int length = buffer.append(line).length();
parseLine:  while (pos < length) {
                int c = buffer.codePointAt(pos);
                int n = Character.charCount(c);
                if ((posOpeningTextQuote & posOpeningIdentifierQuote) < 0) {        // True if both positions are -1.
                    int start = pos;
                    while (Character.isUnicodeIdentifierStart(c)) {
                        /*
                         * `start` is the position of the first character of a Unicode identifier. Following loop
                         * sets `pos` to the end (exclusive) of that Unicode identifier. Variable `c` will be set
                         * to the character after the Unicode identifier, provided that we have not reached EOL.
                         */
                        while ((pos += n) < length) {
                            c = buffer.codePointAt(pos);
                            n = Character.charCount(c);
                            if (!Character.isUnicodeIdentifierPart(c)) break;
                        }
                        /*
                         * Perform in-place replacement if the Unicode identifier is one of the keys listed
                         * in the `replacements` map. This operation may change the buffer length. The `pos`
                         * must be updated if needed for staying at position after the Unicode identifier.
                         */
                        if (hasReplacements) {
                            final String word = buffer.substring(start, pos);
                            final String replace = replacements.get(word);
                            if (replace != null) {
                                length = buffer.replace(start, pos, replace).length();
                                pos = start + replace.length();
                            }
                        }
                        /*
                         * Skip whitespaces and set the `c` variable to the next character, which may be either
                         * another Unicode start (to be processed by the enclosing loop) or another character
                         * (to be processed by the switch statement after the enclosing loop).
                         */
                        if (pos >= length) break parseLine;
                        while (Character.isWhitespace(c)) {
                            if ((pos += n) >= length) break parseLine;
                            c = buffer.codePointAt(pos);
                            n = Character.charCount(c);
                        }
                        start = pos;
                    }
                }
                switch (c) {
                    /*
                     * Found a character for an identifier like "Coordinate Operations".
                     * Check if we have found the opening or the closing character. Then
                     * replace the standard quote character by the database-specific one.
                     */
                    case IDENTIFIER_QUOTE: {
                        if (posOpeningTextQuote < 0) {
                            length = buffer.replace(pos, pos + n, identifierQuote).length();
                            n = identifierQuote.length();
                            if (posOpeningIdentifierQuote < 0) {
                                posOpeningIdentifierQuote = pos;
                            } else {
                                editQuotedIdentifier(buffer, posOpeningIdentifierQuote, pos += n);
                                pos -= length - (length = buffer.length());
                                posOpeningIdentifierQuote = -1;
                                continue;           // Because we already skipped the " character.
                            }
                        }
                        break;
                    }
                    /*
                     * Found a character for a text like 'This is a text'. Check if we have
                     * found the opening or closing character, ignoring the '' escape sequence.
                     */
                    case QUOTE: {
                        if (posOpeningIdentifierQuote < 0) {
                            if (posOpeningTextQuote < 0) {
                                posOpeningTextQuote = pos;
                            } else if ((pos += n) >= length || buffer.codePointAt(pos) != QUOTE) {
                                editText(buffer, posOpeningTextQuote, pos);
                                pos -= length - (length = buffer.length());
                                posOpeningTextQuote = -1;
                                continue;           // Because we already skipped the ' character.
                            }
                            // else found a double ' character, which means to escape it.
                        }
                        break;
                    }
                    /*
                     * Found the end of statement. Remove that character if it is the last non-white character,
                     * since SQL statement in JDBC are not expected to contain it.
                     */
                    case END_OF_STATEMENT: {
                        if ((posOpeningTextQuote & posOpeningIdentifierQuote) < 0) {    // True if both are -1.
                            if (CharSequences.skipLeadingWhitespaces(buffer, pos + n, length) >= length) {
                                buffer.setLength(pos);
                            }
                            statementCount += execute(buffer);
                            buffer.setLength(0);
                            break parseLine;
                        }
                        break;
                    }
                }
                pos += n;
            }
        }
        // Whatever remains in the buffer at this point was not terminated by END_OF_STATEMENT.
        line = buffer.toString().trim();
        if (!line.isEmpty() && !line.startsWith(COMMENT)) {
            throw new EOFException(Errors.format(Errors.Keys.UnexpectedEndOfString_1, line));
        }
        currentFile = null;
        return statementCount;
    }

    /**
     * Invoked for each single-quoted text found in a <abbr>SQL</abbr> statement.
     * The text, <em>including its single quote characters</em>,
     * is the {@code sql} substring from index {@code lower} inclusive to {@code upper} exclusive.
     * Subclasses can override this method if they wish to modify the text content.
     * Modifications are applied directly in the given {@code sql} buffer.
     *
     * @param sql the whole SQL statement.
     * @param lower index of the opening quote character ({@code '}) of the text in {@code sql}.
     * @param upper index after the closing quote character ({@code '}) of the text in {@code sql}.
     */
    protected void editText(final StringBuilder sql, final int lower, final int upper) {
    }

    /**
     * Invoked for each double-quoted identifier found in a <abbr>SQL</abbr> statement.
     * The identifier, <em>including its double quote characters</em>,
     * is the {@code sql} substring from index {@code lower} inclusive to {@code upper} exclusive.
     * Subclasses can override this method if they wish to modify the identifier.
     * Modifications are applied directly in the given {@code sql} buffer.
     *
     * @param sql the whole SQL statement.
     * @param lower index of the opening quote character (usually {@code "}) of the identifier in {@code sql}.
     * @param upper index after the closing quote character (usually {@code "}) of the identifier in {@code sql}.
     */
    protected void editQuotedIdentifier(final StringBuilder sql, final int lower, final int upper) {
    }

    /**
     * Returns {@code true} if the given fragment seems outside identifier quotes or text quotes.
     * The given fragment must be the beginning or the end of an SQL statement, or be bounded by
     * indices that are known to be outside quotes. The implementation counts the occurrences of
     * {@value #IDENTIFIER_QUOTE} and {@value #QUOTE} and verifies that both of them are even.
     *
     * @param sql the SQL statement for which to test if a fragment is outside quotes.
     * @param from index of the first character of the fragment.
     * @param to index after the last character of the fragment.
     * @return whether the given fragment seems outside quotes.
     */
    private static boolean isOutsideQuotes(final CharSequence sql, int from, final int to) {
        int nq = 0, ni = 0;             // Number of text quotes and of identifier quotes respectively.
        while (from < to) {
            switch (sql.charAt(from++)) {
                case IDENTIFIER_QUOTE: {
                    ni++;
                    break;
                }
                case QUOTE: {
                    // A '' sequence inside a text is an escaped quote: skip it without counting.
                    if ((nq & 1) != 0 && from < to && sql.charAt(from) == QUOTE) {
                        from++;
                    } else {
                        nq++;
                    }
                    break;
                }
            }
        }
        return ((nq | ni) & 1) == 0;    // True if both counts are even.
    }

    /**
     * Returns {@code true} if the given SQL statements is supported by the database engine,
     * or {@code false} if this statement should be ignored. The default implementation checks
     * if the given query matches the regular expressions given to {@link #addStatementToSkip(String)}.
     *
     * <p>This method is only a hint; a value of {@code true} is not a guaranteed that the given
     * SQL statement is valid.</p>
     *
     * @param sql the SQL statement to verify.
     * @return whether the given SQL statement is supported by the database engine.
     */
    protected boolean isSupported(final CharSequence sql) {
        if (statementsToSkip != null) {
            return !statementsToSkip.reset(sql).matches();
        } else if (regexOfStmtToSkip != null) {
            // Compile the accumulated alternatives on first use, then discard the builder.
            // We do not use Pattern.CASE_INSENSITIVE for performance reasons.
            statementsToSkip = Pattern.compile(regexOfStmtToSkip.toString(), Pattern.DOTALL).matcher(sql);
            regexOfStmtToSkip = null;
            return !statementsToSkip.matches();
        } else {
            return true;                // No statement to skip has been declared.
        }
    }

    /**
     * Executes the given SQL statement.
     * This method performs the following choices:
     *
     * <ul>
     *   <li>If {@link #isSupported(CharSequence)} returns {@code false}, then this method does nothing.</li>
     *   <li>If the statement is {@code CREATE TABLE ... INHERITS ...} but the database does not support
     *       table inheritance, then this method drops the {@code INHERITS ...} part.</li>
     *   <li>If the {@code maxRowsPerInsert} argument given at construction time was zero,
     *       then this method skips {@code "INSERT INTO"} statements but executes all other.</li>
     *   <li>Otherwise this method executes the given statement with the following modification:
     *       if the statement is an {@code "INSERT INTO"} with many values, then this method may break
     *       that statement into many {@code "INSERT INTO"} where each statements does not have more
     *       than {@code maxRowsPerInsert} rows.</li>
     * </ul>
     *
     * Subclasses that override this method can freely edit the {@link StringBuilder} content
     * before invoking this method.
     *
     * @param sql the SQL statement to execute.
     * @return the number of rows added or modified as a result of the statement execution.
     * @throws SQLException if an error occurred while executing the SQL statement.
     * @throws IOException if an I/O operation was required and failed.
     */
    protected int execute(final StringBuilder sql) throws SQLException, IOException {
        if (!isSupported(sql)) {
            return 0;
        }
        String subSQL = currentSQL = CharSequences.trimWhitespaces(sql).toString();
        if (!dialect.supportsTableInheritance() && subSQL.startsWith("CREATE TABLE")) {
            final int s = sql.lastIndexOf("INHERITS");
            if (s >= 0 && isOutsideQuotes(sql, s+8, sql.length())) {        // 8 is the length of "INHERITS".
                sql.setLength(CharSequences.skipTrailingWhitespaces(sql, 0, s));
                subSQL = currentSQL = sql.toString();
            }
        }
        int count = 0;
        /*
         * The scripts usually do not contain any SELECT statement. One exception is the creation
         * of geometry columns in a PostGIS database, which use "SELECT AddGeometryColumn(…)".
         */
        if (subSQL.startsWith(SQLBuilder.SELECT)) {
            statement.executeQuery(subSQL).close();
        } else {
            if (maxRowsPerInsert != Integer.MAX_VALUE && subSQL.startsWith("INSERT INTO")) {
                if (maxRowsPerInsert == 0) {
                    subSQL = null;          // Skip completely the "INSERT INTO" statement.
                } else {
                    int endOfLine = subSQL.indexOf('\n', 11);       // 11 is the length of "INSERT INTO".
                    if (subSQL.startsWith("VALUES", endOfLine - 6)) {
                        /*
                         * The following code is very specific to the syntax of the scripts generated by SIS.
                         * This code fetches the "INSERT INTO" part, which is expected to be on its own line.
                         * We will left this part of the buffer unchanged and write only after the offset.
                         */
                        sql.setLength(0);           // Rewrite from the beginning in case we trimmed whitespaces.
                        final int startOfValues = sql.append(subSQL, 0, endOfLine).append(' ').length();
                        int nrows = maxRowsPerInsert;
                        int begin = endOfLine + 1;
                        while ((endOfLine = subSQL.indexOf('\n', ++endOfLine)) >= 0) {
                            if (--nrows == 0) {     // Extract lines until we have reached the `maxRowsPerInsert` amount.
                                int end = endOfLine;
                                if (subSQL.charAt(end - 1) == ',') {
                                    end--;          // Drop the trailing comma of the last row kept in this batch.
                                }
                                count += statement.executeUpdate(currentSQL = sql.append(subSQL, begin, end).toString());
                                sql.setLength(startOfValues);       // Prepare for next INSERT INTO statement.
                                nrows = maxRowsPerInsert;
                                begin = endOfLine + 1;
                            }
                        }
                        // The remaining of the statement to be executed.
                        int end = CharSequences.skipTrailingWhitespaces(subSQL, begin, subSQL.length());
                        currentSQL = subSQL = (end > begin) ? sql.append(subSQL, begin, end).toString() : null;
                    }
                }
            }
            if (subSQL != null) {
                count += statement.executeUpdate(subSQL);
            }
        }
        currentSQL = null;              // Clear on success only.
        return count;
    }

    /**
     * Closes the statement used by this runner. Note that this method does not close the connection
     * given to the constructor; this connection still needs to be closed explicitly by the caller.
     *
     * @throws SQLException if an error occurred while closing the statement.
     */
    @Override
    public void close() throws SQLException {
        statement.close();
    }

    /**
     * Returns the current position (current file and current line in that file). The returned string may also contain
     * the SQL statement under execution. The main purpose of this method is to provide information about the position
     * where an exception occurred.
     *
     * @param locale the locale for the message to return.
     * @return a string representation of the current position, or {@code null} if unknown.
     */
    public String status(final Locale locale) {
        String position = null;
        if (currentFile != null) {
            position = Errors.forLocale(locale).getString(Errors.Keys.ErrorInFileAtLine_2, currentFile, currentLine);
        }
        if (currentSQL != null) {
            final var buffer = new StringBuilder();
            if (position != null) {
                buffer.append(position).append('\n');
            }
            position = buffer.append("SQL: ").append(currentSQL).toString();
        }
        return position;
    }

    /**
     * Returns a string representation of this runner for debugging purpose.
     *
     * @return a string representation for debugging purpose.
     */
    @Override
    public String toString() {
        return Strings.toString(getClass(), "status", status(null));
    }
}
apache/geode
36,385
geode-core/src/main/java/org/apache/geode/internal/cache/partitioned/rebalance/model/PartitionedRegionLoadModel.java
/* * Licensed to the Apache Software Foundation (ASF) under one or more contributor license * agreements. See the NOTICE file distributed with this work for additional information regarding * copyright ownership. The ASF licenses this file to You under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance with the License. You may obtain a * copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software distributed under the License * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express * or implied. See the License for the specific language governing permissions and limitations under * the License. */ package org.apache.geode.internal.cache.partitioned.rebalance.model; import java.util.Collection; import java.util.Comparator; import java.util.HashMap; import java.util.HashSet; import java.util.Iterator; import java.util.List; import java.util.Map; import java.util.Map.Entry; import java.util.Set; import java.util.SortedSet; import java.util.TreeSet; import org.apache.logging.log4j.Logger; import org.apache.geode.annotations.Immutable; import org.apache.geode.cache.partition.PartitionMemberInfo; import org.apache.geode.distributed.internal.membership.InternalDistributedMember; import org.apache.geode.internal.Assert; import org.apache.geode.internal.cache.FixedPartitionAttributesImpl; import org.apache.geode.internal.cache.PartitionedRegion; import org.apache.geode.internal.cache.partitioned.InternalPartitionDetails; import org.apache.geode.internal.cache.partitioned.OfflineMemberDetails; import org.apache.geode.internal.cache.partitioned.PRLoad; import org.apache.geode.internal.cache.partitioned.PartitionMemberInfoImpl; import org.apache.geode.internal.cache.partitioned.rebalance.BucketOperator; import org.apache.geode.internal.cache.persistence.PersistentMemberID; import 
org.apache.geode.logging.internal.log4j.api.LogService;

/**
 * A model of the load on all of the members for a partitioned region. This model is used to find
 * the best members to create buckets on or move buckets or primaries too. All of the actual work of
 * creating a copy, moving a primary, etc. Is performed by the BucketOperator that is passed to the
 * constructor.
 *
 * To use, create a model and populate it using the addRegion method. addRegion takes a region
 * argument, to indicate which region the data is for. All of the regions added to a single model
 * are assumed to be colocated, and the model adds together the load from each of the individual
 * regions to balance all of the regions together.
 *
 * Rebalancing operations are performed by repeatedly calling model.nextStep until it returns false.
 * Each call to nextStep should perform another operation. The model will make callbacks to the
 * BucketOperator you provide to the constructor to perform the actual create or move.
 *
 * While creating redundant copies or moving buckets, this model tries to minimize the standard
 * deviation in the weighted loads for the members. The weighted load for the member is the sum of
 * the load for all of the buckets on the member divided by that members weight.
 *
 * This model is not threadsafe.
 *
 * @since GemFire 6.0
 */
@SuppressWarnings("synthetic-access")
public class PartitionedRegionLoadModel {
  private static final Logger logger = LogService.getLogger();

  /**
   * A comparator that is used to sort buckets in the order that we should satisfy redundancy - most
   * needy buckets first.
   */
  @Immutable
  private static final Comparator<Bucket> REDUNDANCY_COMPARATOR = (o1, o2) -> {
    // put the buckets with the lowest redundancy first
    // NOTE(review): subtraction-based compare assumes redundancy and id values are small
    // non-negative ints (no overflow) — appears to hold for redundancy counts and bucket ids.
    int result = o1.getRedundancy() - o2.getRedundancy();
    if (result == 0) {
      // put the bucket with the largest load first. This should give us a
      // better chance of finding a place to put it
      result = Float.compare(o2.getLoad(), o1.getLoad());
    }
    if (result == 0) {
      // finally, just use the id so the comparator doesn't swallow buckets
      // with the same load
      result = o1.getId() - o2.getId();
    }
    return result;
  };

  private static final long MEGABYTES = 1024 * 1024;

  /**
   * A member to represent inconsistent data. For example, if two members think they are the primary
   * for a bucket, we will set the primary to invalid, so it won't be a candidate for rebalancing.
   */
  @Immutable
  public static final MemberRollup INVALID_MEMBER = new MemberRollup(null, null, false, false);

  // One rollup per bucket id; a null entry means the bucket is not hosted (or had inconsistent data).
  private final BucketRollup[] buckets;

  /**
   * A map of all members that host this partitioned region
   */
  private final Map<InternalDistributedMember, MemberRollup> members = new HashMap<>();

  /**
   * The set of all regions that are colocated in this model.
   */
  private final Set<String> allColocatedRegions = new HashSet<>();

  /**
   * The list of buckets that have low redundancy
   */
  private final SortedSet<BucketRollup> lowRedundancyBuckets =
      new TreeSet<>(REDUNDANCY_COMPARATOR);
  private SortedSet<BucketRollup> overRedundancyBuckets = null;

  // Moves already attempted (and possibly failed), kept so they are not retried.
  private final Collection<Move> attemptedPrimaryMoves = new HashSet<>();
  private final Collection<Move> attemptedBucketMoves = new HashSet<>();
  private final Collection<Move> attemptedBucketCreations = new HashSet<>();
  private final Collection<Move> attemptedBucketRemoves = new HashSet<>();

  private final BucketOperator operator;
  private final int requiredRedundancy;

  /**
   * The average primary load on a member
   */
  private float primaryAverage = -1;

  /**
   * The average bucket load on a member
   */
  private float averageLoad = -1;

  /**
   * The minimum improvement in variance that we'll consider worth moving a primary
   */
  private double minPrimaryImprovement = -1;

  /**
   * The minimum improvement in variance that we'll consider worth moving a bucket
   */
  private double minImprovement = -1;
  private final AddressComparor
addressComparor;
  private final Set<InternalDistributedMember> criticalMembers;
  private final PartitionedRegion partitionedRegion;

  /**
   * Create a new model
   *
   * @param operator the operator which performs the actual creates/moves for buckets
   * @param redundancyLevel The expected redundancy level for the region
   * @param numBuckets the configured number of buckets (sizes the per-bucket rollup array)
   * @param addressComparor comparator of member addresses; also supplies the unique-zones policy
   * @param criticalMembers members to be flagged as critical in this model
   * @param region the partitioned region object (see {@link #getPartitionedRegion()})
   */
  public PartitionedRegionLoadModel(BucketOperator operator, int redundancyLevel, int numBuckets,
      AddressComparor addressComparor, Set<InternalDistributedMember> criticalMembers,
      PartitionedRegion region) {
    this.operator = operator;
    requiredRedundancy = redundancyLevel;
    buckets = new BucketRollup[numBuckets];
    this.addressComparor = addressComparor;
    this.criticalMembers = criticalMembers;
    partitionedRegion = region;
  }

  /**
   * Add a region to the model. All regions that are added are assumed to be colocated. The first
   * region added to the model should be the parent region. The parent region is expected to have at
   * least as many members as child regions; it may have more. If the parent has more members than
   * child regions those members will be considered invalid.
   *
   * @param region the name of the region being added
   * @param memberDetailSet A set of details about each member.
   * @param offlineDetails details about offline members, queried per bucket id
   * @param enforceLocalMaxMemory forwarded to each Member/MemberRollup created by this call
   */
  public void addRegion(String region,
      Collection<? extends InternalPartitionDetails> memberDetailSet,
      OfflineMemberDetails offlineDetails, boolean enforceLocalMaxMemory) {
    allColocatedRegions.add(region);
    // build up a list of members and an array of buckets for this
    // region. Each bucket has a reference to all of the members
    // that host it and each member has a reference to all of the buckets
    // it hosts
    Map<InternalDistributedMember, Member> regionMember = new HashMap<>();
    Bucket[] regionBuckets = new Bucket[buckets.length];
    for (InternalPartitionDetails memberDetails : memberDetailSet) {
      InternalDistributedMember memberId =
          (InternalDistributedMember) memberDetails.getDistributedMember();

      boolean isCritical = criticalMembers.contains(memberId);
      Member member = new Member(addressComparor, memberId, memberDetails.getPRLoad().getWeight(),
          memberDetails.getConfiguredMaxMemory(), isCritical, enforceLocalMaxMemory);
      regionMember.put(memberId, member);
      PRLoad load = memberDetails.getPRLoad();
      for (int i = 0; i < regionBuckets.length; i++) {
        // A positive read load means this member hosts bucket i for this region.
        if (load.getReadLoad(i) > 0) {
          Bucket bucket = regionBuckets[i];
          if (bucket == null) {
            Set<PersistentMemberID> offlineMembers = offlineDetails.getOfflineMembers(i);
            bucket = new Bucket(i, load.getReadLoad(i), memberDetails.getBucketSize(i),
                offlineMembers);
            regionBuckets[i] = bucket;
          }
          bucket.addMember(member);
          // A positive write load marks the primary; two claimed primaries make the bucket invalid.
          if (load.getWriteLoad(i) > 0) {
            if (bucket.getPrimary() == null) {
              bucket.setPrimary(member, load.getWriteLoad(i));
            } else if (!bucket.getPrimary().equals(member)) {
              bucket.setPrimary(INVALID_MEMBER, 1);
            }
          }
        }
      }
    }

    // add each member for this region to a rollup of all colocated
    // regions
    for (Member member : regionMember.values()) {
      InternalDistributedMember memberId = member.getDistributedMember();
      MemberRollup memberSum = members.get(memberId);
      boolean isCritical = criticalMembers.contains(memberId);
      if (memberSum == null) {
        memberSum = new MemberRollup(addressComparor, memberId, isCritical, enforceLocalMaxMemory);
        members.put(memberId, memberSum);
      }

      memberSum.addColocatedMember(region, member);
    }

    // Now, add the region to the rollups of the colocated
    // regions and buckets
    for (int i = 0; i < buckets.length; i++) {
      if (regionBuckets[i] == null) {
        // do nothing, this bucket is not hosted for this region.
        // [sumedh] remove from buckets array too to be consistent since
        // this method will be invoked repeatedly for all colocated regions,
        // and then we may miss some colocated regions for a bucket leading
        // to all kinds of issues later
        buckets[i] = null;
        continue;
      }
      if (buckets[i] == null) {
        // If this is the first region we have seen that is hosting this bucket, create a bucket
        // rollup
        buckets[i] = new BucketRollup(i);
      }

      // Add all of the members hosting the bucket to the rollup
      for (Member member : regionBuckets[i].getMembersHosting()) {
        InternalDistributedMember memberId = member.getDistributedMember();
        buckets[i].addMember(members.get(memberId));
      }

      // set the primary for the rollup
      if (regionBuckets[i].getPrimary() != null) {
        if (buckets[i].getPrimary() == null) {
          InternalDistributedMember memberId = regionBuckets[i].getPrimary().getDistributedMember();
          buckets[i].setPrimary(members.get(memberId), 0);
        } else {
          if (!(buckets[i].getPrimary() == INVALID_MEMBER)) {
            // Colocated regions disagree on the primary: mark the rollup invalid.
            if (!buckets[i].getPrimary().getDistributedMember()
                .equals(regionBuckets[i].getPrimary().getDistributedMember())) {
              if (logger.isDebugEnabled()) {
                logger.debug(
                    "PartitionedRegionLoadModel - Setting bucket {} to INVALID because it is the primary on two members.This could just be a race in the collocation of data. member1={} member2={}",
                    buckets[i], buckets[i].getPrimary(), regionBuckets[i].getPrimary());
              }
              buckets[i].setPrimary(INVALID_MEMBER, 0);
            }
          }
        }
      }

      buckets[i].addColocatedBucket(region, regionBuckets[i]);
    }

    // TODO rebalance - there is a possibility of adding members
    // back here, which I don't like. I think maybe all of the regions should be in the
    // constructor for the load model, and then when the constructor is done
    // we can do with validation.

    // If any members don't have this new region, remove them.
    for (Iterator<Entry<InternalDistributedMember, MemberRollup>> itr =
        members.entrySet().iterator(); itr.hasNext();) {
      MemberRollup memberRollup = itr.next().getValue();
      if (!memberRollup.getColocatedMembers().keySet().equals(allColocatedRegions)) {
        itr.remove();
        if (logger.isDebugEnabled()) {
          logger.debug(
              "PartitionedRegionLoadModel - removing member {} from the consideration because it doesn't have all of the colocated regions. Expected={}, was={}",
              memberRollup, allColocatedRegions, memberRollup.getColocatedMembers());
        }

        // This state should never happen
        if (!memberRollup.getBuckets().isEmpty()) {
          logger.warn(
              "PartitionedRegionLoadModel - member {} has incomplete colocation, but it has buckets for some regions. Should have colocated regions {} but had {} and contains buckets {}",
              new Object[] {memberRollup, allColocatedRegions,
                  memberRollup.getColocatedMembers().keySet(), memberRollup.getBuckets()});
        }
        // Copy before iterating: removeMember mutates the member's bucket set.
        for (Bucket bucket : new HashSet<>(memberRollup.getBuckets())) {
          bucket.removeMember(memberRollup);
        }
      }
    }
  }

  /** Computes the load averages and identifies the low- and over-redundancy bucket sets. */
  public void initialize() {
    resetAverages();
    identifyOverRedundantBuckets();
    initLowRedundancyBuckets();
  }

  /** Returns the buckets whose redundancy is below the required level, most needy first. */
  public SortedSet<BucketRollup> getLowRedundancyBuckets() {
    return lowRedundancyBuckets;
  }

  /** Returns the buckets with more copies than required, or {@code null} if not yet identified. */
  public SortedSet<BucketRollup> getOverRedundancyBuckets() {
    return overRedundancyBuckets;
  }

  /** Returns whether redundancy zones must be unique, as configured on the address comparor. */
  public boolean enforceUniqueZones() {
    return addressComparor.enforceUniqueZones();
  }

  /** Removes the given bucket from further low-redundancy consideration. */
  public void ignoreLowRedundancyBucket(BucketRollup first) {
    lowRedundancyBuckets.remove(first);
  }

  /** Removes the given bucket from further over-redundancy consideration. */
  public void ignoreOverRedundancyBucket(BucketRollup first) {
    overRedundancyBuckets.remove(first);
  }

  /** Returns the rollup for the given member, or {@code null} if it is not part of this model. */
  public MemberRollup getMember(InternalDistributedMember target) {
    return members.get(target);
  }

  /** Returns the per-bucket rollups; {@code null} entries are buckets not hosted by any member. */
  public BucketRollup[] getBuckets() {
    return buckets;
  }

  /** Returns the full path of the partitioned region, used as the name of this model. */
  public String getName() {
    return getPartitionedRegion().getFullPath();
  }

  public PartitionedRegion getPartitionedRegion() {
    // TODO - this model really should not have
    // a reference to the partitioned region object.
    // The fixed PR code currently depends on this
    // partitioned region object and needs
    // refactoring.
    return partitionedRegion;
  }

  /** Returns a map of colocated region name to the size in bytes of that region's bucket. */
  private Map<String, Long> getColocatedRegionSizes(BucketRollup bucket) {
    Map<String, Long> colocatedRegionSizes = new HashMap<>();

    for (Map.Entry<String, Bucket> entry : bucket.getColocatedBuckets().entrySet()) {
      colocatedRegionSizes.put(entry.getKey(), entry.getValue().getBytes());
    }
    return colocatedRegionSizes;
  }

  /**
   * Trigger the creation of a redundant bucket, potentially asynchronously.
   *
   * This method will find the best node to create a redundant bucket and invoke the bucket operator
   * to create a bucket on that node. Because the bucket operator is asynchronous, the bucket may
   * not be created immediately, but the model will be updated regardless. Invoke
   * {@link #waitForOperations()} to wait for those operations to actually complete
   *
   * @param bucket the bucket for which a redundant copy should be made
   * @param targetMember the member on which a redundant copy of a bucket should be made
   */
  public void createRedundantBucket(final BucketRollup bucket, final Member targetMember) {
    Map<String, Long> colocatedRegionSizes = getColocatedRegionSizes(bucket);

    final Move move = new Move(null, targetMember, bucket);
    // Optimistically update the model; the onFailure callback below rolls these changes back.
    lowRedundancyBuckets.remove(bucket);
    bucket.addMember(targetMember);
    // put the bucket back into the list if we still need to satisfy redundancy for
    // this bucket
    if (bucket.getRedundancy() < requiredRedundancy) {
      lowRedundancyBuckets.add(bucket);
    }
    resetAverages();

    operator.createRedundantBucket(targetMember.getMemberId(), bucket.getId(),
        colocatedRegionSizes, new BucketOperator.Completion() {
          @Override
          public void onSuccess() {}

          @Override
          public void onFailure() {
            // If the bucket creation failed, we need to undo the changes
            // we made to the model
            attemptedBucketCreations.add(move);
            // remove the bucket from lowRedundancyBuckets before mutating the state
            lowRedundancyBuckets.remove(bucket);
            bucket.removeMember(targetMember);
if (bucket.getRedundancy() < requiredRedundancy) { lowRedundancyBuckets.add(bucket); } resetAverages(); } }); } public void remoteOverRedundancyBucket(BucketRollup bucket, Member targetMember) { Move bestMove = new Move(null, targetMember, bucket); Map<String, Long> colocatedRegionSizes = getColocatedRegionSizes(bucket); if (!operator.removeBucket(targetMember.getMemberId(), bucket.getId(), colocatedRegionSizes)) { attemptedBucketRemoves.add(bestMove); } else { overRedundancyBuckets.remove(bucket); bucket.removeMember(targetMember); // put the bucket back into the list if we still need to satisfy redundancy for // this bucket if (bucket.getOnlineRedundancy() > requiredRedundancy) { overRedundancyBuckets.add(bucket); } resetAverages(); } } private void initLowRedundancyBuckets() { for (BucketRollup b : buckets) { if (b != null && b.getRedundancy() >= 0 && b.getRedundancy() < requiredRedundancy) { lowRedundancyBuckets.add(b); } } } /** * Original functionality if bucket's redundancy is greater than what is necessary add it to the * over redundancy bucket list, so it can be cleared * <p> * Newly added functionality is to make this so that we don't have a bucket in the same redundancy * zone twice if zones are in use. */ private void identifyOverRedundantBuckets() { overRedundancyBuckets = new TreeSet<>(REDUNDANCY_COMPARATOR); // For every bucket for (BucketRollup b : buckets) { if (b != null) { // check to see if the existing redundancy is greater than required if (b.getOnlineRedundancy() > requiredRedundancy) { // if so, add the bucket to the over redundancy list overRedundancyBuckets.add(b); } else { // figure out if we have over redundancy in a zone by having two members hosting a // bucket that should be spread across zones. determineOverRedundancyInZones(b); } } } } /** * Determine if the passed in bucket is on more than one member in a zone and mark it as * overredundant. 
If by marking a bucket over redundant, that would make the redundancy * insufficient, add the bucket to lowRedundancy as well so a member in a different zone * can host it. * * @param bucketRollup the BucketRollup that we are checking */ private void determineOverRedundancyInZones( BucketRollup bucketRollup) { Set<String> redundancyZonesFound = new HashSet<>(); // for each member that is hosting the bucket for (Member member : bucketRollup.getMembersHosting()) { // get the redundancy zone of the member String redundancyZone = getRedundancyZone(member.getDistributedMember()); if (redundancyZone != null) { // if the redundancy zone is already in the list if (redundancyZonesFound.contains(redundancyZone)) { // add the bucket to the over redundancy list because we have more than one member // with this bucket in the same zone. something we don't prefer with multiple zones overRedundancyBuckets.add(bucketRollup); if (bucketRollup.getOnlineRedundancy() - 1 < bucketRollup.getRedundancy()) { lowRedundancyBuckets.add(bucketRollup); } } else { // otherwise add the redundancy zone to the list of redundancy zones redundancyZonesFound.add(redundancyZone); } } } } /** * Find the best member to put a new bucket on. 
* * @param bucket the bucket we want to create * @param checkIPAddress true if we should only consider members that do not have the same IP * Address as a member that already hosts the bucket */ public Move findBestTarget(Bucket bucket, boolean checkIPAddress) { float leastCost = Float.MAX_VALUE; Move bestMove = null; for (Member member : members.values()) { if (member.willAcceptBucket(bucket, null, checkIPAddress).willAccept()) { float cost = (member.getTotalLoad() + bucket.getLoad()) / member.getWeight(); if (cost < leastCost) { Move move = new Move(null, member, bucket); if (!attemptedBucketCreations.contains(move)) { leastCost = cost; bestMove = move; } } } } return bestMove; } String getRedundancyZone(InternalDistributedMember memberID) { return partitionedRegion.getDistributionManager().getRedundancyZone(memberID); } /** * Look for a zone that has more than one member represented. That is where we want to try to * delete first. * * @param members set of members that are hosting this bucket * @return null or a string containing the zone to delete a member from */ Set<String> getPreferredDeletionZone(Set<Member> members) { Set<String> distributionSet = new HashSet<>(); Set<String> zonesToDeleteFrom = new HashSet<>(); // for each member for (Member member : members) { // get the redundancy zone String zoneName = getRedundancyZone(member.getMemberId()); // See if the zoneName is in the list if (distributionSet.contains(zoneName)) { // We have two members so that means it is preferred zonesToDeleteFrom.add(zoneName); } else { // first occurrence of the zone name in the set distributionSet.add(zoneName); } } // return nothing if this is the first member // because there is no preferred deletion zone yet. 
return zonesToDeleteFrom; } /** * Find the best member to remove a bucket from * * @param bucket the bucket we want to create */ public Move findBestRemove(Bucket bucket) { float mostLoaded = Float.MIN_VALUE; Move bestMove = null; Set<Member> members = bucket.getMembersHosting(); // Check to see if we have a preferred redundancy zone to delete from for this bucket. // We prefer deleting copies in the same redundancy zone Set<String> zones = getPreferredDeletionZone(members); for (Member member : members) { // if this load is lower than then highest load, we prefer the deleting from high // load servers so move on. If this member is the bucket primary, we prefer not to move // primaries, so move on. float newLoad = (member.getTotalLoad() - bucket.getLoad()) / member.getWeight(); if (newLoad <= mostLoaded || member.equals(bucket.getPrimary())) { continue; } // if we have a preferred redundancy zone to delete from if (!zones.isEmpty()) { // leave the bucket on this member whose zone is not in the list if (!zones.contains(getRedundancyZone(member.getMemberId()))) { continue; } } // Since this bucket is an extra copy for the zone, we should remove it. // if the attemptedBucketRemovesList contains this move, then we don't need to add it // again. 
Move move = new Move(null, member, bucket); if (attemptedBucketRemoves.contains(move)) { continue; } mostLoaded = newLoad; bestMove = move; } return bestMove; } public Move findBestTargetForFPR(Bucket bucket, boolean checkIPAddress) { InternalDistributedMember targetMemberID; Member targetMember; List<FixedPartitionAttributesImpl> fpas = partitionedRegion.getFixedPartitionAttributesImpl(); if (fpas != null) { for (FixedPartitionAttributesImpl fpaImpl : fpas) { if (fpaImpl.hasBucket(bucket.getId())) { targetMemberID = partitionedRegion.getDistributionManager().getDistributionManagerId(); if (members.containsKey(targetMemberID)) { targetMember = members.get(targetMemberID); if (targetMember.willAcceptBucket(bucket, null, checkIPAddress).willAccept()) { // We should have just one move for creating // all the buckets for a FPR on this node. return new Move(null, targetMember, bucket); } } } } } return null; } public boolean movePrimary(Move bestMove) { Member bestSource = bestMove.getSource(); Member bestTarget = bestMove.getTarget(); Bucket bestBucket = bestMove.getBucket(); boolean successfulMove = operator.movePrimary(bestSource.getDistributedMember(), bestTarget.getDistributedMember(), bestBucket.getId()); if (successfulMove) { bestBucket.setPrimary(bestTarget, bestBucket.getPrimaryLoad()); } boolean entryAdded = attemptedPrimaryMoves.add(bestMove); Assert.assertTrue(entryAdded, "PartitionedRegionLoadModel.movePrimarys - excluded set is not growing, so we probably would have an infinite loop here"); return successfulMove; } public Move findBestPrimaryMove() { Move bestMove = null; double bestImprovement = 0; for (Member source : members.values()) { for (Bucket bucket : source.getPrimaryBuckets()) { for (Member target : bucket.getMembersHosting()) { if (source.equals(target)) { continue; } double improvement = improvement(source.getPrimaryLoad(), source.getWeight(), target.getPrimaryLoad(), target.getWeight(), bucket.getPrimaryLoad(), getPrimaryAverage()); if 
(improvement > bestImprovement && improvement > getMinPrimaryImprovement()) { Move move = new Move(source, target, bucket); if (!attemptedPrimaryMoves.contains(move)) { bestImprovement = improvement; bestMove = move; } } } } } return bestMove; } /** * Calculate the target weighted number of primaries on each node. */ private float getPrimaryAverage() { if (primaryAverage == -1) { float totalWeight = 0; float totalPrimaryCount = 0; for (Member member : members.values()) { totalPrimaryCount += member.getPrimaryLoad(); totalWeight += member.getWeight(); } primaryAverage = totalPrimaryCount / totalWeight; } return primaryAverage; } /** * Calculate the target weighted amount of data on each node. */ private float getAverageLoad() { if (averageLoad == -1) { float totalWeight = 0; float totalLoad = 0; for (Member member : members.values()) { totalLoad += member.getTotalLoad(); totalWeight += member.getWeight(); } averageLoad = totalLoad / totalWeight; } return averageLoad; } /** * Calculate the minimum improvement in variance that will we consider worth while. Currently this * is calculated as the improvement in variance that would occur by removing the smallest bucket * from the member with the largest weight. */ private double getMinPrimaryImprovement() { if ((minPrimaryImprovement + 1.0) < .0000001) { // i.e. 
== -1 float largestWeight = 0; float smallestBucket = 0; for (Member member : members.values()) { if (member.getWeight() > largestWeight) { largestWeight = member.getWeight(); } for (Bucket bucket : member.getPrimaryBuckets()) { if (bucket.getPrimaryLoad() < smallestBucket || smallestBucket == 0) { smallestBucket = bucket.getPrimaryLoad(); } } } double before = variance(getPrimaryAverage() * largestWeight + smallestBucket, largestWeight, getPrimaryAverage()); double after = variance(getPrimaryAverage() * largestWeight, largestWeight, getPrimaryAverage()); minPrimaryImprovement = (before - after) / smallestBucket; } return minPrimaryImprovement; } /** * Calculate the minimum improvement in variance that will we consider worth while. Currently this * is calculated as the improvement in variance that would occur by removing the smallest bucket * from the member with the largest weight. */ private double getMinImprovement() { if ((minImprovement + 1.0) < .0000001) { // i.e. == -1 float largestWeight = 0; float smallestBucket = 0; for (Member member : members.values()) { if (member.getWeight() > largestWeight) { largestWeight = member.getWeight(); } // find the smallest bucket, ignoring empty buckets. for (Bucket bucket : member.getBuckets()) { if (smallestBucket == 0 || (bucket.getLoad() < smallestBucket && bucket.getBytes() > 0)) { smallestBucket = bucket.getLoad(); } } } double before = variance(getAverageLoad() * largestWeight + smallestBucket, largestWeight, getAverageLoad()); double after = variance(getAverageLoad() * largestWeight, largestWeight, getAverageLoad()); minImprovement = (before - after) / smallestBucket; } return minImprovement; } private void resetAverages() { primaryAverage = -1; averageLoad = -1; minPrimaryImprovement = -1; minImprovement = -1; } /** * Calculate how much the variance in load will decrease for a given move. 
* * @param sLoad the current load on the source member * @param sWeight the weight of the source member * @param tLoad the current load on the target member * @param tWeight the weight of the target member * @param bucketSize the size of the bucket we're considering moving * @param average the target weighted load for all members. * @return the change in variance that would occur by making this move. Essentially * variance_before - variance_after, so a positive change is a means the variance is * decreasing. */ private double improvement(float sLoad, float sWeight, float tLoad, float tWeight, float bucketSize, float average) { double vSourceBefore = variance(sLoad, sWeight, average); double vSourceAfter = variance(sLoad - bucketSize, sWeight, average); double vTargetBefore = variance(tLoad, tWeight, average); double vTargetAfter = variance(tLoad + bucketSize, tWeight, average); double improvement = vSourceBefore - vSourceAfter + vTargetBefore - vTargetAfter; return improvement / bucketSize; } private double variance(double load, double weight, double average) { double deviation = (load / weight - average); return deviation * deviation; } public Move findBestBucketMove() { Move bestMove = null; double bestImprovement = 0; for (Member source : members.values()) { for (Bucket bucket : source.getBuckets()) { for (Member target : members.values()) { if (bucket.getMembersHosting().contains(target)) { continue; } if (!target.willAcceptBucket(bucket, source, true).willAccept()) { continue; } double improvement = improvement(source.getTotalLoad(), source.getWeight(), target.getTotalLoad(), target.getWeight(), bucket.getLoad(), getAverageLoad()); if (improvement > bestImprovement && improvement > getMinImprovement()) { Move move = new Move(source, target, bucket); if (!attemptedBucketMoves.contains(move)) { bestImprovement = improvement; bestMove = move; } } } } } return bestMove; } public boolean moveBucket(Move bestMove) { Member bestSource = bestMove.getSource(); Member 
bestTarget = bestMove.getTarget(); BucketRollup bestBucket = (BucketRollup) bestMove.getBucket(); Map<String, Long> colocatedRegionSizes = getColocatedRegionSizes(bestBucket); boolean successfulMove = operator.moveBucket(bestSource.getDistributedMember(), bestTarget.getDistributedMember(), bestBucket.getId(), colocatedRegionSizes); if (successfulMove) { bestBucket.addMember(bestTarget); if (bestSource.equals(bestBucket.getPrimary())) { bestBucket.setPrimary(bestTarget, bestBucket.getPrimaryLoad()); } bestBucket.removeMember(bestSource); } boolean entryAdded = attemptedBucketMoves.add(bestMove); Assert.assertTrue(entryAdded, "PartitionedRegionLoadModel.moveBuckets - excluded set is not growing, so we probably would have an infinite loop here"); return successfulMove; } /** * Return a snapshot of what the partitioned member details look like. * * @return a set of partitioned member details. */ public Set<PartitionMemberInfo> getPartitionedMemberDetails(String region) { TreeSet<PartitionMemberInfo> result = new TreeSet<>(); for (MemberRollup member : members.values()) { Member colocatedMember = member.getColocatedMember(region); if (colocatedMember != null) { result.add(new PartitionMemberInfoImpl(colocatedMember.getDistributedMember(), colocatedMember.getConfiguredMaxMemory(), colocatedMember.getSize(), colocatedMember.getBucketCount(), colocatedMember.getPrimaryCount())); } } return result; } /** * For testing only, calculate the total variance of the members */ public double getVarianceForTest() { double variance = 0; for (Member member : members.values()) { variance += variance(member.getTotalLoad(), member.getWeight(), getAverageLoad()); } return variance; } /** * For testing only, calculate the total variance of the members */ public double getPrimaryVarianceForTest() { double variance = 0; for (Member member : members.values()) { variance += variance(member.getPrimaryLoad(), member.getWeight(), getPrimaryAverage()); } return variance; } /** * Wait for the 
bucket operator to complete any pending asynchronous operations. */ public void waitForOperations() { operator.waitForOperations(); } @Override public String toString() { StringBuilder result = new StringBuilder(); TreeSet<Bucket> allBucketIds = new TreeSet<>(Comparator.comparingInt(Bucket::getId)); if (members.isEmpty()) { return ""; } int longestMemberId = 0; for (Member member : members.values()) { allBucketIds.addAll(member.getBuckets()); int memberIdLength = member.getDistributedMember().toString().length(); if (longestMemberId < memberIdLength) { longestMemberId = memberIdLength; } } result .append(String.format("%" + longestMemberId + "s primaries size(MB) max(MB)", "MemberId")); for (Bucket bucket : allBucketIds) { result.append(String.format("%4s", bucket.getId())); } for (Member member : members.values()) { result.append(String.format("\n%" + longestMemberId + "s %9.0f %8.2f %8.2f", member.getDistributedMember(), member.getPrimaryLoad(), member.getSize() / (float) MEGABYTES, member.getConfiguredMaxMemory() / (float) MEGABYTES)); for (Bucket bucket : allBucketIds) { char symbol; if (member.getPrimaryBuckets().contains(bucket)) { symbol = 'P'; } else if (member.getBuckets().contains(bucket)) { symbol = 'R'; } else { symbol = 'X'; } result.append(" ").append(symbol); } } result.append(String.format("\n%" + longestMemberId + "s ", "#offline")); for (Bucket bucket : allBucketIds) { result.append(String.format("%4s", bucket.getOfflineMembers().size())); } return result.toString(); } }
googleapis/google-cloud-java
36,151
java-tasks/proto-google-cloud-tasks-v2beta2/src/main/java/com/google/cloud/tasks/v2beta2/ListTasksResponse.java
/* * Copyright 2025 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ // Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/cloud/tasks/v2beta2/cloudtasks.proto // Protobuf Java Version: 3.25.8 package com.google.cloud.tasks.v2beta2; /** * * * <pre> * Response message for listing tasks using * [ListTasks][google.cloud.tasks.v2beta2.CloudTasks.ListTasks]. * </pre> * * Protobuf type {@code google.cloud.tasks.v2beta2.ListTasksResponse} */ public final class ListTasksResponse extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.cloud.tasks.v2beta2.ListTasksResponse) ListTasksResponseOrBuilder { private static final long serialVersionUID = 0L; // Use ListTasksResponse.newBuilder() to construct. 
private ListTasksResponse(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private ListTasksResponse() { tasks_ = java.util.Collections.emptyList(); nextPageToken_ = ""; } @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance(UnusedPrivateParameter unused) { return new ListTasksResponse(); } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.tasks.v2beta2.CloudTasksProto .internal_static_google_cloud_tasks_v2beta2_ListTasksResponse_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.tasks.v2beta2.CloudTasksProto .internal_static_google_cloud_tasks_v2beta2_ListTasksResponse_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.tasks.v2beta2.ListTasksResponse.class, com.google.cloud.tasks.v2beta2.ListTasksResponse.Builder.class); } public static final int TASKS_FIELD_NUMBER = 1; @SuppressWarnings("serial") private java.util.List<com.google.cloud.tasks.v2beta2.Task> tasks_; /** * * * <pre> * The list of tasks. * </pre> * * <code>repeated .google.cloud.tasks.v2beta2.Task tasks = 1;</code> */ @java.lang.Override public java.util.List<com.google.cloud.tasks.v2beta2.Task> getTasksList() { return tasks_; } /** * * * <pre> * The list of tasks. * </pre> * * <code>repeated .google.cloud.tasks.v2beta2.Task tasks = 1;</code> */ @java.lang.Override public java.util.List<? extends com.google.cloud.tasks.v2beta2.TaskOrBuilder> getTasksOrBuilderList() { return tasks_; } /** * * * <pre> * The list of tasks. * </pre> * * <code>repeated .google.cloud.tasks.v2beta2.Task tasks = 1;</code> */ @java.lang.Override public int getTasksCount() { return tasks_.size(); } /** * * * <pre> * The list of tasks. 
* </pre> * * <code>repeated .google.cloud.tasks.v2beta2.Task tasks = 1;</code> */ @java.lang.Override public com.google.cloud.tasks.v2beta2.Task getTasks(int index) { return tasks_.get(index); } /** * * * <pre> * The list of tasks. * </pre> * * <code>repeated .google.cloud.tasks.v2beta2.Task tasks = 1;</code> */ @java.lang.Override public com.google.cloud.tasks.v2beta2.TaskOrBuilder getTasksOrBuilder(int index) { return tasks_.get(index); } public static final int NEXT_PAGE_TOKEN_FIELD_NUMBER = 2; @SuppressWarnings("serial") private volatile java.lang.Object nextPageToken_ = ""; /** * * * <pre> * A token to retrieve next page of results. * * To return the next page of results, call * [ListTasks][google.cloud.tasks.v2beta2.CloudTasks.ListTasks] with this * value as the * [page_token][google.cloud.tasks.v2beta2.ListTasksRequest.page_token]. * * If the next_page_token is empty, there are no more results. * </pre> * * <code>string next_page_token = 2;</code> * * @return The nextPageToken. */ @java.lang.Override public java.lang.String getNextPageToken() { java.lang.Object ref = nextPageToken_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); nextPageToken_ = s; return s; } } /** * * * <pre> * A token to retrieve next page of results. * * To return the next page of results, call * [ListTasks][google.cloud.tasks.v2beta2.CloudTasks.ListTasks] with this * value as the * [page_token][google.cloud.tasks.v2beta2.ListTasksRequest.page_token]. * * If the next_page_token is empty, there are no more results. * </pre> * * <code>string next_page_token = 2;</code> * * @return The bytes for nextPageToken. 
*/ @java.lang.Override public com.google.protobuf.ByteString getNextPageTokenBytes() { java.lang.Object ref = nextPageToken_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); nextPageToken_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { for (int i = 0; i < tasks_.size(); i++) { output.writeMessage(1, tasks_.get(i)); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 2, nextPageToken_); } getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; for (int i = 0; i < tasks_.size(); i++) { size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, tasks_.get(i)); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, nextPageToken_); } size += getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof com.google.cloud.tasks.v2beta2.ListTasksResponse)) { return super.equals(obj); } com.google.cloud.tasks.v2beta2.ListTasksResponse other = (com.google.cloud.tasks.v2beta2.ListTasksResponse) obj; if (!getTasksList().equals(other.getTasksList())) return false; if (!getNextPageToken().equals(other.getNextPageToken())) return false; if 
(!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); if (getTasksCount() > 0) { hash = (37 * hash) + TASKS_FIELD_NUMBER; hash = (53 * hash) + getTasksList().hashCode(); } hash = (37 * hash) + NEXT_PAGE_TOKEN_FIELD_NUMBER; hash = (53 * hash) + getNextPageToken().hashCode(); hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static com.google.cloud.tasks.v2beta2.ListTasksResponse parseFrom(java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.tasks.v2beta2.ListTasksResponse parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.tasks.v2beta2.ListTasksResponse parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.tasks.v2beta2.ListTasksResponse parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.tasks.v2beta2.ListTasksResponse parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.tasks.v2beta2.ListTasksResponse parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static 
com.google.cloud.tasks.v2beta2.ListTasksResponse parseFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.tasks.v2beta2.ListTasksResponse parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.tasks.v2beta2.ListTasksResponse parseDelimitedFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); } public static com.google.cloud.tasks.v2beta2.ListTasksResponse parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.tasks.v2beta2.ListTasksResponse parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.tasks.v2beta2.ListTasksResponse parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder(com.google.cloud.tasks.v2beta2.ListTasksResponse prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? 
new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * * * <pre> * Response message for listing tasks using * [ListTasks][google.cloud.tasks.v2beta2.CloudTasks.ListTasks]. * </pre> * * Protobuf type {@code google.cloud.tasks.v2beta2.ListTasksResponse} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:google.cloud.tasks.v2beta2.ListTasksResponse) com.google.cloud.tasks.v2beta2.ListTasksResponseOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.tasks.v2beta2.CloudTasksProto .internal_static_google_cloud_tasks_v2beta2_ListTasksResponse_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.tasks.v2beta2.CloudTasksProto .internal_static_google_cloud_tasks_v2beta2_ListTasksResponse_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.tasks.v2beta2.ListTasksResponse.class, com.google.cloud.tasks.v2beta2.ListTasksResponse.Builder.class); } // Construct using com.google.cloud.tasks.v2beta2.ListTasksResponse.newBuilder() private Builder() {} private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); } @java.lang.Override public Builder clear() { super.clear(); bitField0_ = 0; if (tasksBuilder_ == null) { tasks_ = java.util.Collections.emptyList(); } else { tasks_ = null; tasksBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000001); nextPageToken_ = ""; return this; } @java.lang.Override public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return com.google.cloud.tasks.v2beta2.CloudTasksProto 
.internal_static_google_cloud_tasks_v2beta2_ListTasksResponse_descriptor; } @java.lang.Override public com.google.cloud.tasks.v2beta2.ListTasksResponse getDefaultInstanceForType() { return com.google.cloud.tasks.v2beta2.ListTasksResponse.getDefaultInstance(); } @java.lang.Override public com.google.cloud.tasks.v2beta2.ListTasksResponse build() { com.google.cloud.tasks.v2beta2.ListTasksResponse result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public com.google.cloud.tasks.v2beta2.ListTasksResponse buildPartial() { com.google.cloud.tasks.v2beta2.ListTasksResponse result = new com.google.cloud.tasks.v2beta2.ListTasksResponse(this); buildPartialRepeatedFields(result); if (bitField0_ != 0) { buildPartial0(result); } onBuilt(); return result; } private void buildPartialRepeatedFields( com.google.cloud.tasks.v2beta2.ListTasksResponse result) { if (tasksBuilder_ == null) { if (((bitField0_ & 0x00000001) != 0)) { tasks_ = java.util.Collections.unmodifiableList(tasks_); bitField0_ = (bitField0_ & ~0x00000001); } result.tasks_ = tasks_; } else { result.tasks_ = tasksBuilder_.build(); } } private void buildPartial0(com.google.cloud.tasks.v2beta2.ListTasksResponse result) { int from_bitField0_ = bitField0_; if (((from_bitField0_ & 0x00000002) != 0)) { result.nextPageToken_ = nextPageToken_; } } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( 
com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof com.google.cloud.tasks.v2beta2.ListTasksResponse) { return mergeFrom((com.google.cloud.tasks.v2beta2.ListTasksResponse) other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(com.google.cloud.tasks.v2beta2.ListTasksResponse other) { if (other == com.google.cloud.tasks.v2beta2.ListTasksResponse.getDefaultInstance()) return this; if (tasksBuilder_ == null) { if (!other.tasks_.isEmpty()) { if (tasks_.isEmpty()) { tasks_ = other.tasks_; bitField0_ = (bitField0_ & ~0x00000001); } else { ensureTasksIsMutable(); tasks_.addAll(other.tasks_); } onChanged(); } } else { if (!other.tasks_.isEmpty()) { if (tasksBuilder_.isEmpty()) { tasksBuilder_.dispose(); tasksBuilder_ = null; tasks_ = other.tasks_; bitField0_ = (bitField0_ & ~0x00000001); tasksBuilder_ = com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ? 
getTasksFieldBuilder() : null; } else { tasksBuilder_.addAllMessages(other.tasks_); } } } if (!other.getNextPageToken().isEmpty()) { nextPageToken_ = other.nextPageToken_; bitField0_ |= 0x00000002; onChanged(); } this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { com.google.cloud.tasks.v2beta2.Task m = input.readMessage( com.google.cloud.tasks.v2beta2.Task.parser(), extensionRegistry); if (tasksBuilder_ == null) { ensureTasksIsMutable(); tasks_.add(m); } else { tasksBuilder_.addMessage(m); } break; } // case 10 case 18: { nextPageToken_ = input.readStringRequireUtf8(); bitField0_ |= 0x00000002; break; } // case 18 default: { if (!super.parseUnknownField(input, extensionRegistry, tag)) { done = true; // was an endgroup tag } break; } // default: } // switch (tag) } // while (!done) } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.unwrapIOException(); } finally { onChanged(); } // finally return this; } private int bitField0_; private java.util.List<com.google.cloud.tasks.v2beta2.Task> tasks_ = java.util.Collections.emptyList(); private void ensureTasksIsMutable() { if (!((bitField0_ & 0x00000001) != 0)) { tasks_ = new java.util.ArrayList<com.google.cloud.tasks.v2beta2.Task>(tasks_); bitField0_ |= 0x00000001; } } private com.google.protobuf.RepeatedFieldBuilderV3< com.google.cloud.tasks.v2beta2.Task, com.google.cloud.tasks.v2beta2.Task.Builder, com.google.cloud.tasks.v2beta2.TaskOrBuilder> tasksBuilder_; /** * * * <pre> * The list of tasks. 
* </pre> * * <code>repeated .google.cloud.tasks.v2beta2.Task tasks = 1;</code> */ public java.util.List<com.google.cloud.tasks.v2beta2.Task> getTasksList() { if (tasksBuilder_ == null) { return java.util.Collections.unmodifiableList(tasks_); } else { return tasksBuilder_.getMessageList(); } } /** * * * <pre> * The list of tasks. * </pre> * * <code>repeated .google.cloud.tasks.v2beta2.Task tasks = 1;</code> */ public int getTasksCount() { if (tasksBuilder_ == null) { return tasks_.size(); } else { return tasksBuilder_.getCount(); } } /** * * * <pre> * The list of tasks. * </pre> * * <code>repeated .google.cloud.tasks.v2beta2.Task tasks = 1;</code> */ public com.google.cloud.tasks.v2beta2.Task getTasks(int index) { if (tasksBuilder_ == null) { return tasks_.get(index); } else { return tasksBuilder_.getMessage(index); } } /** * * * <pre> * The list of tasks. * </pre> * * <code>repeated .google.cloud.tasks.v2beta2.Task tasks = 1;</code> */ public Builder setTasks(int index, com.google.cloud.tasks.v2beta2.Task value) { if (tasksBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureTasksIsMutable(); tasks_.set(index, value); onChanged(); } else { tasksBuilder_.setMessage(index, value); } return this; } /** * * * <pre> * The list of tasks. * </pre> * * <code>repeated .google.cloud.tasks.v2beta2.Task tasks = 1;</code> */ public Builder setTasks( int index, com.google.cloud.tasks.v2beta2.Task.Builder builderForValue) { if (tasksBuilder_ == null) { ensureTasksIsMutable(); tasks_.set(index, builderForValue.build()); onChanged(); } else { tasksBuilder_.setMessage(index, builderForValue.build()); } return this; } /** * * * <pre> * The list of tasks. 
* </pre> * * <code>repeated .google.cloud.tasks.v2beta2.Task tasks = 1;</code> */ public Builder addTasks(com.google.cloud.tasks.v2beta2.Task value) { if (tasksBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureTasksIsMutable(); tasks_.add(value); onChanged(); } else { tasksBuilder_.addMessage(value); } return this; } /** * * * <pre> * The list of tasks. * </pre> * * <code>repeated .google.cloud.tasks.v2beta2.Task tasks = 1;</code> */ public Builder addTasks(int index, com.google.cloud.tasks.v2beta2.Task value) { if (tasksBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureTasksIsMutable(); tasks_.add(index, value); onChanged(); } else { tasksBuilder_.addMessage(index, value); } return this; } /** * * * <pre> * The list of tasks. * </pre> * * <code>repeated .google.cloud.tasks.v2beta2.Task tasks = 1;</code> */ public Builder addTasks(com.google.cloud.tasks.v2beta2.Task.Builder builderForValue) { if (tasksBuilder_ == null) { ensureTasksIsMutable(); tasks_.add(builderForValue.build()); onChanged(); } else { tasksBuilder_.addMessage(builderForValue.build()); } return this; } /** * * * <pre> * The list of tasks. * </pre> * * <code>repeated .google.cloud.tasks.v2beta2.Task tasks = 1;</code> */ public Builder addTasks( int index, com.google.cloud.tasks.v2beta2.Task.Builder builderForValue) { if (tasksBuilder_ == null) { ensureTasksIsMutable(); tasks_.add(index, builderForValue.build()); onChanged(); } else { tasksBuilder_.addMessage(index, builderForValue.build()); } return this; } /** * * * <pre> * The list of tasks. * </pre> * * <code>repeated .google.cloud.tasks.v2beta2.Task tasks = 1;</code> */ public Builder addAllTasks( java.lang.Iterable<? 
extends com.google.cloud.tasks.v2beta2.Task> values) { if (tasksBuilder_ == null) { ensureTasksIsMutable(); com.google.protobuf.AbstractMessageLite.Builder.addAll(values, tasks_); onChanged(); } else { tasksBuilder_.addAllMessages(values); } return this; } /** * * * <pre> * The list of tasks. * </pre> * * <code>repeated .google.cloud.tasks.v2beta2.Task tasks = 1;</code> */ public Builder clearTasks() { if (tasksBuilder_ == null) { tasks_ = java.util.Collections.emptyList(); bitField0_ = (bitField0_ & ~0x00000001); onChanged(); } else { tasksBuilder_.clear(); } return this; } /** * * * <pre> * The list of tasks. * </pre> * * <code>repeated .google.cloud.tasks.v2beta2.Task tasks = 1;</code> */ public Builder removeTasks(int index) { if (tasksBuilder_ == null) { ensureTasksIsMutable(); tasks_.remove(index); onChanged(); } else { tasksBuilder_.remove(index); } return this; } /** * * * <pre> * The list of tasks. * </pre> * * <code>repeated .google.cloud.tasks.v2beta2.Task tasks = 1;</code> */ public com.google.cloud.tasks.v2beta2.Task.Builder getTasksBuilder(int index) { return getTasksFieldBuilder().getBuilder(index); } /** * * * <pre> * The list of tasks. * </pre> * * <code>repeated .google.cloud.tasks.v2beta2.Task tasks = 1;</code> */ public com.google.cloud.tasks.v2beta2.TaskOrBuilder getTasksOrBuilder(int index) { if (tasksBuilder_ == null) { return tasks_.get(index); } else { return tasksBuilder_.getMessageOrBuilder(index); } } /** * * * <pre> * The list of tasks. * </pre> * * <code>repeated .google.cloud.tasks.v2beta2.Task tasks = 1;</code> */ public java.util.List<? extends com.google.cloud.tasks.v2beta2.TaskOrBuilder> getTasksOrBuilderList() { if (tasksBuilder_ != null) { return tasksBuilder_.getMessageOrBuilderList(); } else { return java.util.Collections.unmodifiableList(tasks_); } } /** * * * <pre> * The list of tasks. 
* </pre> * * <code>repeated .google.cloud.tasks.v2beta2.Task tasks = 1;</code> */ public com.google.cloud.tasks.v2beta2.Task.Builder addTasksBuilder() { return getTasksFieldBuilder() .addBuilder(com.google.cloud.tasks.v2beta2.Task.getDefaultInstance()); } /** * * * <pre> * The list of tasks. * </pre> * * <code>repeated .google.cloud.tasks.v2beta2.Task tasks = 1;</code> */ public com.google.cloud.tasks.v2beta2.Task.Builder addTasksBuilder(int index) { return getTasksFieldBuilder() .addBuilder(index, com.google.cloud.tasks.v2beta2.Task.getDefaultInstance()); } /** * * * <pre> * The list of tasks. * </pre> * * <code>repeated .google.cloud.tasks.v2beta2.Task tasks = 1;</code> */ public java.util.List<com.google.cloud.tasks.v2beta2.Task.Builder> getTasksBuilderList() { return getTasksFieldBuilder().getBuilderList(); } private com.google.protobuf.RepeatedFieldBuilderV3< com.google.cloud.tasks.v2beta2.Task, com.google.cloud.tasks.v2beta2.Task.Builder, com.google.cloud.tasks.v2beta2.TaskOrBuilder> getTasksFieldBuilder() { if (tasksBuilder_ == null) { tasksBuilder_ = new com.google.protobuf.RepeatedFieldBuilderV3< com.google.cloud.tasks.v2beta2.Task, com.google.cloud.tasks.v2beta2.Task.Builder, com.google.cloud.tasks.v2beta2.TaskOrBuilder>( tasks_, ((bitField0_ & 0x00000001) != 0), getParentForChildren(), isClean()); tasks_ = null; } return tasksBuilder_; } private java.lang.Object nextPageToken_ = ""; /** * * * <pre> * A token to retrieve next page of results. * * To return the next page of results, call * [ListTasks][google.cloud.tasks.v2beta2.CloudTasks.ListTasks] with this * value as the * [page_token][google.cloud.tasks.v2beta2.ListTasksRequest.page_token]. * * If the next_page_token is empty, there are no more results. * </pre> * * <code>string next_page_token = 2;</code> * * @return The nextPageToken. 
*/ public java.lang.String getNextPageToken() { java.lang.Object ref = nextPageToken_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); nextPageToken_ = s; return s; } else { return (java.lang.String) ref; } } /** * * * <pre> * A token to retrieve next page of results. * * To return the next page of results, call * [ListTasks][google.cloud.tasks.v2beta2.CloudTasks.ListTasks] with this * value as the * [page_token][google.cloud.tasks.v2beta2.ListTasksRequest.page_token]. * * If the next_page_token is empty, there are no more results. * </pre> * * <code>string next_page_token = 2;</code> * * @return The bytes for nextPageToken. */ public com.google.protobuf.ByteString getNextPageTokenBytes() { java.lang.Object ref = nextPageToken_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); nextPageToken_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * * * <pre> * A token to retrieve next page of results. * * To return the next page of results, call * [ListTasks][google.cloud.tasks.v2beta2.CloudTasks.ListTasks] with this * value as the * [page_token][google.cloud.tasks.v2beta2.ListTasksRequest.page_token]. * * If the next_page_token is empty, there are no more results. * </pre> * * <code>string next_page_token = 2;</code> * * @param value The nextPageToken to set. * @return This builder for chaining. */ public Builder setNextPageToken(java.lang.String value) { if (value == null) { throw new NullPointerException(); } nextPageToken_ = value; bitField0_ |= 0x00000002; onChanged(); return this; } /** * * * <pre> * A token to retrieve next page of results. * * To return the next page of results, call * [ListTasks][google.cloud.tasks.v2beta2.CloudTasks.ListTasks] with this * value as the * [page_token][google.cloud.tasks.v2beta2.ListTasksRequest.page_token]. 
* * If the next_page_token is empty, there are no more results. * </pre> * * <code>string next_page_token = 2;</code> * * @return This builder for chaining. */ public Builder clearNextPageToken() { nextPageToken_ = getDefaultInstance().getNextPageToken(); bitField0_ = (bitField0_ & ~0x00000002); onChanged(); return this; } /** * * * <pre> * A token to retrieve next page of results. * * To return the next page of results, call * [ListTasks][google.cloud.tasks.v2beta2.CloudTasks.ListTasks] with this * value as the * [page_token][google.cloud.tasks.v2beta2.ListTasksRequest.page_token]. * * If the next_page_token is empty, there are no more results. * </pre> * * <code>string next_page_token = 2;</code> * * @param value The bytes for nextPageToken to set. * @return This builder for chaining. */ public Builder setNextPageTokenBytes(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); nextPageToken_ = value; bitField0_ |= 0x00000002; onChanged(); return this; } @java.lang.Override public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:google.cloud.tasks.v2beta2.ListTasksResponse) } // @@protoc_insertion_point(class_scope:google.cloud.tasks.v2beta2.ListTasksResponse) private static final com.google.cloud.tasks.v2beta2.ListTasksResponse DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new com.google.cloud.tasks.v2beta2.ListTasksResponse(); } public static com.google.cloud.tasks.v2beta2.ListTasksResponse getDefaultInstance() { return DEFAULT_INSTANCE; } private static final com.google.protobuf.Parser<ListTasksResponse> PARSER = new com.google.protobuf.AbstractParser<ListTasksResponse>() { @java.lang.Override 
public ListTasksResponse parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { Builder builder = newBuilder(); try { builder.mergeFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(builder.buildPartial()); } catch (com.google.protobuf.UninitializedMessageException e) { throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException(e) .setUnfinishedMessage(builder.buildPartial()); } return builder.buildPartial(); } }; public static com.google.protobuf.Parser<ListTasksResponse> parser() { return PARSER; } @java.lang.Override public com.google.protobuf.Parser<ListTasksResponse> getParserForType() { return PARSER; } @java.lang.Override public com.google.cloud.tasks.v2beta2.ListTasksResponse getDefaultInstanceForType() { return DEFAULT_INSTANCE; } }
googleapis/google-cloud-java
36,151
java-tasks/proto-google-cloud-tasks-v2beta3/src/main/java/com/google/cloud/tasks/v2beta3/ListTasksResponse.java
/* * Copyright 2025 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ // Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/cloud/tasks/v2beta3/cloudtasks.proto // Protobuf Java Version: 3.25.8 package com.google.cloud.tasks.v2beta3; /** * * * <pre> * Response message for listing tasks using * [ListTasks][google.cloud.tasks.v2beta3.CloudTasks.ListTasks]. * </pre> * * Protobuf type {@code google.cloud.tasks.v2beta3.ListTasksResponse} */ public final class ListTasksResponse extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.cloud.tasks.v2beta3.ListTasksResponse) ListTasksResponseOrBuilder { private static final long serialVersionUID = 0L; // Use ListTasksResponse.newBuilder() to construct. 
private ListTasksResponse(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private ListTasksResponse() { tasks_ = java.util.Collections.emptyList(); nextPageToken_ = ""; } @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance(UnusedPrivateParameter unused) { return new ListTasksResponse(); } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.tasks.v2beta3.CloudTasksProto .internal_static_google_cloud_tasks_v2beta3_ListTasksResponse_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.tasks.v2beta3.CloudTasksProto .internal_static_google_cloud_tasks_v2beta3_ListTasksResponse_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.tasks.v2beta3.ListTasksResponse.class, com.google.cloud.tasks.v2beta3.ListTasksResponse.Builder.class); } public static final int TASKS_FIELD_NUMBER = 1; @SuppressWarnings("serial") private java.util.List<com.google.cloud.tasks.v2beta3.Task> tasks_; /** * * * <pre> * The list of tasks. * </pre> * * <code>repeated .google.cloud.tasks.v2beta3.Task tasks = 1;</code> */ @java.lang.Override public java.util.List<com.google.cloud.tasks.v2beta3.Task> getTasksList() { return tasks_; } /** * * * <pre> * The list of tasks. * </pre> * * <code>repeated .google.cloud.tasks.v2beta3.Task tasks = 1;</code> */ @java.lang.Override public java.util.List<? extends com.google.cloud.tasks.v2beta3.TaskOrBuilder> getTasksOrBuilderList() { return tasks_; } /** * * * <pre> * The list of tasks. * </pre> * * <code>repeated .google.cloud.tasks.v2beta3.Task tasks = 1;</code> */ @java.lang.Override public int getTasksCount() { return tasks_.size(); } /** * * * <pre> * The list of tasks. 
* </pre> * * <code>repeated .google.cloud.tasks.v2beta3.Task tasks = 1;</code> */ @java.lang.Override public com.google.cloud.tasks.v2beta3.Task getTasks(int index) { return tasks_.get(index); } /** * * * <pre> * The list of tasks. * </pre> * * <code>repeated .google.cloud.tasks.v2beta3.Task tasks = 1;</code> */ @java.lang.Override public com.google.cloud.tasks.v2beta3.TaskOrBuilder getTasksOrBuilder(int index) { return tasks_.get(index); } public static final int NEXT_PAGE_TOKEN_FIELD_NUMBER = 2; @SuppressWarnings("serial") private volatile java.lang.Object nextPageToken_ = ""; /** * * * <pre> * A token to retrieve next page of results. * * To return the next page of results, call * [ListTasks][google.cloud.tasks.v2beta3.CloudTasks.ListTasks] with this * value as the * [page_token][google.cloud.tasks.v2beta3.ListTasksRequest.page_token]. * * If the next_page_token is empty, there are no more results. * </pre> * * <code>string next_page_token = 2;</code> * * @return The nextPageToken. */ @java.lang.Override public java.lang.String getNextPageToken() { java.lang.Object ref = nextPageToken_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); nextPageToken_ = s; return s; } } /** * * * <pre> * A token to retrieve next page of results. * * To return the next page of results, call * [ListTasks][google.cloud.tasks.v2beta3.CloudTasks.ListTasks] with this * value as the * [page_token][google.cloud.tasks.v2beta3.ListTasksRequest.page_token]. * * If the next_page_token is empty, there are no more results. * </pre> * * <code>string next_page_token = 2;</code> * * @return The bytes for nextPageToken. 
*/ @java.lang.Override public com.google.protobuf.ByteString getNextPageTokenBytes() { java.lang.Object ref = nextPageToken_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); nextPageToken_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { for (int i = 0; i < tasks_.size(); i++) { output.writeMessage(1, tasks_.get(i)); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 2, nextPageToken_); } getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; for (int i = 0; i < tasks_.size(); i++) { size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, tasks_.get(i)); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, nextPageToken_); } size += getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof com.google.cloud.tasks.v2beta3.ListTasksResponse)) { return super.equals(obj); } com.google.cloud.tasks.v2beta3.ListTasksResponse other = (com.google.cloud.tasks.v2beta3.ListTasksResponse) obj; if (!getTasksList().equals(other.getTasksList())) return false; if (!getNextPageToken().equals(other.getNextPageToken())) return false; if 
(!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); if (getTasksCount() > 0) { hash = (37 * hash) + TASKS_FIELD_NUMBER; hash = (53 * hash) + getTasksList().hashCode(); } hash = (37 * hash) + NEXT_PAGE_TOKEN_FIELD_NUMBER; hash = (53 * hash) + getNextPageToken().hashCode(); hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static com.google.cloud.tasks.v2beta3.ListTasksResponse parseFrom(java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.tasks.v2beta3.ListTasksResponse parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.tasks.v2beta3.ListTasksResponse parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.tasks.v2beta3.ListTasksResponse parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.tasks.v2beta3.ListTasksResponse parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.tasks.v2beta3.ListTasksResponse parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static 
com.google.cloud.tasks.v2beta3.ListTasksResponse parseFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.tasks.v2beta3.ListTasksResponse parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.tasks.v2beta3.ListTasksResponse parseDelimitedFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); } public static com.google.cloud.tasks.v2beta3.ListTasksResponse parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.tasks.v2beta3.ListTasksResponse parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.tasks.v2beta3.ListTasksResponse parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder(com.google.cloud.tasks.v2beta3.ListTasksResponse prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? 
new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * * * <pre> * Response message for listing tasks using * [ListTasks][google.cloud.tasks.v2beta3.CloudTasks.ListTasks]. * </pre> * * Protobuf type {@code google.cloud.tasks.v2beta3.ListTasksResponse} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:google.cloud.tasks.v2beta3.ListTasksResponse) com.google.cloud.tasks.v2beta3.ListTasksResponseOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.tasks.v2beta3.CloudTasksProto .internal_static_google_cloud_tasks_v2beta3_ListTasksResponse_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.tasks.v2beta3.CloudTasksProto .internal_static_google_cloud_tasks_v2beta3_ListTasksResponse_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.tasks.v2beta3.ListTasksResponse.class, com.google.cloud.tasks.v2beta3.ListTasksResponse.Builder.class); } // Construct using com.google.cloud.tasks.v2beta3.ListTasksResponse.newBuilder() private Builder() {} private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); } @java.lang.Override public Builder clear() { super.clear(); bitField0_ = 0; if (tasksBuilder_ == null) { tasks_ = java.util.Collections.emptyList(); } else { tasks_ = null; tasksBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000001); nextPageToken_ = ""; return this; } @java.lang.Override public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return com.google.cloud.tasks.v2beta3.CloudTasksProto 
.internal_static_google_cloud_tasks_v2beta3_ListTasksResponse_descriptor; } @java.lang.Override public com.google.cloud.tasks.v2beta3.ListTasksResponse getDefaultInstanceForType() { return com.google.cloud.tasks.v2beta3.ListTasksResponse.getDefaultInstance(); } @java.lang.Override public com.google.cloud.tasks.v2beta3.ListTasksResponse build() { com.google.cloud.tasks.v2beta3.ListTasksResponse result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public com.google.cloud.tasks.v2beta3.ListTasksResponse buildPartial() { com.google.cloud.tasks.v2beta3.ListTasksResponse result = new com.google.cloud.tasks.v2beta3.ListTasksResponse(this); buildPartialRepeatedFields(result); if (bitField0_ != 0) { buildPartial0(result); } onBuilt(); return result; } private void buildPartialRepeatedFields( com.google.cloud.tasks.v2beta3.ListTasksResponse result) { if (tasksBuilder_ == null) { if (((bitField0_ & 0x00000001) != 0)) { tasks_ = java.util.Collections.unmodifiableList(tasks_); bitField0_ = (bitField0_ & ~0x00000001); } result.tasks_ = tasks_; } else { result.tasks_ = tasksBuilder_.build(); } } private void buildPartial0(com.google.cloud.tasks.v2beta3.ListTasksResponse result) { int from_bitField0_ = bitField0_; if (((from_bitField0_ & 0x00000002) != 0)) { result.nextPageToken_ = nextPageToken_; } } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( 
com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof com.google.cloud.tasks.v2beta3.ListTasksResponse) { return mergeFrom((com.google.cloud.tasks.v2beta3.ListTasksResponse) other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(com.google.cloud.tasks.v2beta3.ListTasksResponse other) { if (other == com.google.cloud.tasks.v2beta3.ListTasksResponse.getDefaultInstance()) return this; if (tasksBuilder_ == null) { if (!other.tasks_.isEmpty()) { if (tasks_.isEmpty()) { tasks_ = other.tasks_; bitField0_ = (bitField0_ & ~0x00000001); } else { ensureTasksIsMutable(); tasks_.addAll(other.tasks_); } onChanged(); } } else { if (!other.tasks_.isEmpty()) { if (tasksBuilder_.isEmpty()) { tasksBuilder_.dispose(); tasksBuilder_ = null; tasks_ = other.tasks_; bitField0_ = (bitField0_ & ~0x00000001); tasksBuilder_ = com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ? 
getTasksFieldBuilder() : null; } else { tasksBuilder_.addAllMessages(other.tasks_); } } } if (!other.getNextPageToken().isEmpty()) { nextPageToken_ = other.nextPageToken_; bitField0_ |= 0x00000002; onChanged(); } this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { com.google.cloud.tasks.v2beta3.Task m = input.readMessage( com.google.cloud.tasks.v2beta3.Task.parser(), extensionRegistry); if (tasksBuilder_ == null) { ensureTasksIsMutable(); tasks_.add(m); } else { tasksBuilder_.addMessage(m); } break; } // case 10 case 18: { nextPageToken_ = input.readStringRequireUtf8(); bitField0_ |= 0x00000002; break; } // case 18 default: { if (!super.parseUnknownField(input, extensionRegistry, tag)) { done = true; // was an endgroup tag } break; } // default: } // switch (tag) } // while (!done) } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.unwrapIOException(); } finally { onChanged(); } // finally return this; } private int bitField0_; private java.util.List<com.google.cloud.tasks.v2beta3.Task> tasks_ = java.util.Collections.emptyList(); private void ensureTasksIsMutable() { if (!((bitField0_ & 0x00000001) != 0)) { tasks_ = new java.util.ArrayList<com.google.cloud.tasks.v2beta3.Task>(tasks_); bitField0_ |= 0x00000001; } } private com.google.protobuf.RepeatedFieldBuilderV3< com.google.cloud.tasks.v2beta3.Task, com.google.cloud.tasks.v2beta3.Task.Builder, com.google.cloud.tasks.v2beta3.TaskOrBuilder> tasksBuilder_; /** * * * <pre> * The list of tasks. 
* </pre> * * <code>repeated .google.cloud.tasks.v2beta3.Task tasks = 1;</code> */ public java.util.List<com.google.cloud.tasks.v2beta3.Task> getTasksList() { if (tasksBuilder_ == null) { return java.util.Collections.unmodifiableList(tasks_); } else { return tasksBuilder_.getMessageList(); } } /** * * * <pre> * The list of tasks. * </pre> * * <code>repeated .google.cloud.tasks.v2beta3.Task tasks = 1;</code> */ public int getTasksCount() { if (tasksBuilder_ == null) { return tasks_.size(); } else { return tasksBuilder_.getCount(); } } /** * * * <pre> * The list of tasks. * </pre> * * <code>repeated .google.cloud.tasks.v2beta3.Task tasks = 1;</code> */ public com.google.cloud.tasks.v2beta3.Task getTasks(int index) { if (tasksBuilder_ == null) { return tasks_.get(index); } else { return tasksBuilder_.getMessage(index); } } /** * * * <pre> * The list of tasks. * </pre> * * <code>repeated .google.cloud.tasks.v2beta3.Task tasks = 1;</code> */ public Builder setTasks(int index, com.google.cloud.tasks.v2beta3.Task value) { if (tasksBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureTasksIsMutable(); tasks_.set(index, value); onChanged(); } else { tasksBuilder_.setMessage(index, value); } return this; } /** * * * <pre> * The list of tasks. * </pre> * * <code>repeated .google.cloud.tasks.v2beta3.Task tasks = 1;</code> */ public Builder setTasks( int index, com.google.cloud.tasks.v2beta3.Task.Builder builderForValue) { if (tasksBuilder_ == null) { ensureTasksIsMutable(); tasks_.set(index, builderForValue.build()); onChanged(); } else { tasksBuilder_.setMessage(index, builderForValue.build()); } return this; } /** * * * <pre> * The list of tasks. 
* </pre> * * <code>repeated .google.cloud.tasks.v2beta3.Task tasks = 1;</code> */ public Builder addTasks(com.google.cloud.tasks.v2beta3.Task value) { if (tasksBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureTasksIsMutable(); tasks_.add(value); onChanged(); } else { tasksBuilder_.addMessage(value); } return this; } /** * * * <pre> * The list of tasks. * </pre> * * <code>repeated .google.cloud.tasks.v2beta3.Task tasks = 1;</code> */ public Builder addTasks(int index, com.google.cloud.tasks.v2beta3.Task value) { if (tasksBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureTasksIsMutable(); tasks_.add(index, value); onChanged(); } else { tasksBuilder_.addMessage(index, value); } return this; } /** * * * <pre> * The list of tasks. * </pre> * * <code>repeated .google.cloud.tasks.v2beta3.Task tasks = 1;</code> */ public Builder addTasks(com.google.cloud.tasks.v2beta3.Task.Builder builderForValue) { if (tasksBuilder_ == null) { ensureTasksIsMutable(); tasks_.add(builderForValue.build()); onChanged(); } else { tasksBuilder_.addMessage(builderForValue.build()); } return this; } /** * * * <pre> * The list of tasks. * </pre> * * <code>repeated .google.cloud.tasks.v2beta3.Task tasks = 1;</code> */ public Builder addTasks( int index, com.google.cloud.tasks.v2beta3.Task.Builder builderForValue) { if (tasksBuilder_ == null) { ensureTasksIsMutable(); tasks_.add(index, builderForValue.build()); onChanged(); } else { tasksBuilder_.addMessage(index, builderForValue.build()); } return this; } /** * * * <pre> * The list of tasks. * </pre> * * <code>repeated .google.cloud.tasks.v2beta3.Task tasks = 1;</code> */ public Builder addAllTasks( java.lang.Iterable<? 
        extends com.google.cloud.tasks.v2beta3.Task> values) {
      if (tasksBuilder_ == null) {
        // Plain-list mode: make tasks_ privately mutable, then bulk-copy.
        ensureTasksIsMutable();
        com.google.protobuf.AbstractMessageLite.Builder.addAll(values, tasks_);
        onChanged();
      } else {
        // Builder mode: delegate to the nested repeated-field builder.
        tasksBuilder_.addAllMessages(values);
      }
      return this;
    }

    /**
     *
     *
     * <pre>
     * The list of tasks.
     * </pre>
     *
     * <code>repeated .google.cloud.tasks.v2beta3.Task tasks = 1;</code>
     */
    public Builder clearTasks() {
      if (tasksBuilder_ == null) {
        // Reset to the shared immutable empty list and clear the
        // "tasks_ is privately mutable" flag (bit 0x00000001).
        tasks_ = java.util.Collections.emptyList();
        bitField0_ = (bitField0_ & ~0x00000001);
        onChanged();
      } else {
        tasksBuilder_.clear();
      }
      return this;
    }

    /**
     *
     *
     * <pre>
     * The list of tasks.
     * </pre>
     *
     * <code>repeated .google.cloud.tasks.v2beta3.Task tasks = 1;</code>
     */
    public Builder removeTasks(int index) {
      if (tasksBuilder_ == null) {
        ensureTasksIsMutable();
        tasks_.remove(index);
        onChanged();
      } else {
        tasksBuilder_.remove(index);
      }
      return this;
    }

    /**
     * Returns a mutable builder view of the task at {@code index}.
     *
     * <pre>
     * The list of tasks.
     * </pre>
     *
     * <code>repeated .google.cloud.tasks.v2beta3.Task tasks = 1;</code>
     */
    public com.google.cloud.tasks.v2beta3.Task.Builder getTasksBuilder(int index) {
      // Forces the switch from plain-list mode into builder mode.
      return getTasksFieldBuilder().getBuilder(index);
    }

    /**
     * Read-only view of the task at {@code index}.
     *
     * <pre>
     * The list of tasks.
     * </pre>
     *
     * <code>repeated .google.cloud.tasks.v2beta3.Task tasks = 1;</code>
     */
    public com.google.cloud.tasks.v2beta3.TaskOrBuilder getTasksOrBuilder(int index) {
      if (tasksBuilder_ == null) {
        return tasks_.get(index);
      } else {
        return tasksBuilder_.getMessageOrBuilder(index);
      }
    }

    /**
     * Read-only view of the whole repeated field.
     *
     * <pre>
     * The list of tasks.
     * </pre>
     *
     * <code>repeated .google.cloud.tasks.v2beta3.Task tasks = 1;</code>
     */
    public java.util.List<? extends com.google.cloud.tasks.v2beta3.TaskOrBuilder>
        getTasksOrBuilderList() {
      if (tasksBuilder_ != null) {
        return tasksBuilder_.getMessageOrBuilderList();
      } else {
        return java.util.Collections.unmodifiableList(tasks_);
      }
    }

    /**
     *
     *
     * <pre>
     * The list of tasks.
* </pre> * * <code>repeated .google.cloud.tasks.v2beta3.Task tasks = 1;</code> */ public com.google.cloud.tasks.v2beta3.Task.Builder addTasksBuilder() { return getTasksFieldBuilder() .addBuilder(com.google.cloud.tasks.v2beta3.Task.getDefaultInstance()); } /** * * * <pre> * The list of tasks. * </pre> * * <code>repeated .google.cloud.tasks.v2beta3.Task tasks = 1;</code> */ public com.google.cloud.tasks.v2beta3.Task.Builder addTasksBuilder(int index) { return getTasksFieldBuilder() .addBuilder(index, com.google.cloud.tasks.v2beta3.Task.getDefaultInstance()); } /** * * * <pre> * The list of tasks. * </pre> * * <code>repeated .google.cloud.tasks.v2beta3.Task tasks = 1;</code> */ public java.util.List<com.google.cloud.tasks.v2beta3.Task.Builder> getTasksBuilderList() { return getTasksFieldBuilder().getBuilderList(); } private com.google.protobuf.RepeatedFieldBuilderV3< com.google.cloud.tasks.v2beta3.Task, com.google.cloud.tasks.v2beta3.Task.Builder, com.google.cloud.tasks.v2beta3.TaskOrBuilder> getTasksFieldBuilder() { if (tasksBuilder_ == null) { tasksBuilder_ = new com.google.protobuf.RepeatedFieldBuilderV3< com.google.cloud.tasks.v2beta3.Task, com.google.cloud.tasks.v2beta3.Task.Builder, com.google.cloud.tasks.v2beta3.TaskOrBuilder>( tasks_, ((bitField0_ & 0x00000001) != 0), getParentForChildren(), isClean()); tasks_ = null; } return tasksBuilder_; } private java.lang.Object nextPageToken_ = ""; /** * * * <pre> * A token to retrieve next page of results. * * To return the next page of results, call * [ListTasks][google.cloud.tasks.v2beta3.CloudTasks.ListTasks] with this * value as the * [page_token][google.cloud.tasks.v2beta3.ListTasksRequest.page_token]. * * If the next_page_token is empty, there are no more results. * </pre> * * <code>string next_page_token = 2;</code> * * @return The nextPageToken. 
     */
    public java.lang.String getNextPageToken() {
      java.lang.Object ref = nextPageToken_;
      if (!(ref instanceof java.lang.String)) {
        // Field still holds the raw wire ByteString: decode it once and
        // cache the decoded String back into the field.
        com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        nextPageToken_ = s;
        return s;
      } else {
        return (java.lang.String) ref;
      }
    }

    /**
     *
     *
     * <pre>
     * A token to retrieve next page of results.
     *
     * To return the next page of results, call
     * [ListTasks][google.cloud.tasks.v2beta3.CloudTasks.ListTasks] with this
     * value as the
     * [page_token][google.cloud.tasks.v2beta3.ListTasksRequest.page_token].
     *
     * If the next_page_token is empty, there are no more results.
     * </pre>
     *
     * <code>string next_page_token = 2;</code>
     *
     * @return The bytes for nextPageToken.
     */
    public com.google.protobuf.ByteString getNextPageTokenBytes() {
      java.lang.Object ref = nextPageToken_;
      if (ref instanceof String) {
        // Mirror of getNextPageToken(): encode once, cache the ByteString.
        com.google.protobuf.ByteString b =
            com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
        nextPageToken_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }

    /**
     *
     *
     * <pre>
     * A token to retrieve next page of results.
     *
     * To return the next page of results, call
     * [ListTasks][google.cloud.tasks.v2beta3.CloudTasks.ListTasks] with this
     * value as the
     * [page_token][google.cloud.tasks.v2beta3.ListTasksRequest.page_token].
     *
     * If the next_page_token is empty, there are no more results.
     * </pre>
     *
     * <code>string next_page_token = 2;</code>
     *
     * @param value The nextPageToken to set.
     * @return This builder for chaining.
     */
    public Builder setNextPageToken(java.lang.String value) {
      if (value == null) {
        throw new NullPointerException();
      }
      nextPageToken_ = value;
      // Bit 0x00000002 marks next_page_token as explicitly set on the builder.
      bitField0_ |= 0x00000002;
      onChanged();
      return this;
    }

    /**
     *
     *
     * <pre>
     * A token to retrieve next page of results.
     *
     * To return the next page of results, call
     * [ListTasks][google.cloud.tasks.v2beta3.CloudTasks.ListTasks] with this
     * value as the
     * [page_token][google.cloud.tasks.v2beta3.ListTasksRequest.page_token].
* * If the next_page_token is empty, there are no more results. * </pre> * * <code>string next_page_token = 2;</code> * * @return This builder for chaining. */ public Builder clearNextPageToken() { nextPageToken_ = getDefaultInstance().getNextPageToken(); bitField0_ = (bitField0_ & ~0x00000002); onChanged(); return this; } /** * * * <pre> * A token to retrieve next page of results. * * To return the next page of results, call * [ListTasks][google.cloud.tasks.v2beta3.CloudTasks.ListTasks] with this * value as the * [page_token][google.cloud.tasks.v2beta3.ListTasksRequest.page_token]. * * If the next_page_token is empty, there are no more results. * </pre> * * <code>string next_page_token = 2;</code> * * @param value The bytes for nextPageToken to set. * @return This builder for chaining. */ public Builder setNextPageTokenBytes(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); nextPageToken_ = value; bitField0_ |= 0x00000002; onChanged(); return this; } @java.lang.Override public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:google.cloud.tasks.v2beta3.ListTasksResponse) } // @@protoc_insertion_point(class_scope:google.cloud.tasks.v2beta3.ListTasksResponse) private static final com.google.cloud.tasks.v2beta3.ListTasksResponse DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new com.google.cloud.tasks.v2beta3.ListTasksResponse(); } public static com.google.cloud.tasks.v2beta3.ListTasksResponse getDefaultInstance() { return DEFAULT_INSTANCE; } private static final com.google.protobuf.Parser<ListTasksResponse> PARSER = new com.google.protobuf.AbstractParser<ListTasksResponse>() { @java.lang.Override 
        public ListTasksResponse parsePartialFrom(
            com.google.protobuf.CodedInputStream input,
            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
            throws com.google.protobuf.InvalidProtocolBufferException {
          Builder builder = newBuilder();
          try {
            builder.mergeFrom(input, extensionRegistry);
          } catch (com.google.protobuf.InvalidProtocolBufferException e) {
            // Attach whatever was parsed before the failure, so callers can
            // inspect the partial message via getUnfinishedMessage().
            throw e.setUnfinishedMessage(builder.buildPartial());
          } catch (com.google.protobuf.UninitializedMessageException e) {
            throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
          } catch (java.io.IOException e) {
            // Wrap plain I/O failures as protobuf parse failures, again
            // carrying the partial message.
            throw new com.google.protobuf.InvalidProtocolBufferException(e)
                .setUnfinishedMessage(builder.buildPartial());
          }
          return builder.buildPartial();
        }
      };

  /** Returns the shared parser for {@code ListTasksResponse} messages. */
  public static com.google.protobuf.Parser<ListTasksResponse> parser() {
    return PARSER;
  }

  @java.lang.Override
  public com.google.protobuf.Parser<ListTasksResponse> getParserForType() {
    return PARSER;
  }

  @java.lang.Override
  public com.google.cloud.tasks.v2beta3.ListTasksResponse getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }
}
// NOTE(review): the three lines below are not Java — they appear to be
// dataset/index metadata (repo id, file size, file path) accidentally
// concatenated between the end of ListTasksResponse.java and the start of
// EvaluationMetrics.java. The two generated files should be split apart.
// googleapis/google-cloud-java
// 36,313
// java-datalabeling/proto-google-cloud-datalabeling-v1beta1/src/main/java/com/google/cloud/datalabeling/v1beta1/EvaluationMetrics.java
/* * Copyright 2025 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ // Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/cloud/datalabeling/v1beta1/evaluation.proto // Protobuf Java Version: 3.25.8 package com.google.cloud.datalabeling.v1beta1; /** Protobuf type {@code google.cloud.datalabeling.v1beta1.EvaluationMetrics} */ public final class EvaluationMetrics extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.cloud.datalabeling.v1beta1.EvaluationMetrics) EvaluationMetricsOrBuilder { private static final long serialVersionUID = 0L; // Use EvaluationMetrics.newBuilder() to construct. 
private EvaluationMetrics(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private EvaluationMetrics() {} @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance(UnusedPrivateParameter unused) { return new EvaluationMetrics(); } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.datalabeling.v1beta1.EvaluationOuterClass .internal_static_google_cloud_datalabeling_v1beta1_EvaluationMetrics_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.datalabeling.v1beta1.EvaluationOuterClass .internal_static_google_cloud_datalabeling_v1beta1_EvaluationMetrics_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.datalabeling.v1beta1.EvaluationMetrics.class, com.google.cloud.datalabeling.v1beta1.EvaluationMetrics.Builder.class); } private int metricsCase_ = 0; @SuppressWarnings("serial") private java.lang.Object metrics_; public enum MetricsCase implements com.google.protobuf.Internal.EnumLite, com.google.protobuf.AbstractMessage.InternalOneOfEnum { CLASSIFICATION_METRICS(1), OBJECT_DETECTION_METRICS(2), METRICS_NOT_SET(0); private final int value; private MetricsCase(int value) { this.value = value; } /** * @param value The number of the enum to look for. * @return The enum associated with the given number. * @deprecated Use {@link #forNumber(int)} instead. 
     */
    @java.lang.Deprecated
    public static MetricsCase valueOf(int value) {
      return forNumber(value);
    }

    /**
     * Maps a wire-format field number to its oneof case.
     *
     * @return the matching case, or {@code null} for an unknown number.
     */
    public static MetricsCase forNumber(int value) {
      switch (value) {
        case 1:
          return CLASSIFICATION_METRICS;
        case 2:
          return OBJECT_DETECTION_METRICS;
        case 0:
          return METRICS_NOT_SET;
        default:
          return null;
      }
    }

    public int getNumber() {
      return this.value;
    }
  };

  /** Returns which field of the {@code metrics} oneof is currently set. */
  public MetricsCase getMetricsCase() {
    return MetricsCase.forNumber(metricsCase_);
  }

  public static final int CLASSIFICATION_METRICS_FIELD_NUMBER = 1;

  /**
   * <code>.google.cloud.datalabeling.v1beta1.ClassificationMetrics classification_metrics = 1;
   * </code>
   *
   * @return Whether the classificationMetrics field is set.
   */
  @java.lang.Override
  public boolean hasClassificationMetrics() {
    // metricsCase_ doubles as the oneof discriminator: 1 selects this field.
    return metricsCase_ == 1;
  }

  /**
   * <code>.google.cloud.datalabeling.v1beta1.ClassificationMetrics classification_metrics = 1;
   * </code>
   *
   * @return The classificationMetrics, or the default instance if the oneof
   *     holds a different case.
   */
  @java.lang.Override
  public com.google.cloud.datalabeling.v1beta1.ClassificationMetrics getClassificationMetrics() {
    if (metricsCase_ == 1) {
      return (com.google.cloud.datalabeling.v1beta1.ClassificationMetrics) metrics_;
    }
    return com.google.cloud.datalabeling.v1beta1.ClassificationMetrics.getDefaultInstance();
  }

  /**
   * <code>.google.cloud.datalabeling.v1beta1.ClassificationMetrics classification_metrics = 1;
   * </code>
   */
  @java.lang.Override
  public com.google.cloud.datalabeling.v1beta1.ClassificationMetricsOrBuilder
      getClassificationMetricsOrBuilder() {
    if (metricsCase_ == 1) {
      return (com.google.cloud.datalabeling.v1beta1.ClassificationMetrics) metrics_;
    }
    return com.google.cloud.datalabeling.v1beta1.ClassificationMetrics.getDefaultInstance();
  }

  public static final int OBJECT_DETECTION_METRICS_FIELD_NUMBER = 2;

  /**
   * <code>.google.cloud.datalabeling.v1beta1.ObjectDetectionMetrics object_detection_metrics = 2;
   * </code>
   *
   * @return Whether the objectDetectionMetrics field is set.
*/ @java.lang.Override public boolean hasObjectDetectionMetrics() { return metricsCase_ == 2; } /** * <code>.google.cloud.datalabeling.v1beta1.ObjectDetectionMetrics object_detection_metrics = 2; * </code> * * @return The objectDetectionMetrics. */ @java.lang.Override public com.google.cloud.datalabeling.v1beta1.ObjectDetectionMetrics getObjectDetectionMetrics() { if (metricsCase_ == 2) { return (com.google.cloud.datalabeling.v1beta1.ObjectDetectionMetrics) metrics_; } return com.google.cloud.datalabeling.v1beta1.ObjectDetectionMetrics.getDefaultInstance(); } /** * <code>.google.cloud.datalabeling.v1beta1.ObjectDetectionMetrics object_detection_metrics = 2; * </code> */ @java.lang.Override public com.google.cloud.datalabeling.v1beta1.ObjectDetectionMetricsOrBuilder getObjectDetectionMetricsOrBuilder() { if (metricsCase_ == 2) { return (com.google.cloud.datalabeling.v1beta1.ObjectDetectionMetrics) metrics_; } return com.google.cloud.datalabeling.v1beta1.ObjectDetectionMetrics.getDefaultInstance(); } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { if (metricsCase_ == 1) { output.writeMessage( 1, (com.google.cloud.datalabeling.v1beta1.ClassificationMetrics) metrics_); } if (metricsCase_ == 2) { output.writeMessage( 2, (com.google.cloud.datalabeling.v1beta1.ObjectDetectionMetrics) metrics_); } getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (metricsCase_ == 1) { size += com.google.protobuf.CodedOutputStream.computeMessageSize( 1, (com.google.cloud.datalabeling.v1beta1.ClassificationMetrics) metrics_); } if (metricsCase_ == 2) { size += 
com.google.protobuf.CodedOutputStream.computeMessageSize( 2, (com.google.cloud.datalabeling.v1beta1.ObjectDetectionMetrics) metrics_); } size += getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof com.google.cloud.datalabeling.v1beta1.EvaluationMetrics)) { return super.equals(obj); } com.google.cloud.datalabeling.v1beta1.EvaluationMetrics other = (com.google.cloud.datalabeling.v1beta1.EvaluationMetrics) obj; if (!getMetricsCase().equals(other.getMetricsCase())) return false; switch (metricsCase_) { case 1: if (!getClassificationMetrics().equals(other.getClassificationMetrics())) return false; break; case 2: if (!getObjectDetectionMetrics().equals(other.getObjectDetectionMetrics())) return false; break; case 0: default: } if (!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); switch (metricsCase_) { case 1: hash = (37 * hash) + CLASSIFICATION_METRICS_FIELD_NUMBER; hash = (53 * hash) + getClassificationMetrics().hashCode(); break; case 2: hash = (37 * hash) + OBJECT_DETECTION_METRICS_FIELD_NUMBER; hash = (53 * hash) + getObjectDetectionMetrics().hashCode(); break; case 0: default: } hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static com.google.cloud.datalabeling.v1beta1.EvaluationMetrics parseFrom( java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.datalabeling.v1beta1.EvaluationMetrics parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, 
extensionRegistry); } public static com.google.cloud.datalabeling.v1beta1.EvaluationMetrics parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.datalabeling.v1beta1.EvaluationMetrics parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.datalabeling.v1beta1.EvaluationMetrics parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.datalabeling.v1beta1.EvaluationMetrics parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.datalabeling.v1beta1.EvaluationMetrics parseFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.datalabeling.v1beta1.EvaluationMetrics parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.datalabeling.v1beta1.EvaluationMetrics parseDelimitedFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); } public static com.google.cloud.datalabeling.v1beta1.EvaluationMetrics parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return 
com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.datalabeling.v1beta1.EvaluationMetrics parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.datalabeling.v1beta1.EvaluationMetrics parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder( com.google.cloud.datalabeling.v1beta1.EvaluationMetrics prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? 
new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** Protobuf type {@code google.cloud.datalabeling.v1beta1.EvaluationMetrics} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:google.cloud.datalabeling.v1beta1.EvaluationMetrics) com.google.cloud.datalabeling.v1beta1.EvaluationMetricsOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.datalabeling.v1beta1.EvaluationOuterClass .internal_static_google_cloud_datalabeling_v1beta1_EvaluationMetrics_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.datalabeling.v1beta1.EvaluationOuterClass .internal_static_google_cloud_datalabeling_v1beta1_EvaluationMetrics_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.datalabeling.v1beta1.EvaluationMetrics.class, com.google.cloud.datalabeling.v1beta1.EvaluationMetrics.Builder.class); } // Construct using com.google.cloud.datalabeling.v1beta1.EvaluationMetrics.newBuilder() private Builder() {} private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); } @java.lang.Override public Builder clear() { super.clear(); bitField0_ = 0; if (classificationMetricsBuilder_ != null) { classificationMetricsBuilder_.clear(); } if (objectDetectionMetricsBuilder_ != null) { objectDetectionMetricsBuilder_.clear(); } metricsCase_ = 0; metrics_ = null; return this; } @java.lang.Override public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return com.google.cloud.datalabeling.v1beta1.EvaluationOuterClass 
.internal_static_google_cloud_datalabeling_v1beta1_EvaluationMetrics_descriptor; } @java.lang.Override public com.google.cloud.datalabeling.v1beta1.EvaluationMetrics getDefaultInstanceForType() { return com.google.cloud.datalabeling.v1beta1.EvaluationMetrics.getDefaultInstance(); } @java.lang.Override public com.google.cloud.datalabeling.v1beta1.EvaluationMetrics build() { com.google.cloud.datalabeling.v1beta1.EvaluationMetrics result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public com.google.cloud.datalabeling.v1beta1.EvaluationMetrics buildPartial() { com.google.cloud.datalabeling.v1beta1.EvaluationMetrics result = new com.google.cloud.datalabeling.v1beta1.EvaluationMetrics(this); if (bitField0_ != 0) { buildPartial0(result); } buildPartialOneofs(result); onBuilt(); return result; } private void buildPartial0(com.google.cloud.datalabeling.v1beta1.EvaluationMetrics result) { int from_bitField0_ = bitField0_; } private void buildPartialOneofs( com.google.cloud.datalabeling.v1beta1.EvaluationMetrics result) { result.metricsCase_ = metricsCase_; result.metrics_ = this.metrics_; if (metricsCase_ == 1 && classificationMetricsBuilder_ != null) { result.metrics_ = classificationMetricsBuilder_.build(); } if (metricsCase_ == 2 && objectDetectionMetricsBuilder_ != null) { result.metrics_ = objectDetectionMetricsBuilder_.build(); } } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder 
setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof com.google.cloud.datalabeling.v1beta1.EvaluationMetrics) { return mergeFrom((com.google.cloud.datalabeling.v1beta1.EvaluationMetrics) other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(com.google.cloud.datalabeling.v1beta1.EvaluationMetrics other) { if (other == com.google.cloud.datalabeling.v1beta1.EvaluationMetrics.getDefaultInstance()) return this; switch (other.getMetricsCase()) { case CLASSIFICATION_METRICS: { mergeClassificationMetrics(other.getClassificationMetrics()); break; } case OBJECT_DETECTION_METRICS: { mergeObjectDetectionMetrics(other.getObjectDetectionMetrics()); break; } case METRICS_NOT_SET: { break; } } this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { input.readMessage( getClassificationMetricsFieldBuilder().getBuilder(), extensionRegistry); metricsCase_ = 1; break; } // case 10 case 18: { input.readMessage( getObjectDetectionMetricsFieldBuilder().getBuilder(), extensionRegistry); metricsCase_ = 2; break; } // case 18 default: { if (!super.parseUnknownField(input, extensionRegistry, tag)) { done 
= true; // was an endgroup tag } break; } // default: } // switch (tag) } // while (!done) } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.unwrapIOException(); } finally { onChanged(); } // finally return this; } private int metricsCase_ = 0; private java.lang.Object metrics_; public MetricsCase getMetricsCase() { return MetricsCase.forNumber(metricsCase_); } public Builder clearMetrics() { metricsCase_ = 0; metrics_ = null; onChanged(); return this; } private int bitField0_; private com.google.protobuf.SingleFieldBuilderV3< com.google.cloud.datalabeling.v1beta1.ClassificationMetrics, com.google.cloud.datalabeling.v1beta1.ClassificationMetrics.Builder, com.google.cloud.datalabeling.v1beta1.ClassificationMetricsOrBuilder> classificationMetricsBuilder_; /** * <code>.google.cloud.datalabeling.v1beta1.ClassificationMetrics classification_metrics = 1; * </code> * * @return Whether the classificationMetrics field is set. */ @java.lang.Override public boolean hasClassificationMetrics() { return metricsCase_ == 1; } /** * <code>.google.cloud.datalabeling.v1beta1.ClassificationMetrics classification_metrics = 1; * </code> * * @return The classificationMetrics. 
*/ @java.lang.Override public com.google.cloud.datalabeling.v1beta1.ClassificationMetrics getClassificationMetrics() { if (classificationMetricsBuilder_ == null) { if (metricsCase_ == 1) { return (com.google.cloud.datalabeling.v1beta1.ClassificationMetrics) metrics_; } return com.google.cloud.datalabeling.v1beta1.ClassificationMetrics.getDefaultInstance(); } else { if (metricsCase_ == 1) { return classificationMetricsBuilder_.getMessage(); } return com.google.cloud.datalabeling.v1beta1.ClassificationMetrics.getDefaultInstance(); } } /** * <code>.google.cloud.datalabeling.v1beta1.ClassificationMetrics classification_metrics = 1; * </code> */ public Builder setClassificationMetrics( com.google.cloud.datalabeling.v1beta1.ClassificationMetrics value) { if (classificationMetricsBuilder_ == null) { if (value == null) { throw new NullPointerException(); } metrics_ = value; onChanged(); } else { classificationMetricsBuilder_.setMessage(value); } metricsCase_ = 1; return this; } /** * <code>.google.cloud.datalabeling.v1beta1.ClassificationMetrics classification_metrics = 1; * </code> */ public Builder setClassificationMetrics( com.google.cloud.datalabeling.v1beta1.ClassificationMetrics.Builder builderForValue) { if (classificationMetricsBuilder_ == null) { metrics_ = builderForValue.build(); onChanged(); } else { classificationMetricsBuilder_.setMessage(builderForValue.build()); } metricsCase_ = 1; return this; } /** * <code>.google.cloud.datalabeling.v1beta1.ClassificationMetrics classification_metrics = 1; * </code> */ public Builder mergeClassificationMetrics( com.google.cloud.datalabeling.v1beta1.ClassificationMetrics value) { if (classificationMetricsBuilder_ == null) { if (metricsCase_ == 1 && metrics_ != com.google.cloud.datalabeling.v1beta1.ClassificationMetrics .getDefaultInstance()) { metrics_ = com.google.cloud.datalabeling.v1beta1.ClassificationMetrics.newBuilder( (com.google.cloud.datalabeling.v1beta1.ClassificationMetrics) metrics_) .mergeFrom(value) 
.buildPartial(); } else { metrics_ = value; } onChanged(); } else { if (metricsCase_ == 1) { classificationMetricsBuilder_.mergeFrom(value); } else { classificationMetricsBuilder_.setMessage(value); } } metricsCase_ = 1; return this; } /** * <code>.google.cloud.datalabeling.v1beta1.ClassificationMetrics classification_metrics = 1; * </code> */ public Builder clearClassificationMetrics() { if (classificationMetricsBuilder_ == null) { if (metricsCase_ == 1) { metricsCase_ = 0; metrics_ = null; onChanged(); } } else { if (metricsCase_ == 1) { metricsCase_ = 0; metrics_ = null; } classificationMetricsBuilder_.clear(); } return this; } /** * <code>.google.cloud.datalabeling.v1beta1.ClassificationMetrics classification_metrics = 1; * </code> */ public com.google.cloud.datalabeling.v1beta1.ClassificationMetrics.Builder getClassificationMetricsBuilder() { return getClassificationMetricsFieldBuilder().getBuilder(); } /** * <code>.google.cloud.datalabeling.v1beta1.ClassificationMetrics classification_metrics = 1; * </code> */ @java.lang.Override public com.google.cloud.datalabeling.v1beta1.ClassificationMetricsOrBuilder getClassificationMetricsOrBuilder() { if ((metricsCase_ == 1) && (classificationMetricsBuilder_ != null)) { return classificationMetricsBuilder_.getMessageOrBuilder(); } else { if (metricsCase_ == 1) { return (com.google.cloud.datalabeling.v1beta1.ClassificationMetrics) metrics_; } return com.google.cloud.datalabeling.v1beta1.ClassificationMetrics.getDefaultInstance(); } } /** * <code>.google.cloud.datalabeling.v1beta1.ClassificationMetrics classification_metrics = 1; * </code> */ private com.google.protobuf.SingleFieldBuilderV3< com.google.cloud.datalabeling.v1beta1.ClassificationMetrics, com.google.cloud.datalabeling.v1beta1.ClassificationMetrics.Builder, com.google.cloud.datalabeling.v1beta1.ClassificationMetricsOrBuilder> getClassificationMetricsFieldBuilder() { if (classificationMetricsBuilder_ == null) { if (!(metricsCase_ == 1)) { metrics_ = 
com.google.cloud.datalabeling.v1beta1.ClassificationMetrics.getDefaultInstance(); } classificationMetricsBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< com.google.cloud.datalabeling.v1beta1.ClassificationMetrics, com.google.cloud.datalabeling.v1beta1.ClassificationMetrics.Builder, com.google.cloud.datalabeling.v1beta1.ClassificationMetricsOrBuilder>( (com.google.cloud.datalabeling.v1beta1.ClassificationMetrics) metrics_, getParentForChildren(), isClean()); metrics_ = null; } metricsCase_ = 1; onChanged(); return classificationMetricsBuilder_; } private com.google.protobuf.SingleFieldBuilderV3< com.google.cloud.datalabeling.v1beta1.ObjectDetectionMetrics, com.google.cloud.datalabeling.v1beta1.ObjectDetectionMetrics.Builder, com.google.cloud.datalabeling.v1beta1.ObjectDetectionMetricsOrBuilder> objectDetectionMetricsBuilder_; /** * <code>.google.cloud.datalabeling.v1beta1.ObjectDetectionMetrics object_detection_metrics = 2; * </code> * * @return Whether the objectDetectionMetrics field is set. */ @java.lang.Override public boolean hasObjectDetectionMetrics() { return metricsCase_ == 2; } /** * <code>.google.cloud.datalabeling.v1beta1.ObjectDetectionMetrics object_detection_metrics = 2; * </code> * * @return The objectDetectionMetrics. 
*/ @java.lang.Override public com.google.cloud.datalabeling.v1beta1.ObjectDetectionMetrics getObjectDetectionMetrics() { if (objectDetectionMetricsBuilder_ == null) { if (metricsCase_ == 2) { return (com.google.cloud.datalabeling.v1beta1.ObjectDetectionMetrics) metrics_; } return com.google.cloud.datalabeling.v1beta1.ObjectDetectionMetrics.getDefaultInstance(); } else { if (metricsCase_ == 2) { return objectDetectionMetricsBuilder_.getMessage(); } return com.google.cloud.datalabeling.v1beta1.ObjectDetectionMetrics.getDefaultInstance(); } } /** * <code>.google.cloud.datalabeling.v1beta1.ObjectDetectionMetrics object_detection_metrics = 2; * </code> */ public Builder setObjectDetectionMetrics( com.google.cloud.datalabeling.v1beta1.ObjectDetectionMetrics value) { if (objectDetectionMetricsBuilder_ == null) { if (value == null) { throw new NullPointerException(); } metrics_ = value; onChanged(); } else { objectDetectionMetricsBuilder_.setMessage(value); } metricsCase_ = 2; return this; } /** * <code>.google.cloud.datalabeling.v1beta1.ObjectDetectionMetrics object_detection_metrics = 2; * </code> */ public Builder setObjectDetectionMetrics( com.google.cloud.datalabeling.v1beta1.ObjectDetectionMetrics.Builder builderForValue) { if (objectDetectionMetricsBuilder_ == null) { metrics_ = builderForValue.build(); onChanged(); } else { objectDetectionMetricsBuilder_.setMessage(builderForValue.build()); } metricsCase_ = 2; return this; } /** * <code>.google.cloud.datalabeling.v1beta1.ObjectDetectionMetrics object_detection_metrics = 2; * </code> */ public Builder mergeObjectDetectionMetrics( com.google.cloud.datalabeling.v1beta1.ObjectDetectionMetrics value) { if (objectDetectionMetricsBuilder_ == null) { if (metricsCase_ == 2 && metrics_ != com.google.cloud.datalabeling.v1beta1.ObjectDetectionMetrics .getDefaultInstance()) { metrics_ = com.google.cloud.datalabeling.v1beta1.ObjectDetectionMetrics.newBuilder( (com.google.cloud.datalabeling.v1beta1.ObjectDetectionMetrics) 
metrics_) .mergeFrom(value) .buildPartial(); } else { metrics_ = value; } onChanged(); } else { if (metricsCase_ == 2) { objectDetectionMetricsBuilder_.mergeFrom(value); } else { objectDetectionMetricsBuilder_.setMessage(value); } } metricsCase_ = 2; return this; } /** * <code>.google.cloud.datalabeling.v1beta1.ObjectDetectionMetrics object_detection_metrics = 2; * </code> */ public Builder clearObjectDetectionMetrics() { if (objectDetectionMetricsBuilder_ == null) { if (metricsCase_ == 2) { metricsCase_ = 0; metrics_ = null; onChanged(); } } else { if (metricsCase_ == 2) { metricsCase_ = 0; metrics_ = null; } objectDetectionMetricsBuilder_.clear(); } return this; } /** * <code>.google.cloud.datalabeling.v1beta1.ObjectDetectionMetrics object_detection_metrics = 2; * </code> */ public com.google.cloud.datalabeling.v1beta1.ObjectDetectionMetrics.Builder getObjectDetectionMetricsBuilder() { return getObjectDetectionMetricsFieldBuilder().getBuilder(); } /** * <code>.google.cloud.datalabeling.v1beta1.ObjectDetectionMetrics object_detection_metrics = 2; * </code> */ @java.lang.Override public com.google.cloud.datalabeling.v1beta1.ObjectDetectionMetricsOrBuilder getObjectDetectionMetricsOrBuilder() { if ((metricsCase_ == 2) && (objectDetectionMetricsBuilder_ != null)) { return objectDetectionMetricsBuilder_.getMessageOrBuilder(); } else { if (metricsCase_ == 2) { return (com.google.cloud.datalabeling.v1beta1.ObjectDetectionMetrics) metrics_; } return com.google.cloud.datalabeling.v1beta1.ObjectDetectionMetrics.getDefaultInstance(); } } /** * <code>.google.cloud.datalabeling.v1beta1.ObjectDetectionMetrics object_detection_metrics = 2; * </code> */ private com.google.protobuf.SingleFieldBuilderV3< com.google.cloud.datalabeling.v1beta1.ObjectDetectionMetrics, com.google.cloud.datalabeling.v1beta1.ObjectDetectionMetrics.Builder, com.google.cloud.datalabeling.v1beta1.ObjectDetectionMetricsOrBuilder> getObjectDetectionMetricsFieldBuilder() { if (objectDetectionMetricsBuilder_ 
== null) { if (!(metricsCase_ == 2)) { metrics_ = com.google.cloud.datalabeling.v1beta1.ObjectDetectionMetrics.getDefaultInstance(); } objectDetectionMetricsBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< com.google.cloud.datalabeling.v1beta1.ObjectDetectionMetrics, com.google.cloud.datalabeling.v1beta1.ObjectDetectionMetrics.Builder, com.google.cloud.datalabeling.v1beta1.ObjectDetectionMetricsOrBuilder>( (com.google.cloud.datalabeling.v1beta1.ObjectDetectionMetrics) metrics_, getParentForChildren(), isClean()); metrics_ = null; } metricsCase_ = 2; onChanged(); return objectDetectionMetricsBuilder_; } @java.lang.Override public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:google.cloud.datalabeling.v1beta1.EvaluationMetrics) } // @@protoc_insertion_point(class_scope:google.cloud.datalabeling.v1beta1.EvaluationMetrics) private static final com.google.cloud.datalabeling.v1beta1.EvaluationMetrics DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new com.google.cloud.datalabeling.v1beta1.EvaluationMetrics(); } public static com.google.cloud.datalabeling.v1beta1.EvaluationMetrics getDefaultInstance() { return DEFAULT_INSTANCE; } private static final com.google.protobuf.Parser<EvaluationMetrics> PARSER = new com.google.protobuf.AbstractParser<EvaluationMetrics>() { @java.lang.Override public EvaluationMetrics parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { Builder builder = newBuilder(); try { builder.mergeFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw 
e.setUnfinishedMessage(builder.buildPartial()); } catch (com.google.protobuf.UninitializedMessageException e) { throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException(e) .setUnfinishedMessage(builder.buildPartial()); } return builder.buildPartial(); } }; public static com.google.protobuf.Parser<EvaluationMetrics> parser() { return PARSER; } @java.lang.Override public com.google.protobuf.Parser<EvaluationMetrics> getParserForType() { return PARSER; } @java.lang.Override public com.google.cloud.datalabeling.v1beta1.EvaluationMetrics getDefaultInstanceForType() { return DEFAULT_INSTANCE; } }
apache/qpid-broker-j
36,306
broker-plugins/management-http/src/test/java/org/apache/qpid/server/management/plugin/servlet/rest/RestUserPreferenceHandlerTest.java
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.apache.qpid.server.management.plugin.servlet.rest; import static org.apache.qpid.server.management.plugin.HttpManagementConfiguration.DEFAULT_PREFERENCE_OPERATION_TIMEOUT; import static org.apache.qpid.server.model.preferences.PreferenceTestHelper.awaitPreferenceFuture; import static org.apache.qpid.server.model.preferences.PreferenceTestHelper.createPreferenceAttributes; import static org.junit.jupiter.api.Assertions.assertEquals; import static org.junit.jupiter.api.Assertions.assertNotEquals; import static org.junit.jupiter.api.Assertions.assertTrue; import static org.junit.jupiter.api.Assertions.fail; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.when; import java.security.Principal; import java.security.PrivilegedAction; import java.util.HashMap; import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.Set; import java.util.UUID; import javax.security.auth.Subject; import org.junit.jupiter.api.AfterEach; import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.Test; import org.apache.qpid.server.configuration.updater.CurrentThreadTaskExecutor; import org.apache.qpid.server.configuration.updater.TaskExecutor; 
import org.apache.qpid.server.model.ConfiguredObject;
import org.apache.qpid.server.model.preferences.GenericPrincipal;
import org.apache.qpid.server.model.preferences.Preference;
import org.apache.qpid.server.model.preferences.PreferenceFactory;
import org.apache.qpid.server.model.preferences.UserPreferences;
import org.apache.qpid.server.model.preferences.UserPreferencesImpl;
import org.apache.qpid.server.security.auth.AuthenticatedPrincipal;
import org.apache.qpid.server.security.auth.TestPrincipalUtils;
import org.apache.qpid.server.security.group.GroupPrincipal;
import org.apache.qpid.server.store.preferences.PreferenceStore;
import org.apache.qpid.test.utils.UnitTestBase;

/**
 * Unit tests for {@code RestUserPreferenceHandler}: exercises PUT/POST/GET/DELETE of user
 * preferences through the REST handler against a real in-memory {@code UserPreferencesImpl}
 * (only the owning object and the backing store are mocked).
 */
public class RestUserPreferenceHandlerTest extends UnitTestBase
{
    private static final String MYGROUP = "mygroup";
    private static final String MYGROUP_SERIALIZATION = TestPrincipalUtils.getTestPrincipalSerialization(MYGROUP);
    private static final String MYUSER = "myuser";
    private static final String MYUSER_SERIALIZATION = TestPrincipalUtils.getTestPrincipalSerialization(MYUSER);

    // Handler under test, created with the default preference-operation timeout.
    private final RestUserPreferenceHandler _handler = new RestUserPreferenceHandler(DEFAULT_PREFERENCE_OPERATION_TIMEOUT);
    private ConfiguredObject<?> _configuredObject;   // mocked owner of the preferences
    private UserPreferences _userPreferences;        // real implementation backed by a mocked store
    private Subject _subject;                        // authenticated subject: MYUSER in MYGROUP
    private Principal _userPrincipal;
    private GroupPrincipal _groupPrincipal;
    private TaskExecutor _preferenceTaskExecutor;    // current-thread executor so futures resolve synchronously

    /**
     * Builds the UserPreferencesImpl on a CurrentThreadTaskExecutor so that
     * awaitPreferenceFuture(...) completes without background threads.
     */
    @BeforeEach
    public void setUp() throws Exception
    {
        _configuredObject = mock(ConfiguredObject.class);
        PreferenceStore preferenceStore = mock(PreferenceStore.class);
        _preferenceTaskExecutor = new CurrentThreadTaskExecutor();
        _preferenceTaskExecutor.start();
        _userPreferences =
                new UserPreferencesImpl(_preferenceTaskExecutor, _configuredObject, preferenceStore, List.of());
        _subject = TestPrincipalUtils.createTestSubject(MYUSER, MYGROUP);
        _groupPrincipal = _subject.getPrincipals(GroupPrincipal.class).iterator().next();
        _userPrincipal = _subject.getPrincipals(AuthenticatedPrincipal.class).iterator().next();
        when(_configuredObject.getUserPreferences()).thenReturn(_userPreferences);
    }

    @AfterEach
    public void tearDown()
    {
        _preferenceTaskExecutor.stop();
    }

    // PUT to /{type}/{name} with a visibility list naming a real group: the stored
    // preference must expose exactly that group principal in its visibility list.
    @Test
    public void testPutWithVisibilityList_ValidGroup()
    {
        final RequestInfo requestInfo =
                RequestInfo.createPreferencesRequestInfo(List.of(), List.of("X-testtype", "myprefname"));
        final Map<String, Object> pref = new HashMap<>();
        pref.put(Preference.VALUE_ATTRIBUTE, Map.of());
        pref.put(Preference.VISIBILITY_LIST_ATTRIBUTE, List.of(MYGROUP_SERIALIZATION));
        Subject.doAs(_subject, (PrivilegedAction<Void>) () ->
        {
            _handler.handlePUT(_configuredObject, requestInfo, pref);
            Set<Preference> preferences = awaitPreferenceFuture(_userPreferences.getPreferences());
            assertEquals(1, (long) preferences.size(), "Unexpected number of preferences");
            Preference prefModel = preferences.iterator().next();
            final Set<Principal> visibilityList = prefModel.getVisibilityList();
            assertEquals(1, (long) visibilityList.size(), "Unexpected number of principals in visibility list");
            Principal principal = visibilityList.iterator().next();
            assertEquals(MYGROUP, principal.getName(), "Unexpected member of visibility list");
            return null;
        });
    }

    // A visibility-list entry that is not a valid principal serialization must be
    // rejected with IllegalArgumentException.
    @Test
    public void testPutWithVisibilityList_InvalidGroup()
    {
        final RequestInfo requestInfo =
                RequestInfo.createPreferencesRequestInfo(List.of(), List.of("X-testtype", "myprefname"));
        final Map<String, Object> pref = new HashMap<>();
        pref.put(Preference.VALUE_ATTRIBUTE, Map.of());
        pref.put(Preference.VISIBILITY_LIST_ATTRIBUTE, List.of("Invalid Group"));
        Subject.doAs(_subject, (PrivilegedAction<Void>) () ->
        {
            try
            {
                _handler.handlePUT(_configuredObject, requestInfo, pref);
                fail("Expected exception not thrown");
            }
            catch (IllegalArgumentException e)
            {
                // pass
            }
            return null;
        });
    }

    // PUT addressed by type and name: the preference name is taken from the URL path.
    @Test
    public void testPutByTypeAndName()
    {
        final String prefName = "myprefname";
        final RequestInfo requestInfo =
                RequestInfo.createPreferencesRequestInfo(List.of(), List.of("X-testtype", prefName));
        final Map<String, Object> pref = new HashMap<>();
        pref.put(Preference.VALUE_ATTRIBUTE, Map.of());
        Subject.doAs(_subject, (PrivilegedAction<Void>) () ->
        {
            _handler.handlePUT(_configuredObject, requestInfo, pref);
            Set<Preference> preferences = awaitPreferenceFuture(_userPreferences.getPreferences());
            assertEquals(1, (long) preferences.size(), "Unexpected number of preferences");
            Preference prefModel = preferences.iterator().next();
            assertEquals(prefName, prefModel.getName(), "Unexpected preference name");
            return null;
        });
    }

    // Replacing via PUT: supplying the stored id keeps the id; omitting it creates a new one
    // (second half of this test continues on the following lines).
    @Test
    public void testReplaceViaPutByTypeAndName()
    {
        final String prefName = "myprefname";
        final RequestInfo requestInfo =
                RequestInfo.createPreferencesRequestInfo(List.of(), List.of("X-testtype", prefName));
        final Map<String, Object> pref = new HashMap<>();
        pref.put(Preference.VALUE_ATTRIBUTE, Map.of());
        final Preference createdPreference = Subject.doAs(_subject, (PrivilegedAction<Preference>) () ->
        {
            _handler.handlePUT(_configuredObject, requestInfo, pref);
            Set<Preference> preferences = awaitPreferenceFuture (_userPreferences.getPreferences());
            assertEquals(1, (long) preferences.size(), "Unexpected number of preferences");
            Preference prefModel = preferences.iterator().next();
            assertEquals(prefName, prefModel.getName(), "Unexpected preference name");
            return prefModel;
        });
        // Replace keeping the original id: description changes, id must survive.
        final Map<String, Object> replacementPref = new HashMap<>();
        replacementPref.put(Preference.ID_ATTRIBUTE, createdPreference.getId().toString());
        replacementPref.put(Preference.VALUE_ATTRIBUTE, Map.of());
        final String changedDescription = "Replace that maintains id";
        replacementPref.put(Preference.DESCRIPTION_ATTRIBUTE, changedDescription);
        Subject.doAs(_subject, (PrivilegedAction<Void>) () ->
        {
            _handler.handlePUT(_configuredObject, requestInfo, replacementPref);
            Set<Preference> preferences = awaitPreferenceFuture(_userPreferences.getPreferences());
            assertEquals(1, (long) preferences.size(), "Unexpected number of preferences after update");
            // Continuation: the id supplied in the replacement payload must be retained.
            Preference updatedPref = preferences.iterator().next();
            assertEquals(createdPreference.getId(), updatedPref.getId(), "Unexpected preference id");
            assertEquals(prefName, updatedPref.getName(), "Unexpected preference name");
            assertEquals(changedDescription, updatedPref.getDescription(), "Unexpected preference description");
            return null;
        });
        // Second replace without an id: the handler must mint a fresh id.
        replacementPref.remove(Preference.ID_ATTRIBUTE);
        final String changedDescription2 = "Replace that omits id";
        replacementPref.put(Preference.DESCRIPTION_ATTRIBUTE, changedDescription2);
        Subject.doAs(_subject, (PrivilegedAction<Void>) () ->
        {
            _handler.handlePUT(_configuredObject, requestInfo, replacementPref);
            Set<Preference> preferences = awaitPreferenceFuture(_userPreferences.getPreferences());
            assertEquals(1, (long) preferences.size(), "Unexpected number of preferences after update");
            Preference updatedPref = preferences.iterator().next();
            assertNotEquals(createdPreference.getId(), updatedPref.getId(), "Replace without id should create new id");
            assertEquals(prefName, updatedPref.getName(), "Unexpected preference name");
            assertEquals(changedDescription2, updatedPref.getDescription(), "Unexpected preference description");
            return null;
        });
    }

    // PUT addressed by type only: the submitted list wholesale-replaces all
    // preferences of that type.
    @Test
    public void testReplaceViaPutByType()
    {
        final String prefName = "myprefname";
        final RequestInfo requestInfo =
                RequestInfo.createPreferencesRequestInfo(List.of(), List.of("X-testtype"));
        final Map<String, Object> pref = new HashMap<>();
        pref.put(Preference.NAME_ATTRIBUTE, prefName);
        pref.put(Preference.VALUE_ATTRIBUTE, new HashMap<>());
        Subject.doAs(_subject, (PrivilegedAction<Void>) () ->
        {
            _handler.handlePUT(_configuredObject, requestInfo, List.of(pref));
            Set<Preference> preferences = awaitPreferenceFuture(_userPreferences.getPreferences());
            assertEquals(1, (long) preferences.size(), "Unexpected number of preferences");
            Preference prefModel = preferences.iterator().next();
            assertEquals(prefName, prefModel.getName(), "Unexpected preference name");
            return null;
        });
        // Replace the single stored preference with two new ones of the same type.
        final String replacementPref1Name = "myprefreplacement1";
        final String replacementPref2Name = "myprefreplacement2";
        final Map<String, Object> replacementPref1 = new HashMap<>();
        replacementPref1.put(Preference.NAME_ATTRIBUTE, replacementPref1Name);
        replacementPref1.put(Preference.VALUE_ATTRIBUTE, Map.of());
        final Map<String, Object> replacementPref2 = new HashMap<>();
        replacementPref2.put(Preference.NAME_ATTRIBUTE, replacementPref2Name);
        replacementPref2.put(Preference.VALUE_ATTRIBUTE, Map.of());
        Subject.doAs(_subject, (PrivilegedAction<Void>) () ->
        {
            _handler.handlePUT(_configuredObject, requestInfo, List.of(replacementPref1, replacementPref2));
            Set<Preference> preferences = awaitPreferenceFuture(_userPreferences.getPreferences());
            assertEquals(2, (long) preferences.size(), "Unexpected number of preferences after update");
            Set<String> prefNames = new HashSet<>(preferences.size());
            for (Preference pref1 : preferences)
            {
                prefNames.add(pref1.getName());
            }
            assertTrue(prefNames.contains(replacementPref1Name),
                    "Replacement preference " + replacementPref1Name + " not found.");
            assertTrue(prefNames.contains(replacementPref2Name),
                    "Replacement preference " + replacementPref2Name + " not found.");
            return null;
        });
    }

    // Root-level PUT (no type in the path) replaces preferences across all types;
    // the payload is a map of type -> list of preferences (continues on following lines).
    @Test
    public void testReplaceAllViaPut()
    {
        final String pref1Name = "mypref1name";
        final String pref1Type = "X-testtype1";
        final String pref2Name = "mypref2name";
        final String pref2Type = "X-testtype2";
        final RequestInfo requestInfo = RequestInfo.createPreferencesRequestInfo(List.of(), List.of());
        final Map<String, Object> pref1 = new HashMap<>();
        pref1.put(Preference.NAME_ATTRIBUTE, pref1Name);
        pref1.put(Preference.VALUE_ATTRIBUTE, Map.of());
        pref1.put(Preference.TYPE_ATTRIBUTE, pref1Type);
        final Map<String, Object> pref2 = new HashMap<>();
        pref2.put(Preference.NAME_ATTRIBUTE, pref2Name);
        pref2.put(Preference.VALUE_ATTRIBUTE, Map.of());
        pref2.put(Preference.TYPE_ATTRIBUTE, pref2Type);
        final Map<String, List<Map<String, Object>>> payload = new HashMap<>();
        payload.put(pref1Type, List.of(pref1));
        payload.put(pref2Type, List.of(pref2));
        Subject.doAs(_subject, (PrivilegedAction<Void>) () ->
        {
            _handler.handlePUT(_configuredObject, requestInfo, payload);
            Set<Preference> preferences = awaitPreferenceFuture(_userPreferences.getPreferences());
            assertEquals(2, (long) preferences.size(), "Unexpected number of preferences");
            return null;
        });
        // Root-level replace with a payload containing only pref1Type: the pref2Type
        // preference must disappear as well.
        final String replacementPref1Name = "myprefreplacement1";
        final Map<String, Object> replacementPref1 = new HashMap<>();
        replacementPref1.put(Preference.NAME_ATTRIBUTE, replacementPref1Name);
        replacementPref1.put(Preference.VALUE_ATTRIBUTE, Map.of());
        replacementPref1.put(Preference.TYPE_ATTRIBUTE, pref1Type);
        payload.clear();
        payload.put(pref1Type, List.of(replacementPref1));
        Subject.doAs(_subject, (PrivilegedAction<Void>) () ->
        {
            _handler.handlePUT(_configuredObject, requestInfo, payload);
            Set<Preference> preferences = awaitPreferenceFuture(_userPreferences.getPreferences());
            assertEquals(1, (long) preferences.size(), "Unexpected number of preferences after update");
            Preference prefModel = preferences.iterator().next();
            assertEquals(replacementPref1Name, prefModel.getName(), "Unexpected preference name");
            return null;
        });
    }

    // POST to /{type} with a valid group in the visibility list: the stored
    // preference must carry that group principal.
    @Test
    public void testPostToTypeWithVisibilityList_ValidGroup()
    {
        final RequestInfo typeRequestInfo =
                RequestInfo.createPreferencesRequestInfo(List.of(), List.of("X-testtype"));
        final Map<String, Object> pref = new HashMap<>();
        pref.put(Preference.NAME_ATTRIBUTE, "testPref");
        pref.put(Preference.VALUE_ATTRIBUTE, Map.of());
        pref.put(Preference.VISIBILITY_LIST_ATTRIBUTE, List.of(MYGROUP_SERIALIZATION));
        Subject.doAs(_subject, (PrivilegedAction<Void>) () ->
        {
            _handler.handlePOST(_configuredObject, typeRequestInfo, List.of(pref));
            Set<Preference> preferences = awaitPreferenceFuture(_userPreferences.getPreferences());
            assertEquals(1, (long) preferences.size(), "Unexpected number of preferences");
            Preference prefModel = preferences.iterator().next();
            final Set<Principal> visibilityList = prefModel.getVisibilityList();
            assertEquals(1, (long) visibilityList.size(), "Unexpected number of principals in visibility list");
            Principal principal = visibilityList.iterator().next();
            assertEquals(MYGROUP, principal.getName(), "Unexpected member of visibility list");
            return null;
        });
    }

    // POST to the root (payload keyed by type) with a valid group in the visibility list.
    @Test
    public void testPostToRootWithVisibilityList_ValidGroup()
    {
        final RequestInfo rootRequestInfo =
                RequestInfo.createPreferencesRequestInfo(List.of(), List.of());
        final Map<String, Object> pref = new HashMap<>();
        pref.put(Preference.NAME_ATTRIBUTE, "testPref");
        pref.put(Preference.VALUE_ATTRIBUTE, Map.of());
        pref.put(Preference.VISIBILITY_LIST_ATTRIBUTE, List.of(MYGROUP_SERIALIZATION));
        Subject.doAs(_subject, (PrivilegedAction<Void>) () ->
        {
            final Map<String, List<Map<String, Object>>> payload = Map.of("X-testtype2", List.of(pref));
            _handler.handlePOST(_configuredObject, rootRequestInfo, payload);
            Set<Preference> preferences = awaitPreferenceFuture(_userPreferences.getPreferences());
            assertEquals(1, (long) preferences.size(), "Unexpected number of preferences");
            Preference prefModel = preferences.iterator().next();
            final Set<Principal> visibilityList = prefModel.getVisibilityList();
            assertEquals(1, (long) visibilityList.size(), "Unexpected number of principals in visibility list");
            Principal principal = visibilityList.iterator().next();
            assertEquals(MYGROUP, principal.getName(), "Unexpected member of visibility list");
            return null;
        });
    }

    // POST to /{type} with an invalid visibility-list entry must fail.
    @Test
    public void testPostToTypeWithVisibilityList_InvalidGroup()
    {
        final RequestInfo requestInfo =
                RequestInfo.createPreferencesRequestInfo(List.of(), List.of("X-testtype"));
        final Map<String, Object> pref = new HashMap<>();
        pref.put(Preference.NAME_ATTRIBUTE, "testPref");
        pref.put(Preference.VALUE_ATTRIBUTE, Map.of());
        pref.put(Preference.VISIBILITY_LIST_ATTRIBUTE, List.of("Invalid Group"));
        Subject.doAs(_subject, (PrivilegedAction<Void>) () ->
        {
            try
            {
                _handler.handlePOST(_configuredObject, requestInfo, List.of(pref));
                fail("Expected exception not thrown");
            }
            catch (IllegalArgumentException e)
            {
                // pass
            }
            return null;
        });
    }

    // POST to the root with an invalid visibility-list entry must fail.
    // NOTE(review): the payload key is "X-testType" (capital T) while every other test
    // uses lower-case "X-testtype" — presumably intentional since the request fails
    // before the type is used, but worth confirming.
    @Test
    public void testPostToRootWithVisibilityList_InvalidGroup()
    {
        final RequestInfo requestInfo =
                RequestInfo.createPreferencesRequestInfo(List.of(), List.of());
        final Map<String, Object> pref = new HashMap<>();
        pref.put(Preference.NAME_ATTRIBUTE, "testPref");
        pref.put(Preference.VALUE_ATTRIBUTE, Map.of());
        pref.put(Preference.VISIBILITY_LIST_ATTRIBUTE, List.of("Invalid Group"));
        Subject.doAs(_subject, (PrivilegedAction<Void>) () ->
        {
            try
            {
                final Map<String, List<Map<String, Object>>> payload = Map.of("X-testType", List.of(pref));
                _handler.handlePOST(_configuredObject, requestInfo, payload);
                fail("Expected exception not thrown");
            }
            catch (IllegalArgumentException e)
            {
                // pass
            }
            return null;
        });
    }

    // GET must render the visibility list back as principals (continues on following lines).
    // NOTE(review): the lookup key below is the string literal "visibilityList" rather than
    // Preference.VISIBILITY_LIST_ATTRIBUTE used elsewhere — confirm they are the same value.
    @Test
    public void testGetHasCorrectVisibilityList()
    {
        final RequestInfo rootRequestInfo =
                RequestInfo.createPreferencesRequestInfo(List.of(), List.of());
        final String type = "X-testtype";
        Subject.doAs(_subject, (PrivilegedAction<Void>) () ->
        {
            Map<String, Object> prefAttributes = createPreferenceAttributes(
                    null, null, type, "testpref", null, MYUSER_SERIALIZATION,
                    Set.of(MYGROUP_SERIALIZATION), Map.of());
            Preference preference = PreferenceFactory.fromAttributes(_configuredObject, prefAttributes);
            awaitPreferenceFuture(_userPreferences.updateOrAppend(Set.of(preference)));
            Map<String, List<Map<String, Object>>> typeToPreferenceListMap =
                    (Map<String, List<Map<String, Object>>>) _handler.handleGET(_userPreferences, rootRequestInfo);
            assertEquals(1, (long) typeToPreferenceListMap.size(), "Unexpected preference map size");
            assertEquals(type, typeToPreferenceListMap.keySet().iterator().next(), "Unexpected type in preference map");
            List<Map<String, Object>> preferences = typeToPreferenceListMap.get(type);
            assertEquals(1, (long) preferences.size(), "Unexpected number of preferences");
            Set<Principal> visibilityList = (Set<Principal>) preferences.get(0).get("visibilityList");
            // Continuation: the rendered visibility list must contain exactly the test group.
            assertEquals(1, (long) visibilityList.size(), "Unexpected number of principals in visibility list");
            assertTrue(GenericPrincipal.principalsEqual(_groupPrincipal, visibilityList.iterator().next()),
                    "Unexpected principal in visibility list");
            return null;
        });
    }

    // GET filtered by the "id" query parameter must return only the matching preference.
    @Test
    public void testGetById()
    {
        Subject.doAs(_subject, (PrivilegedAction<Void>) () ->
        {
            final String type = "X-testtype";
            Map<String, Object> pref1Attributes = createPreferenceAttributes(
                    null, null, type, "testpref", null, MYUSER_SERIALIZATION, null, Map.of());
            Preference p1 = PreferenceFactory.fromAttributes(_configuredObject, pref1Attributes);
            Map<String, Object> pref2Attributes = createPreferenceAttributes(
                    null, null, type, "testpref2", null, MYUSER_SERIALIZATION, null, Map.of());
            Preference p2 = PreferenceFactory.fromAttributes(_configuredObject, pref2Attributes);
            awaitPreferenceFuture(_userPreferences.updateOrAppend(List.of(p1, p2)));
            UUID id = p1.getId();
            final RequestInfo rootRequestInfo =
                    RequestInfo.createPreferencesRequestInfo(List.of(), List.of(),
                            Map.of("id", List.of(id.toString())));
            Map<String, List<Map<String, Object>>> typeToPreferenceListMap =
                    (Map<String, List<Map<String, Object>>>) _handler.handleGET(_userPreferences, rootRequestInfo);
            assertEquals(1, (long) typeToPreferenceListMap.size(), "Unexpected p1 map size");
            assertEquals(type, typeToPreferenceListMap.keySet().iterator().next(), "Unexpected type in p1 map");
            List<Map<String, Object>> preferences = typeToPreferenceListMap.get(type);
            assertEquals(1, (long) preferences.size(), "Unexpected number of preferences");
            assertEquals(id, preferences.get(0).get(Preference.ID_ATTRIBUTE), "Unexpected id");
            return null;
        });
    }

    // DELETE filtered by the "id" query parameter removes only the matching preference.
    @Test
    public void testDeleteById()
    {
        Subject.doAs(_subject, (PrivilegedAction<Void>) () ->
        {
            final String type = "X-testtype";
            Map<String, Object> pref1Attributes = createPreferenceAttributes(
                    null, null, type, "testpref", null, MYUSER_SERIALIZATION, null, Map.of());
            Preference p1 = PreferenceFactory.fromAttributes(_configuredObject, pref1Attributes);
            Map<String, Object> pref2Attributes = createPreferenceAttributes(
                    null, null, type, "testpref2", null, MYUSER_SERIALIZATION, null, Map.of());
            Preference p2 = PreferenceFactory.fromAttributes(_configuredObject, pref2Attributes);
            awaitPreferenceFuture(_userPreferences.updateOrAppend(List.of(p1, p2)));
            UUID id = p1.getId();
            final RequestInfo rootRequestInfo =
                    RequestInfo.createPreferencesRequestInfo(List.of(), List.of(),
                            Map.of("id", List.of(id.toString())));
            _handler.handleDELETE(_userPreferences, rootRequestInfo);
            final Set<Preference> retrievedPreferences = awaitPreferenceFuture(_userPreferences.getPreferences());
            assertEquals(1, (long) retrievedPreferences.size(), "Unexpected number of preferences");
            // NOTE(review): assertion message "Unexpected type in p1 map" looks copy-pasted
            // from testGetById; it actually checks that only p2 survived the delete.
            assertTrue(retrievedPreferences.contains(p2), "Unexpected type in p1 map");
            return null;
        });
    }

    // DELETE addressed by type and name (setup delegated to doTestDelete).
    // NOTE(review): the local 'pref' map below is built but never used — candidate for removal.
    @Test
    public void testDeleteByTypeAndName()
    {
        final String preferenceType = "X-testtype";
        final String preferenceName = "myprefname";
        final RequestInfo requestInfo =
                RequestInfo.createPreferencesRequestInfo(List.of(), List.of(preferenceType, preferenceName));
        final Map<String, Object> pref = new HashMap<>();
        pref.put(Preference.VALUE_ATTRIBUTE, Map.of());
        doTestDelete(preferenceType, preferenceName, requestInfo);
    }

    // DELETE addressed by type only.
    @Test
    public void testDeleteByType()
    {
        final String preferenceType = "X-testtype";
        final String preferenceName = "myprefname";
        final RequestInfo requestInfo =
                RequestInfo.createPreferencesRequestInfo(List.of(), List.of(preferenceType));
        doTestDelete(preferenceType, preferenceName, requestInfo);
    }

    // DELETE addressed at the root (all preferences).
    // NOTE(review): as in testDeleteByTypeAndName, the local 'pref' map is unused.
    @Test
    public void testDeleteByRoot()
    {
        final String preferenceType = "X-testtype";
        final String preferenceName = "myprefname";
        final RequestInfo requestInfo =
                RequestInfo.createPreferencesRequestInfo(List.of(), List.of());
        final Map<String, Object> pref = new HashMap<>();
        pref.put(Preference.VALUE_ATTRIBUTE, Map.of());
        doTestDelete(preferenceType, preferenceName, requestInfo);
    }

    // A second user in the same group sees only preferences whose visibility list
    // includes that group (root-level "visible preferences" GET; continues below).
    @Test
    public void testGetVisiblePreferencesByRoot()
    {
        final String prefName = "testpref";
        final String prefType = "X-testtype";
        final RequestInfo rootRequestInfo =
                RequestInfo.createVisiblePreferencesRequestInfo(List.of(), List.of(), Map.of());
        Subject.doAs(_subject, (PrivilegedAction<Void>) () ->
        {
            final Set<Preference> preferences = new HashSet<>();
            Map<String, Object> pref1Attributes = createPreferenceAttributes(
                    null, null, prefType, prefName, null, MYUSER_SERIALIZATION,
                    Set.of(MYGROUP_SERIALIZATION), Map.of());
            Preference p1 = PreferenceFactory.fromAttributes(_configuredObject, pref1Attributes);
            preferences.add(p1);
            Map<String, Object> pref2Attributes = createPreferenceAttributes(
                    null, null, prefType, "testPref2", null, MYUSER_SERIALIZATION,
                    Set.of(), Map.of());
            Preference p2 = PreferenceFactory.fromAttributes(_configuredObject, pref2Attributes);
            preferences.add(p2);
            awaitPreferenceFuture(_userPreferences.updateOrAppend(preferences));
            return null;
        });
        Subject testSubject2 = TestPrincipalUtils.createTestSubject("testUser2", MYGROUP);
        Subject.doAs(testSubject2, (PrivilegedAction<Void>) () ->
        {
            Map<String, List<Map<String, Object>>> typeToPreferenceListMap =
                    (Map<String, List<Map<String, Object>>>) _handler.handleGET(_userPreferences, rootRequestInfo);
            assertEquals(1, (long) typeToPreferenceListMap.size(), "Unexpected preference map size");
            assertEquals(prefType, typeToPreferenceListMap.keySet().iterator().next(),
                    "Unexpected prefType in preference map");
            List<Map<String, Object>> preferences = typeToPreferenceListMap.get(prefType);
            assertEquals(1, (long) preferences.size(), "Unexpected number of preferences");
            assertEquals(prefName, preferences.get(0).get(Preference.NAME_ATTRIBUTE),
                    "Unexpected name of preferences");
            Set<Principal> visibilityList =
                    (Set<Principal>) preferences.get(0).get(Preference.VISIBILITY_LIST_ATTRIBUTE);
            assertEquals(1, (long) visibilityList.size(), "Unexpected number of principals in visibility list");
assertTrue(GenericPrincipal.principalsEqual(_groupPrincipal, visibilityList.iterator().next()), "Unexpected principal in visibility list"); assertTrue(GenericPrincipal.principalsEqual(_userPrincipal, (Principal) preferences.get(0) .get(Preference.OWNER_ATTRIBUTE)), "Unexpected owner"); return null; }); } @Test public void testGetVisiblePreferencesByType() { final String prefName = "testpref"; final String prefType = "X-testtype"; final RequestInfo rootRequestInfo = RequestInfo.createVisiblePreferencesRequestInfo(List.of(), List.of(prefType), Map.of()); Subject.doAs(_subject, (PrivilegedAction<Void>) () -> { final Set<Preference> preferences = new HashSet<>(); Map<String, Object> pref1Attributes = createPreferenceAttributes( null, null, prefType, prefName, null, MYUSER_SERIALIZATION, Set.of(MYGROUP_SERIALIZATION), Map.of()); Preference p1 = PreferenceFactory.fromAttributes(_configuredObject, pref1Attributes); preferences.add(p1); Map<String, Object> pref2Attributes = createPreferenceAttributes( null, null, prefType, "testPref2", null, MYUSER_SERIALIZATION, Set.of(), Map.of()); Preference p2 = PreferenceFactory.fromAttributes(_configuredObject, pref2Attributes); preferences.add(p2); awaitPreferenceFuture(_userPreferences.updateOrAppend(preferences)); return null; }); Subject testSubject2 = TestPrincipalUtils.createTestSubject("testUser2", MYGROUP); Subject.doAs(testSubject2, (PrivilegedAction<Void>) () -> { List<Map<String, Object>> preferences = (List<Map<String, Object>>) _handler.handleGET(_userPreferences, rootRequestInfo); assertEquals(1, (long) preferences.size(), "Unexpected number of preferences"); assertEquals(prefName, preferences.get(0).get(Preference.NAME_ATTRIBUTE), "Unexpected name of preferences"); Set<Principal> visibilityList = (Set<Principal>) preferences.get(0).get(Preference.VISIBILITY_LIST_ATTRIBUTE); assertEquals(1, (long) visibilityList.size(), "Unexpected number of principals in visibility list"); 
assertTrue(GenericPrincipal.principalsEqual(_groupPrincipal, visibilityList.iterator().next()), "Unexpected principal in visibility list"); assertTrue(GenericPrincipal.principalsEqual(_userPrincipal, (Principal) preferences.get(0) .get(Preference.OWNER_ATTRIBUTE)), "Unexpected owner"); return null; }); } @Test public void testGetVisiblePreferencesByTypeAndName() { final String prefName = "testpref"; final String prefType = "X-testtype"; final RequestInfo rootRequestInfo = RequestInfo.createVisiblePreferencesRequestInfo(List.of(), List.of(prefType, prefName), Map.of()); Subject.doAs(_subject, (PrivilegedAction<Void>) () -> { final Set<Preference> preferences = new HashSet<>(); Map<String, Object> pref1Attributes = createPreferenceAttributes( null, null, prefType, prefName, null, MYUSER_SERIALIZATION, Set.of(MYGROUP_SERIALIZATION), Map.of()); Preference p1 = PreferenceFactory.fromAttributes(_configuredObject, pref1Attributes); preferences.add(p1); Map<String, Object> pref2Attributes = createPreferenceAttributes( null, null, prefType, "testPref2", null, MYUSER_SERIALIZATION, Set.of(), Map.of()); Preference p2 = PreferenceFactory.fromAttributes(_configuredObject, pref2Attributes); preferences.add(p2); awaitPreferenceFuture(_userPreferences.updateOrAppend(preferences)); return null; }); Subject testSubject2 = TestPrincipalUtils.createTestSubject("testUser2", MYGROUP); Subject.doAs(testSubject2, (PrivilegedAction<Void>) () -> { Map<String, Object> preference = (Map<String, Object>) _handler.handleGET(_userPreferences, rootRequestInfo); assertEquals(prefName, preference.get(Preference.NAME_ATTRIBUTE), "Unexpected name of preferences"); Set<Principal> visibilityList = (Set<Principal>) preference.get(Preference.VISIBILITY_LIST_ATTRIBUTE); assertEquals(1, (long) visibilityList.size(), "Unexpected number of principals in visibility list"); assertTrue(GenericPrincipal.principalsEqual(_groupPrincipal, visibilityList.iterator().next()), "Unexpected principal in visibility 
list"); assertTrue(GenericPrincipal.principalsEqual(_userPrincipal, (Principal) preference.get(Preference.OWNER_ATTRIBUTE)), "Unexpected owner"); return null; }); } private void doTestDelete(final String preferenceType, final String preferenceName, final RequestInfo requestInfo) { Subject.doAs(_subject, (PrivilegedAction<Void>) () -> { Map<String, Object> preferenceAttributes = createPreferenceAttributes( null, null, preferenceType, preferenceName, null, MYUSER_SERIALIZATION, null, Map.of()); Preference preference = PreferenceFactory.fromAttributes(_configuredObject, preferenceAttributes); awaitPreferenceFuture(_userPreferences.updateOrAppend(Set.of(preference))); Set<Preference> retrievedPreferences = awaitPreferenceFuture(_userPreferences.getPreferences()); assertEquals(1, (long) retrievedPreferences.size(), "adding pref failed"); _handler.handleDELETE(_userPreferences, requestInfo); retrievedPreferences = awaitPreferenceFuture(_userPreferences.getPreferences()); assertEquals(0, (long) retrievedPreferences.size(), "Deletion of preference failed"); // this should be a noop _handler.handleDELETE(_userPreferences, requestInfo); return null; }); } }
googleads/google-ads-java
36,327
google-ads-stubs-v19/src/main/java/com/google/ads/googleads/v19/resources/CampaignSearchTermInsight.java
// Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/ads/googleads/v19/resources/campaign_search_term_insight.proto // Protobuf Java Version: 3.25.7 package com.google.ads.googleads.v19.resources; /** * <pre> * A Campaign search term view. * Historical data is available starting March 2023. * </pre> * * Protobuf type {@code google.ads.googleads.v19.resources.CampaignSearchTermInsight} */ public final class CampaignSearchTermInsight extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.ads.googleads.v19.resources.CampaignSearchTermInsight) CampaignSearchTermInsightOrBuilder { private static final long serialVersionUID = 0L; // Use CampaignSearchTermInsight.newBuilder() to construct. private CampaignSearchTermInsight(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private CampaignSearchTermInsight() { resourceName_ = ""; categoryLabel_ = ""; } @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance( UnusedPrivateParameter unused) { return new CampaignSearchTermInsight(); } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.ads.googleads.v19.resources.CampaignSearchTermInsightProto.internal_static_google_ads_googleads_v19_resources_CampaignSearchTermInsight_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.ads.googleads.v19.resources.CampaignSearchTermInsightProto.internal_static_google_ads_googleads_v19_resources_CampaignSearchTermInsight_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.ads.googleads.v19.resources.CampaignSearchTermInsight.class, com.google.ads.googleads.v19.resources.CampaignSearchTermInsight.Builder.class); } private int bitField0_; public static final int RESOURCE_NAME_FIELD_NUMBER = 1; @SuppressWarnings("serial") private volatile 
java.lang.Object resourceName_ = ""; /** * <pre> * Output only. The resource name of the campaign level search term insight. * Campaign level search term insight resource names have the form: * * `customers/{customer_id}/campaignSearchTermInsights/{campaign_id}~{category_id}` * </pre> * * <code>string resource_name = 1 [(.google.api.field_behavior) = OUTPUT_ONLY, (.google.api.resource_reference) = { ... }</code> * @return The resourceName. */ @java.lang.Override public java.lang.String getResourceName() { java.lang.Object ref = resourceName_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); resourceName_ = s; return s; } } /** * <pre> * Output only. The resource name of the campaign level search term insight. * Campaign level search term insight resource names have the form: * * `customers/{customer_id}/campaignSearchTermInsights/{campaign_id}~{category_id}` * </pre> * * <code>string resource_name = 1 [(.google.api.field_behavior) = OUTPUT_ONLY, (.google.api.resource_reference) = { ... }</code> * @return The bytes for resourceName. */ @java.lang.Override public com.google.protobuf.ByteString getResourceNameBytes() { java.lang.Object ref = resourceName_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); resourceName_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } public static final int CATEGORY_LABEL_FIELD_NUMBER = 2; @SuppressWarnings("serial") private volatile java.lang.Object categoryLabel_ = ""; /** * <pre> * Output only. The label for the search category. An empty string denotes the * catch-all category for search terms that didn't fit into another category. 
* </pre> * * <code>optional string category_label = 2 [(.google.api.field_behavior) = OUTPUT_ONLY];</code> * @return Whether the categoryLabel field is set. */ @java.lang.Override public boolean hasCategoryLabel() { return ((bitField0_ & 0x00000001) != 0); } /** * <pre> * Output only. The label for the search category. An empty string denotes the * catch-all category for search terms that didn't fit into another category. * </pre> * * <code>optional string category_label = 2 [(.google.api.field_behavior) = OUTPUT_ONLY];</code> * @return The categoryLabel. */ @java.lang.Override public java.lang.String getCategoryLabel() { java.lang.Object ref = categoryLabel_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); categoryLabel_ = s; return s; } } /** * <pre> * Output only. The label for the search category. An empty string denotes the * catch-all category for search terms that didn't fit into another category. * </pre> * * <code>optional string category_label = 2 [(.google.api.field_behavior) = OUTPUT_ONLY];</code> * @return The bytes for categoryLabel. */ @java.lang.Override public com.google.protobuf.ByteString getCategoryLabelBytes() { java.lang.Object ref = categoryLabel_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); categoryLabel_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } public static final int ID_FIELD_NUMBER = 3; private long id_ = 0L; /** * <pre> * Output only. The ID of the insight. * </pre> * * <code>optional int64 id = 3 [(.google.api.field_behavior) = OUTPUT_ONLY];</code> * @return Whether the id field is set. */ @java.lang.Override public boolean hasId() { return ((bitField0_ & 0x00000002) != 0); } /** * <pre> * Output only. The ID of the insight. 
* </pre> * * <code>optional int64 id = 3 [(.google.api.field_behavior) = OUTPUT_ONLY];</code> * @return The id. */ @java.lang.Override public long getId() { return id_; } public static final int CAMPAIGN_ID_FIELD_NUMBER = 4; private long campaignId_ = 0L; /** * <pre> * Output only. The ID of the campaign. * </pre> * * <code>optional int64 campaign_id = 4 [(.google.api.field_behavior) = OUTPUT_ONLY];</code> * @return Whether the campaignId field is set. */ @java.lang.Override public boolean hasCampaignId() { return ((bitField0_ & 0x00000004) != 0); } /** * <pre> * Output only. The ID of the campaign. * </pre> * * <code>optional int64 campaign_id = 4 [(.google.api.field_behavior) = OUTPUT_ONLY];</code> * @return The campaignId. */ @java.lang.Override public long getCampaignId() { return campaignId_; } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(resourceName_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 1, resourceName_); } if (((bitField0_ & 0x00000001) != 0)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 2, categoryLabel_); } if (((bitField0_ & 0x00000002) != 0)) { output.writeInt64(3, id_); } if (((bitField0_ & 0x00000004) != 0)) { output.writeInt64(4, campaignId_); } getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(resourceName_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, resourceName_); } if (((bitField0_ & 0x00000001) != 0)) { size += 
com.google.protobuf.GeneratedMessageV3.computeStringSize(2, categoryLabel_); } if (((bitField0_ & 0x00000002) != 0)) { size += com.google.protobuf.CodedOutputStream .computeInt64Size(3, id_); } if (((bitField0_ & 0x00000004) != 0)) { size += com.google.protobuf.CodedOutputStream .computeInt64Size(4, campaignId_); } size += getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof com.google.ads.googleads.v19.resources.CampaignSearchTermInsight)) { return super.equals(obj); } com.google.ads.googleads.v19.resources.CampaignSearchTermInsight other = (com.google.ads.googleads.v19.resources.CampaignSearchTermInsight) obj; if (!getResourceName() .equals(other.getResourceName())) return false; if (hasCategoryLabel() != other.hasCategoryLabel()) return false; if (hasCategoryLabel()) { if (!getCategoryLabel() .equals(other.getCategoryLabel())) return false; } if (hasId() != other.hasId()) return false; if (hasId()) { if (getId() != other.getId()) return false; } if (hasCampaignId() != other.hasCampaignId()) return false; if (hasCampaignId()) { if (getCampaignId() != other.getCampaignId()) return false; } if (!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); hash = (37 * hash) + RESOURCE_NAME_FIELD_NUMBER; hash = (53 * hash) + getResourceName().hashCode(); if (hasCategoryLabel()) { hash = (37 * hash) + CATEGORY_LABEL_FIELD_NUMBER; hash = (53 * hash) + getCategoryLabel().hashCode(); } if (hasId()) { hash = (37 * hash) + ID_FIELD_NUMBER; hash = (53 * hash) + com.google.protobuf.Internal.hashLong( getId()); } if (hasCampaignId()) { hash = (37 * hash) + CAMPAIGN_ID_FIELD_NUMBER; hash = (53 * hash) + com.google.protobuf.Internal.hashLong( 
getCampaignId()); } hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static com.google.ads.googleads.v19.resources.CampaignSearchTermInsight parseFrom( java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.ads.googleads.v19.resources.CampaignSearchTermInsight parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.ads.googleads.v19.resources.CampaignSearchTermInsight parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.ads.googleads.v19.resources.CampaignSearchTermInsight parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.ads.googleads.v19.resources.CampaignSearchTermInsight parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.ads.googleads.v19.resources.CampaignSearchTermInsight parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.ads.googleads.v19.resources.CampaignSearchTermInsight parseFrom(java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static com.google.ads.googleads.v19.resources.CampaignSearchTermInsight parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite 
extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } public static com.google.ads.googleads.v19.resources.CampaignSearchTermInsight parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input); } public static com.google.ads.googleads.v19.resources.CampaignSearchTermInsight parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static com.google.ads.googleads.v19.resources.CampaignSearchTermInsight parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static com.google.ads.googleads.v19.resources.CampaignSearchTermInsight parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder(com.google.ads.googleads.v19.resources.CampaignSearchTermInsight prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType( com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * <pre> * A Campaign search term view. 
* Historical data is available starting March 2023. * </pre> * * Protobuf type {@code google.ads.googleads.v19.resources.CampaignSearchTermInsight} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:google.ads.googleads.v19.resources.CampaignSearchTermInsight) com.google.ads.googleads.v19.resources.CampaignSearchTermInsightOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.ads.googleads.v19.resources.CampaignSearchTermInsightProto.internal_static_google_ads_googleads_v19_resources_CampaignSearchTermInsight_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.ads.googleads.v19.resources.CampaignSearchTermInsightProto.internal_static_google_ads_googleads_v19_resources_CampaignSearchTermInsight_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.ads.googleads.v19.resources.CampaignSearchTermInsight.class, com.google.ads.googleads.v19.resources.CampaignSearchTermInsight.Builder.class); } // Construct using com.google.ads.googleads.v19.resources.CampaignSearchTermInsight.newBuilder() private Builder() { } private Builder( com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); } @java.lang.Override public Builder clear() { super.clear(); bitField0_ = 0; resourceName_ = ""; categoryLabel_ = ""; id_ = 0L; campaignId_ = 0L; return this; } @java.lang.Override public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return com.google.ads.googleads.v19.resources.CampaignSearchTermInsightProto.internal_static_google_ads_googleads_v19_resources_CampaignSearchTermInsight_descriptor; } @java.lang.Override public com.google.ads.googleads.v19.resources.CampaignSearchTermInsight getDefaultInstanceForType() { return 
com.google.ads.googleads.v19.resources.CampaignSearchTermInsight.getDefaultInstance(); } @java.lang.Override public com.google.ads.googleads.v19.resources.CampaignSearchTermInsight build() { com.google.ads.googleads.v19.resources.CampaignSearchTermInsight result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public com.google.ads.googleads.v19.resources.CampaignSearchTermInsight buildPartial() { com.google.ads.googleads.v19.resources.CampaignSearchTermInsight result = new com.google.ads.googleads.v19.resources.CampaignSearchTermInsight(this); if (bitField0_ != 0) { buildPartial0(result); } onBuilt(); return result; } private void buildPartial0(com.google.ads.googleads.v19.resources.CampaignSearchTermInsight result) { int from_bitField0_ = bitField0_; if (((from_bitField0_ & 0x00000001) != 0)) { result.resourceName_ = resourceName_; } int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000002) != 0)) { result.categoryLabel_ = categoryLabel_; to_bitField0_ |= 0x00000001; } if (((from_bitField0_ & 0x00000004) != 0)) { result.id_ = id_; to_bitField0_ |= 0x00000002; } if (((from_bitField0_ & 0x00000008) != 0)) { result.campaignId_ = campaignId_; to_bitField0_ |= 0x00000004; } result.bitField0_ |= to_bitField0_; } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField( com.google.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof( com.google.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return 
super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof com.google.ads.googleads.v19.resources.CampaignSearchTermInsight) { return mergeFrom((com.google.ads.googleads.v19.resources.CampaignSearchTermInsight)other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(com.google.ads.googleads.v19.resources.CampaignSearchTermInsight other) { if (other == com.google.ads.googleads.v19.resources.CampaignSearchTermInsight.getDefaultInstance()) return this; if (!other.getResourceName().isEmpty()) { resourceName_ = other.resourceName_; bitField0_ |= 0x00000001; onChanged(); } if (other.hasCategoryLabel()) { categoryLabel_ = other.categoryLabel_; bitField0_ |= 0x00000002; onChanged(); } if (other.hasId()) { setId(other.getId()); } if (other.hasCampaignId()) { setCampaignId(other.getCampaignId()); } this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { resourceName_ = input.readStringRequireUtf8(); bitField0_ |= 0x00000001; break; } // case 10 case 18: { categoryLabel_ = input.readStringRequireUtf8(); bitField0_ |= 0x00000002; break; } // case 18 case 24: { id_ = input.readInt64(); bitField0_ |= 0x00000004; break; } // case 24 case 32: { campaignId_ = input.readInt64(); bitField0_ |= 0x00000008; break; } // 
case 32 default: { if (!super.parseUnknownField(input, extensionRegistry, tag)) { done = true; // was an endgroup tag } break; } // default: } // switch (tag) } // while (!done) } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.unwrapIOException(); } finally { onChanged(); } // finally return this; } private int bitField0_; private java.lang.Object resourceName_ = ""; /** * <pre> * Output only. The resource name of the campaign level search term insight. * Campaign level search term insight resource names have the form: * * `customers/{customer_id}/campaignSearchTermInsights/{campaign_id}~{category_id}` * </pre> * * <code>string resource_name = 1 [(.google.api.field_behavior) = OUTPUT_ONLY, (.google.api.resource_reference) = { ... }</code> * @return The resourceName. */ public java.lang.String getResourceName() { java.lang.Object ref = resourceName_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); resourceName_ = s; return s; } else { return (java.lang.String) ref; } } /** * <pre> * Output only. The resource name of the campaign level search term insight. * Campaign level search term insight resource names have the form: * * `customers/{customer_id}/campaignSearchTermInsights/{campaign_id}~{category_id}` * </pre> * * <code>string resource_name = 1 [(.google.api.field_behavior) = OUTPUT_ONLY, (.google.api.resource_reference) = { ... }</code> * @return The bytes for resourceName. */ public com.google.protobuf.ByteString getResourceNameBytes() { java.lang.Object ref = resourceName_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); resourceName_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * <pre> * Output only. The resource name of the campaign level search term insight. 
* Campaign level search term insight resource names have the form: * * `customers/{customer_id}/campaignSearchTermInsights/{campaign_id}~{category_id}` * </pre> * * <code>string resource_name = 1 [(.google.api.field_behavior) = OUTPUT_ONLY, (.google.api.resource_reference) = { ... }</code> * @param value The resourceName to set. * @return This builder for chaining. */ public Builder setResourceName( java.lang.String value) { if (value == null) { throw new NullPointerException(); } resourceName_ = value; bitField0_ |= 0x00000001; onChanged(); return this; } /** * <pre> * Output only. The resource name of the campaign level search term insight. * Campaign level search term insight resource names have the form: * * `customers/{customer_id}/campaignSearchTermInsights/{campaign_id}~{category_id}` * </pre> * * <code>string resource_name = 1 [(.google.api.field_behavior) = OUTPUT_ONLY, (.google.api.resource_reference) = { ... }</code> * @return This builder for chaining. */ public Builder clearResourceName() { resourceName_ = getDefaultInstance().getResourceName(); bitField0_ = (bitField0_ & ~0x00000001); onChanged(); return this; } /** * <pre> * Output only. The resource name of the campaign level search term insight. * Campaign level search term insight resource names have the form: * * `customers/{customer_id}/campaignSearchTermInsights/{campaign_id}~{category_id}` * </pre> * * <code>string resource_name = 1 [(.google.api.field_behavior) = OUTPUT_ONLY, (.google.api.resource_reference) = { ... }</code> * @param value The bytes for resourceName to set. * @return This builder for chaining. */ public Builder setResourceNameBytes( com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); resourceName_ = value; bitField0_ |= 0x00000001; onChanged(); return this; } private java.lang.Object categoryLabel_ = ""; /** * <pre> * Output only. The label for the search category. 
An empty string denotes the * catch-all category for search terms that didn't fit into another category. * </pre> * * <code>optional string category_label = 2 [(.google.api.field_behavior) = OUTPUT_ONLY];</code> * @return Whether the categoryLabel field is set. */ public boolean hasCategoryLabel() { return ((bitField0_ & 0x00000002) != 0); } /** * <pre> * Output only. The label for the search category. An empty string denotes the * catch-all category for search terms that didn't fit into another category. * </pre> * * <code>optional string category_label = 2 [(.google.api.field_behavior) = OUTPUT_ONLY];</code> * @return The categoryLabel. */ public java.lang.String getCategoryLabel() { java.lang.Object ref = categoryLabel_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); categoryLabel_ = s; return s; } else { return (java.lang.String) ref; } } /** * <pre> * Output only. The label for the search category. An empty string denotes the * catch-all category for search terms that didn't fit into another category. * </pre> * * <code>optional string category_label = 2 [(.google.api.field_behavior) = OUTPUT_ONLY];</code> * @return The bytes for categoryLabel. */ public com.google.protobuf.ByteString getCategoryLabelBytes() { java.lang.Object ref = categoryLabel_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); categoryLabel_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * <pre> * Output only. The label for the search category. An empty string denotes the * catch-all category for search terms that didn't fit into another category. * </pre> * * <code>optional string category_label = 2 [(.google.api.field_behavior) = OUTPUT_ONLY];</code> * @param value The categoryLabel to set. * @return This builder for chaining. 
*/ public Builder setCategoryLabel( java.lang.String value) { if (value == null) { throw new NullPointerException(); } categoryLabel_ = value; bitField0_ |= 0x00000002; onChanged(); return this; } /** * <pre> * Output only. The label for the search category. An empty string denotes the * catch-all category for search terms that didn't fit into another category. * </pre> * * <code>optional string category_label = 2 [(.google.api.field_behavior) = OUTPUT_ONLY];</code> * @return This builder for chaining. */ public Builder clearCategoryLabel() { categoryLabel_ = getDefaultInstance().getCategoryLabel(); bitField0_ = (bitField0_ & ~0x00000002); onChanged(); return this; } /** * <pre> * Output only. The label for the search category. An empty string denotes the * catch-all category for search terms that didn't fit into another category. * </pre> * * <code>optional string category_label = 2 [(.google.api.field_behavior) = OUTPUT_ONLY];</code> * @param value The bytes for categoryLabel to set. * @return This builder for chaining. */ public Builder setCategoryLabelBytes( com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); categoryLabel_ = value; bitField0_ |= 0x00000002; onChanged(); return this; } private long id_ ; /** * <pre> * Output only. The ID of the insight. * </pre> * * <code>optional int64 id = 3 [(.google.api.field_behavior) = OUTPUT_ONLY];</code> * @return Whether the id field is set. */ @java.lang.Override public boolean hasId() { return ((bitField0_ & 0x00000004) != 0); } /** * <pre> * Output only. The ID of the insight. * </pre> * * <code>optional int64 id = 3 [(.google.api.field_behavior) = OUTPUT_ONLY];</code> * @return The id. */ @java.lang.Override public long getId() { return id_; } /** * <pre> * Output only. The ID of the insight. * </pre> * * <code>optional int64 id = 3 [(.google.api.field_behavior) = OUTPUT_ONLY];</code> * @param value The id to set. 
* @return This builder for chaining. */ public Builder setId(long value) { id_ = value; bitField0_ |= 0x00000004; onChanged(); return this; } /** * <pre> * Output only. The ID of the insight. * </pre> * * <code>optional int64 id = 3 [(.google.api.field_behavior) = OUTPUT_ONLY];</code> * @return This builder for chaining. */ public Builder clearId() { bitField0_ = (bitField0_ & ~0x00000004); id_ = 0L; onChanged(); return this; } private long campaignId_ ; /** * <pre> * Output only. The ID of the campaign. * </pre> * * <code>optional int64 campaign_id = 4 [(.google.api.field_behavior) = OUTPUT_ONLY];</code> * @return Whether the campaignId field is set. */ @java.lang.Override public boolean hasCampaignId() { return ((bitField0_ & 0x00000008) != 0); } /** * <pre> * Output only. The ID of the campaign. * </pre> * * <code>optional int64 campaign_id = 4 [(.google.api.field_behavior) = OUTPUT_ONLY];</code> * @return The campaignId. */ @java.lang.Override public long getCampaignId() { return campaignId_; } /** * <pre> * Output only. The ID of the campaign. * </pre> * * <code>optional int64 campaign_id = 4 [(.google.api.field_behavior) = OUTPUT_ONLY];</code> * @param value The campaignId to set. * @return This builder for chaining. */ public Builder setCampaignId(long value) { campaignId_ = value; bitField0_ |= 0x00000008; onChanged(); return this; } /** * <pre> * Output only. The ID of the campaign. * </pre> * * <code>optional int64 campaign_id = 4 [(.google.api.field_behavior) = OUTPUT_ONLY];</code> * @return This builder for chaining. 
*/ public Builder clearCampaignId() { bitField0_ = (bitField0_ & ~0x00000008); campaignId_ = 0L; onChanged(); return this; } @java.lang.Override public final Builder setUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:google.ads.googleads.v19.resources.CampaignSearchTermInsight) } // @@protoc_insertion_point(class_scope:google.ads.googleads.v19.resources.CampaignSearchTermInsight) private static final com.google.ads.googleads.v19.resources.CampaignSearchTermInsight DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new com.google.ads.googleads.v19.resources.CampaignSearchTermInsight(); } public static com.google.ads.googleads.v19.resources.CampaignSearchTermInsight getDefaultInstance() { return DEFAULT_INSTANCE; } private static final com.google.protobuf.Parser<CampaignSearchTermInsight> PARSER = new com.google.protobuf.AbstractParser<CampaignSearchTermInsight>() { @java.lang.Override public CampaignSearchTermInsight parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { Builder builder = newBuilder(); try { builder.mergeFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(builder.buildPartial()); } catch (com.google.protobuf.UninitializedMessageException e) { throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException(e) .setUnfinishedMessage(builder.buildPartial()); } return builder.buildPartial(); } }; public static com.google.protobuf.Parser<CampaignSearchTermInsight> 
parser() { return PARSER; } @java.lang.Override public com.google.protobuf.Parser<CampaignSearchTermInsight> getParserForType() { return PARSER; } @java.lang.Override public com.google.ads.googleads.v19.resources.CampaignSearchTermInsight getDefaultInstanceForType() { return DEFAULT_INSTANCE; } }
googleads/google-ads-java
36,327
google-ads-stubs-v20/src/main/java/com/google/ads/googleads/v20/resources/CampaignSearchTermInsight.java
// Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/ads/googleads/v20/resources/campaign_search_term_insight.proto // Protobuf Java Version: 3.25.7 package com.google.ads.googleads.v20.resources; /** * <pre> * A Campaign search term view. * Historical data is available starting March 2023. * </pre> * * Protobuf type {@code google.ads.googleads.v20.resources.CampaignSearchTermInsight} */ public final class CampaignSearchTermInsight extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.ads.googleads.v20.resources.CampaignSearchTermInsight) CampaignSearchTermInsightOrBuilder { private static final long serialVersionUID = 0L; // Use CampaignSearchTermInsight.newBuilder() to construct. private CampaignSearchTermInsight(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private CampaignSearchTermInsight() { resourceName_ = ""; categoryLabel_ = ""; } @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance( UnusedPrivateParameter unused) { return new CampaignSearchTermInsight(); } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.ads.googleads.v20.resources.CampaignSearchTermInsightProto.internal_static_google_ads_googleads_v20_resources_CampaignSearchTermInsight_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.ads.googleads.v20.resources.CampaignSearchTermInsightProto.internal_static_google_ads_googleads_v20_resources_CampaignSearchTermInsight_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.ads.googleads.v20.resources.CampaignSearchTermInsight.class, com.google.ads.googleads.v20.resources.CampaignSearchTermInsight.Builder.class); } private int bitField0_; public static final int RESOURCE_NAME_FIELD_NUMBER = 1; @SuppressWarnings("serial") private volatile 
java.lang.Object resourceName_ = ""; /** * <pre> * Output only. The resource name of the campaign level search term insight. * Campaign level search term insight resource names have the form: * * `customers/{customer_id}/campaignSearchTermInsights/{campaign_id}~{category_id}` * </pre> * * <code>string resource_name = 1 [(.google.api.field_behavior) = OUTPUT_ONLY, (.google.api.resource_reference) = { ... }</code> * @return The resourceName. */ @java.lang.Override public java.lang.String getResourceName() { java.lang.Object ref = resourceName_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); resourceName_ = s; return s; } } /** * <pre> * Output only. The resource name of the campaign level search term insight. * Campaign level search term insight resource names have the form: * * `customers/{customer_id}/campaignSearchTermInsights/{campaign_id}~{category_id}` * </pre> * * <code>string resource_name = 1 [(.google.api.field_behavior) = OUTPUT_ONLY, (.google.api.resource_reference) = { ... }</code> * @return The bytes for resourceName. */ @java.lang.Override public com.google.protobuf.ByteString getResourceNameBytes() { java.lang.Object ref = resourceName_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); resourceName_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } public static final int CATEGORY_LABEL_FIELD_NUMBER = 2; @SuppressWarnings("serial") private volatile java.lang.Object categoryLabel_ = ""; /** * <pre> * Output only. The label for the search category. An empty string denotes the * catch-all category for search terms that didn't fit into another category. 
* </pre> * * <code>optional string category_label = 2 [(.google.api.field_behavior) = OUTPUT_ONLY];</code> * @return Whether the categoryLabel field is set. */ @java.lang.Override public boolean hasCategoryLabel() { return ((bitField0_ & 0x00000001) != 0); } /** * <pre> * Output only. The label for the search category. An empty string denotes the * catch-all category for search terms that didn't fit into another category. * </pre> * * <code>optional string category_label = 2 [(.google.api.field_behavior) = OUTPUT_ONLY];</code> * @return The categoryLabel. */ @java.lang.Override public java.lang.String getCategoryLabel() { java.lang.Object ref = categoryLabel_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); categoryLabel_ = s; return s; } } /** * <pre> * Output only. The label for the search category. An empty string denotes the * catch-all category for search terms that didn't fit into another category. * </pre> * * <code>optional string category_label = 2 [(.google.api.field_behavior) = OUTPUT_ONLY];</code> * @return The bytes for categoryLabel. */ @java.lang.Override public com.google.protobuf.ByteString getCategoryLabelBytes() { java.lang.Object ref = categoryLabel_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); categoryLabel_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } public static final int ID_FIELD_NUMBER = 3; private long id_ = 0L; /** * <pre> * Output only. The ID of the insight. * </pre> * * <code>optional int64 id = 3 [(.google.api.field_behavior) = OUTPUT_ONLY];</code> * @return Whether the id field is set. */ @java.lang.Override public boolean hasId() { return ((bitField0_ & 0x00000002) != 0); } /** * <pre> * Output only. The ID of the insight. 
* </pre> * * <code>optional int64 id = 3 [(.google.api.field_behavior) = OUTPUT_ONLY];</code> * @return The id. */ @java.lang.Override public long getId() { return id_; } public static final int CAMPAIGN_ID_FIELD_NUMBER = 4; private long campaignId_ = 0L; /** * <pre> * Output only. The ID of the campaign. * </pre> * * <code>optional int64 campaign_id = 4 [(.google.api.field_behavior) = OUTPUT_ONLY];</code> * @return Whether the campaignId field is set. */ @java.lang.Override public boolean hasCampaignId() { return ((bitField0_ & 0x00000004) != 0); } /** * <pre> * Output only. The ID of the campaign. * </pre> * * <code>optional int64 campaign_id = 4 [(.google.api.field_behavior) = OUTPUT_ONLY];</code> * @return The campaignId. */ @java.lang.Override public long getCampaignId() { return campaignId_; } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(resourceName_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 1, resourceName_); } if (((bitField0_ & 0x00000001) != 0)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 2, categoryLabel_); } if (((bitField0_ & 0x00000002) != 0)) { output.writeInt64(3, id_); } if (((bitField0_ & 0x00000004) != 0)) { output.writeInt64(4, campaignId_); } getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(resourceName_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, resourceName_); } if (((bitField0_ & 0x00000001) != 0)) { size += 
com.google.protobuf.GeneratedMessageV3.computeStringSize(2, categoryLabel_); } if (((bitField0_ & 0x00000002) != 0)) { size += com.google.protobuf.CodedOutputStream .computeInt64Size(3, id_); } if (((bitField0_ & 0x00000004) != 0)) { size += com.google.protobuf.CodedOutputStream .computeInt64Size(4, campaignId_); } size += getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof com.google.ads.googleads.v20.resources.CampaignSearchTermInsight)) { return super.equals(obj); } com.google.ads.googleads.v20.resources.CampaignSearchTermInsight other = (com.google.ads.googleads.v20.resources.CampaignSearchTermInsight) obj; if (!getResourceName() .equals(other.getResourceName())) return false; if (hasCategoryLabel() != other.hasCategoryLabel()) return false; if (hasCategoryLabel()) { if (!getCategoryLabel() .equals(other.getCategoryLabel())) return false; } if (hasId() != other.hasId()) return false; if (hasId()) { if (getId() != other.getId()) return false; } if (hasCampaignId() != other.hasCampaignId()) return false; if (hasCampaignId()) { if (getCampaignId() != other.getCampaignId()) return false; } if (!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); hash = (37 * hash) + RESOURCE_NAME_FIELD_NUMBER; hash = (53 * hash) + getResourceName().hashCode(); if (hasCategoryLabel()) { hash = (37 * hash) + CATEGORY_LABEL_FIELD_NUMBER; hash = (53 * hash) + getCategoryLabel().hashCode(); } if (hasId()) { hash = (37 * hash) + ID_FIELD_NUMBER; hash = (53 * hash) + com.google.protobuf.Internal.hashLong( getId()); } if (hasCampaignId()) { hash = (37 * hash) + CAMPAIGN_ID_FIELD_NUMBER; hash = (53 * hash) + com.google.protobuf.Internal.hashLong( 
getCampaignId()); } hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static com.google.ads.googleads.v20.resources.CampaignSearchTermInsight parseFrom( java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.ads.googleads.v20.resources.CampaignSearchTermInsight parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.ads.googleads.v20.resources.CampaignSearchTermInsight parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.ads.googleads.v20.resources.CampaignSearchTermInsight parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.ads.googleads.v20.resources.CampaignSearchTermInsight parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.ads.googleads.v20.resources.CampaignSearchTermInsight parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.ads.googleads.v20.resources.CampaignSearchTermInsight parseFrom(java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static com.google.ads.googleads.v20.resources.CampaignSearchTermInsight parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite 
extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } public static com.google.ads.googleads.v20.resources.CampaignSearchTermInsight parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input); } public static com.google.ads.googleads.v20.resources.CampaignSearchTermInsight parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static com.google.ads.googleads.v20.resources.CampaignSearchTermInsight parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static com.google.ads.googleads.v20.resources.CampaignSearchTermInsight parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder(com.google.ads.googleads.v20.resources.CampaignSearchTermInsight prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType( com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * <pre> * A Campaign search term view. 
* Historical data is available starting March 2023. * </pre> * * Protobuf type {@code google.ads.googleads.v20.resources.CampaignSearchTermInsight} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:google.ads.googleads.v20.resources.CampaignSearchTermInsight) com.google.ads.googleads.v20.resources.CampaignSearchTermInsightOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.ads.googleads.v20.resources.CampaignSearchTermInsightProto.internal_static_google_ads_googleads_v20_resources_CampaignSearchTermInsight_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.ads.googleads.v20.resources.CampaignSearchTermInsightProto.internal_static_google_ads_googleads_v20_resources_CampaignSearchTermInsight_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.ads.googleads.v20.resources.CampaignSearchTermInsight.class, com.google.ads.googleads.v20.resources.CampaignSearchTermInsight.Builder.class); } // Construct using com.google.ads.googleads.v20.resources.CampaignSearchTermInsight.newBuilder() private Builder() { } private Builder( com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); } @java.lang.Override public Builder clear() { super.clear(); bitField0_ = 0; resourceName_ = ""; categoryLabel_ = ""; id_ = 0L; campaignId_ = 0L; return this; } @java.lang.Override public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return com.google.ads.googleads.v20.resources.CampaignSearchTermInsightProto.internal_static_google_ads_googleads_v20_resources_CampaignSearchTermInsight_descriptor; } @java.lang.Override public com.google.ads.googleads.v20.resources.CampaignSearchTermInsight getDefaultInstanceForType() { return 
com.google.ads.googleads.v20.resources.CampaignSearchTermInsight.getDefaultInstance(); } @java.lang.Override public com.google.ads.googleads.v20.resources.CampaignSearchTermInsight build() { com.google.ads.googleads.v20.resources.CampaignSearchTermInsight result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public com.google.ads.googleads.v20.resources.CampaignSearchTermInsight buildPartial() { com.google.ads.googleads.v20.resources.CampaignSearchTermInsight result = new com.google.ads.googleads.v20.resources.CampaignSearchTermInsight(this); if (bitField0_ != 0) { buildPartial0(result); } onBuilt(); return result; } private void buildPartial0(com.google.ads.googleads.v20.resources.CampaignSearchTermInsight result) { int from_bitField0_ = bitField0_; if (((from_bitField0_ & 0x00000001) != 0)) { result.resourceName_ = resourceName_; } int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000002) != 0)) { result.categoryLabel_ = categoryLabel_; to_bitField0_ |= 0x00000001; } if (((from_bitField0_ & 0x00000004) != 0)) { result.id_ = id_; to_bitField0_ |= 0x00000002; } if (((from_bitField0_ & 0x00000008) != 0)) { result.campaignId_ = campaignId_; to_bitField0_ |= 0x00000004; } result.bitField0_ |= to_bitField0_; } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField( com.google.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof( com.google.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return 
super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof com.google.ads.googleads.v20.resources.CampaignSearchTermInsight) { return mergeFrom((com.google.ads.googleads.v20.resources.CampaignSearchTermInsight)other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(com.google.ads.googleads.v20.resources.CampaignSearchTermInsight other) { if (other == com.google.ads.googleads.v20.resources.CampaignSearchTermInsight.getDefaultInstance()) return this; if (!other.getResourceName().isEmpty()) { resourceName_ = other.resourceName_; bitField0_ |= 0x00000001; onChanged(); } if (other.hasCategoryLabel()) { categoryLabel_ = other.categoryLabel_; bitField0_ |= 0x00000002; onChanged(); } if (other.hasId()) { setId(other.getId()); } if (other.hasCampaignId()) { setCampaignId(other.getCampaignId()); } this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { resourceName_ = input.readStringRequireUtf8(); bitField0_ |= 0x00000001; break; } // case 10 case 18: { categoryLabel_ = input.readStringRequireUtf8(); bitField0_ |= 0x00000002; break; } // case 18 case 24: { id_ = input.readInt64(); bitField0_ |= 0x00000004; break; } // case 24 case 32: { campaignId_ = input.readInt64(); bitField0_ |= 0x00000008; break; } // 
case 32 default: { if (!super.parseUnknownField(input, extensionRegistry, tag)) { done = true; // was an endgroup tag } break; } // default: } // switch (tag) } // while (!done) } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.unwrapIOException(); } finally { onChanged(); } // finally return this; } private int bitField0_; private java.lang.Object resourceName_ = ""; /** * <pre> * Output only. The resource name of the campaign level search term insight. * Campaign level search term insight resource names have the form: * * `customers/{customer_id}/campaignSearchTermInsights/{campaign_id}~{category_id}` * </pre> * * <code>string resource_name = 1 [(.google.api.field_behavior) = OUTPUT_ONLY, (.google.api.resource_reference) = { ... }</code> * @return The resourceName. */ public java.lang.String getResourceName() { java.lang.Object ref = resourceName_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); resourceName_ = s; return s; } else { return (java.lang.String) ref; } } /** * <pre> * Output only. The resource name of the campaign level search term insight. * Campaign level search term insight resource names have the form: * * `customers/{customer_id}/campaignSearchTermInsights/{campaign_id}~{category_id}` * </pre> * * <code>string resource_name = 1 [(.google.api.field_behavior) = OUTPUT_ONLY, (.google.api.resource_reference) = { ... }</code> * @return The bytes for resourceName. */ public com.google.protobuf.ByteString getResourceNameBytes() { java.lang.Object ref = resourceName_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); resourceName_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * <pre> * Output only. The resource name of the campaign level search term insight. 
* Campaign level search term insight resource names have the form: * * `customers/{customer_id}/campaignSearchTermInsights/{campaign_id}~{category_id}` * </pre> * * <code>string resource_name = 1 [(.google.api.field_behavior) = OUTPUT_ONLY, (.google.api.resource_reference) = { ... }</code> * @param value The resourceName to set. * @return This builder for chaining. */ public Builder setResourceName( java.lang.String value) { if (value == null) { throw new NullPointerException(); } resourceName_ = value; bitField0_ |= 0x00000001; onChanged(); return this; } /** * <pre> * Output only. The resource name of the campaign level search term insight. * Campaign level search term insight resource names have the form: * * `customers/{customer_id}/campaignSearchTermInsights/{campaign_id}~{category_id}` * </pre> * * <code>string resource_name = 1 [(.google.api.field_behavior) = OUTPUT_ONLY, (.google.api.resource_reference) = { ... }</code> * @return This builder for chaining. */ public Builder clearResourceName() { resourceName_ = getDefaultInstance().getResourceName(); bitField0_ = (bitField0_ & ~0x00000001); onChanged(); return this; } /** * <pre> * Output only. The resource name of the campaign level search term insight. * Campaign level search term insight resource names have the form: * * `customers/{customer_id}/campaignSearchTermInsights/{campaign_id}~{category_id}` * </pre> * * <code>string resource_name = 1 [(.google.api.field_behavior) = OUTPUT_ONLY, (.google.api.resource_reference) = { ... }</code> * @param value The bytes for resourceName to set. * @return This builder for chaining. */ public Builder setResourceNameBytes( com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); resourceName_ = value; bitField0_ |= 0x00000001; onChanged(); return this; } private java.lang.Object categoryLabel_ = ""; /** * <pre> * Output only. The label for the search category. 
An empty string denotes the * catch-all category for search terms that didn't fit into another category. * </pre> * * <code>optional string category_label = 2 [(.google.api.field_behavior) = OUTPUT_ONLY];</code> * @return Whether the categoryLabel field is set. */ public boolean hasCategoryLabel() { return ((bitField0_ & 0x00000002) != 0); } /** * <pre> * Output only. The label for the search category. An empty string denotes the * catch-all category for search terms that didn't fit into another category. * </pre> * * <code>optional string category_label = 2 [(.google.api.field_behavior) = OUTPUT_ONLY];</code> * @return The categoryLabel. */ public java.lang.String getCategoryLabel() { java.lang.Object ref = categoryLabel_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); categoryLabel_ = s; return s; } else { return (java.lang.String) ref; } } /** * <pre> * Output only. The label for the search category. An empty string denotes the * catch-all category for search terms that didn't fit into another category. * </pre> * * <code>optional string category_label = 2 [(.google.api.field_behavior) = OUTPUT_ONLY];</code> * @return The bytes for categoryLabel. */ public com.google.protobuf.ByteString getCategoryLabelBytes() { java.lang.Object ref = categoryLabel_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); categoryLabel_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * <pre> * Output only. The label for the search category. An empty string denotes the * catch-all category for search terms that didn't fit into another category. * </pre> * * <code>optional string category_label = 2 [(.google.api.field_behavior) = OUTPUT_ONLY];</code> * @param value The categoryLabel to set. * @return This builder for chaining. 
*/ public Builder setCategoryLabel( java.lang.String value) { if (value == null) { throw new NullPointerException(); } categoryLabel_ = value; bitField0_ |= 0x00000002; onChanged(); return this; } /** * <pre> * Output only. The label for the search category. An empty string denotes the * catch-all category for search terms that didn't fit into another category. * </pre> * * <code>optional string category_label = 2 [(.google.api.field_behavior) = OUTPUT_ONLY];</code> * @return This builder for chaining. */ public Builder clearCategoryLabel() { categoryLabel_ = getDefaultInstance().getCategoryLabel(); bitField0_ = (bitField0_ & ~0x00000002); onChanged(); return this; } /** * <pre> * Output only. The label for the search category. An empty string denotes the * catch-all category for search terms that didn't fit into another category. * </pre> * * <code>optional string category_label = 2 [(.google.api.field_behavior) = OUTPUT_ONLY];</code> * @param value The bytes for categoryLabel to set. * @return This builder for chaining. */ public Builder setCategoryLabelBytes( com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); categoryLabel_ = value; bitField0_ |= 0x00000002; onChanged(); return this; } private long id_ ; /** * <pre> * Output only. The ID of the insight. * </pre> * * <code>optional int64 id = 3 [(.google.api.field_behavior) = OUTPUT_ONLY];</code> * @return Whether the id field is set. */ @java.lang.Override public boolean hasId() { return ((bitField0_ & 0x00000004) != 0); } /** * <pre> * Output only. The ID of the insight. * </pre> * * <code>optional int64 id = 3 [(.google.api.field_behavior) = OUTPUT_ONLY];</code> * @return The id. */ @java.lang.Override public long getId() { return id_; } /** * <pre> * Output only. The ID of the insight. * </pre> * * <code>optional int64 id = 3 [(.google.api.field_behavior) = OUTPUT_ONLY];</code> * @param value The id to set. 
* @return This builder for chaining. */ public Builder setId(long value) { id_ = value; bitField0_ |= 0x00000004; onChanged(); return this; } /** * <pre> * Output only. The ID of the insight. * </pre> * * <code>optional int64 id = 3 [(.google.api.field_behavior) = OUTPUT_ONLY];</code> * @return This builder for chaining. */ public Builder clearId() { bitField0_ = (bitField0_ & ~0x00000004); id_ = 0L; onChanged(); return this; } private long campaignId_ ; /** * <pre> * Output only. The ID of the campaign. * </pre> * * <code>optional int64 campaign_id = 4 [(.google.api.field_behavior) = OUTPUT_ONLY];</code> * @return Whether the campaignId field is set. */ @java.lang.Override public boolean hasCampaignId() { return ((bitField0_ & 0x00000008) != 0); } /** * <pre> * Output only. The ID of the campaign. * </pre> * * <code>optional int64 campaign_id = 4 [(.google.api.field_behavior) = OUTPUT_ONLY];</code> * @return The campaignId. */ @java.lang.Override public long getCampaignId() { return campaignId_; } /** * <pre> * Output only. The ID of the campaign. * </pre> * * <code>optional int64 campaign_id = 4 [(.google.api.field_behavior) = OUTPUT_ONLY];</code> * @param value The campaignId to set. * @return This builder for chaining. */ public Builder setCampaignId(long value) { campaignId_ = value; bitField0_ |= 0x00000008; onChanged(); return this; } /** * <pre> * Output only. The ID of the campaign. * </pre> * * <code>optional int64 campaign_id = 4 [(.google.api.field_behavior) = OUTPUT_ONLY];</code> * @return This builder for chaining. 
*/ public Builder clearCampaignId() { bitField0_ = (bitField0_ & ~0x00000008); campaignId_ = 0L; onChanged(); return this; } @java.lang.Override public final Builder setUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:google.ads.googleads.v20.resources.CampaignSearchTermInsight) } // @@protoc_insertion_point(class_scope:google.ads.googleads.v20.resources.CampaignSearchTermInsight) private static final com.google.ads.googleads.v20.resources.CampaignSearchTermInsight DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new com.google.ads.googleads.v20.resources.CampaignSearchTermInsight(); } public static com.google.ads.googleads.v20.resources.CampaignSearchTermInsight getDefaultInstance() { return DEFAULT_INSTANCE; } private static final com.google.protobuf.Parser<CampaignSearchTermInsight> PARSER = new com.google.protobuf.AbstractParser<CampaignSearchTermInsight>() { @java.lang.Override public CampaignSearchTermInsight parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { Builder builder = newBuilder(); try { builder.mergeFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(builder.buildPartial()); } catch (com.google.protobuf.UninitializedMessageException e) { throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException(e) .setUnfinishedMessage(builder.buildPartial()); } return builder.buildPartial(); } }; public static com.google.protobuf.Parser<CampaignSearchTermInsight> 
parser() { return PARSER; } @java.lang.Override public com.google.protobuf.Parser<CampaignSearchTermInsight> getParserForType() { return PARSER; } @java.lang.Override public com.google.ads.googleads.v20.resources.CampaignSearchTermInsight getDefaultInstanceForType() { return DEFAULT_INSTANCE; } }
apache/fineract
36,790
fineract-core/src/main/java/org/apache/fineract/portfolio/savings/data/SavingsProductData.java
/** * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.apache.fineract.portfolio.savings.data; import java.io.Serializable; import java.math.BigDecimal; import java.util.Collection; import java.util.List; import java.util.Map; import org.apache.fineract.accounting.common.AccountingRuleType; import org.apache.fineract.accounting.glaccount.data.GLAccountData; import org.apache.fineract.accounting.producttoaccountmapping.data.ChargeToGLAccountMapper; import org.apache.fineract.accounting.producttoaccountmapping.data.PaymentTypeToGLAccountMapper; import org.apache.fineract.infrastructure.core.data.EnumOptionData; import org.apache.fineract.organisation.monetary.data.CurrencyData; import org.apache.fineract.portfolio.charge.data.ChargeData; import org.apache.fineract.portfolio.paymenttype.data.PaymentTypeData; import org.apache.fineract.portfolio.tax.data.TaxGroupData; /** * Immutable data object represent a savings product. 
*/ public final class SavingsProductData implements Serializable { private final Long id; private final String name; private final String shortName; private final String description; private final CurrencyData currency; private final BigDecimal nominalAnnualInterestRate; private final EnumOptionData interestCompoundingPeriodType; private final EnumOptionData interestPostingPeriodType; private final EnumOptionData interestCalculationType; private final EnumOptionData interestCalculationDaysInYearType; private final BigDecimal minRequiredOpeningBalance; private final Integer lockinPeriodFrequency; private final EnumOptionData lockinPeriodFrequencyType; private final boolean withdrawalFeeForTransfers; private final boolean allowOverdraft; private final BigDecimal overdraftLimit; private final BigDecimal minRequiredBalance; private final boolean enforceMinRequiredBalance; private final BigDecimal maxAllowedLienLimit; private final boolean lienAllowed; private final BigDecimal minBalanceForInterestCalculation; private final BigDecimal nominalAnnualInterestRateOverdraft; private final BigDecimal minOverdraftForInterestCalculation; private final boolean withHoldTax; private final TaxGroupData taxGroup; private String depositAccountType = null; private final String accountMappingForPayment; // accounting private final EnumOptionData accountingRule; private final Map<String, Object> accountingMappings; private final Collection<PaymentTypeToGLAccountMapper> paymentChannelToFundSourceMappings; private final Collection<ChargeToGLAccountMapper> feeToIncomeAccountMappings; private final Collection<ChargeToGLAccountMapper> penaltyToIncomeAccountMappings; // charges private final Collection<ChargeData> charges; // template private final Collection<CurrencyData> currencyOptions; private final Collection<EnumOptionData> interestCompoundingPeriodTypeOptions; private final Collection<EnumOptionData> interestPostingPeriodTypeOptions; private final Collection<EnumOptionData> 
interestCalculationTypeOptions; private final Collection<EnumOptionData> interestCalculationDaysInYearTypeOptions; private final Collection<EnumOptionData> lockinPeriodFrequencyTypeOptions; private final Collection<EnumOptionData> withdrawalFeeTypeOptions; private final Collection<PaymentTypeData> paymentTypeOptions; private final Collection<EnumOptionData> accountingRuleOptions; private final Map<String, List<GLAccountData>> accountingMappingOptions; private final Collection<ChargeData> chargeOptions; private final Collection<ChargeData> penaltyOptions; private final Collection<TaxGroupData> taxGroupOptions; private final Boolean isDormancyTrackingActive; private final Long daysToInactive; private final Long daysToDormancy; private final Long daysToEscheat; public static SavingsProductData template(final CurrencyData currency, final EnumOptionData interestCompoundingPeriodType, final EnumOptionData interestPostingPeriodType, final EnumOptionData interestCalculationType, final EnumOptionData interestCalculationDaysInYearType, final EnumOptionData accountingRule, final Collection<CurrencyData> currencyOptions, final Collection<EnumOptionData> interestCompoundingPeriodTypeOptions, final Collection<EnumOptionData> interestPostingPeriodTypeOptions, final Collection<EnumOptionData> interestCalculationTypeOptions, final Collection<EnumOptionData> interestCalculationDaysInYearTypeOptions, final Collection<EnumOptionData> lockinPeriodFrequencyTypeOptions, final Collection<EnumOptionData> withdrawalFeeTypeOptions, final Collection<PaymentTypeData> paymentTypeOptions, final Collection<EnumOptionData> accountingRuleOptions, final Map<String, List<GLAccountData>> accountingMappingOptions, final Collection<ChargeData> chargeOptions, final Collection<ChargeData> penaltyOptions, final Collection<TaxGroupData> taxGroupOptions, final String accountMappingForPayment) { final Long id = null; final String name = null; final String shortName = null; final String description = null; 
final BigDecimal nominalAnnualInterestRate = null; final BigDecimal minRequiredOpeningBalance = null; final Integer lockinPeriodFrequency = null; final EnumOptionData lockinPeriodFrequencyType = null; final boolean withdrawalFeeForTransfers = false; final Map<String, Object> accountingMappings = null; final Collection<PaymentTypeToGLAccountMapper> paymentChannelToFundSourceMappings = null; final Collection<ChargeData> charges = null; final Collection<ChargeToGLAccountMapper> feeToIncomeAccountMappings = null; final Collection<ChargeToGLAccountMapper> penaltyToIncomeAccountMappings = null; final boolean allowOverdraft = false; final BigDecimal overdraftLimit = null; final BigDecimal minRequiredBalance = null; final boolean enforceMinRequiredBalance = false; final BigDecimal maxAllowedLienLimit = null; final boolean lienAllowed = false; final BigDecimal minBalanceForInterestCalculation = null; final BigDecimal nominalAnnualInterestRateOverdraft = null; final BigDecimal minOverdraftForInterestCalculation = null; final boolean withHoldTax = false; final TaxGroupData taxGroup = null; final Boolean isDormancyTrackingActive = false; final Long daysToInactive = null; final Long daysToDormancy = null; final Long daysToEscheat = null; return new SavingsProductData(id, name, shortName, description, currency, nominalAnnualInterestRate, interestCompoundingPeriodType, interestPostingPeriodType, interestCalculationType, interestCalculationDaysInYearType, minRequiredOpeningBalance, lockinPeriodFrequency, lockinPeriodFrequencyType, withdrawalFeeForTransfers, accountingRule, accountingMappings, paymentChannelToFundSourceMappings, currencyOptions, interestCompoundingPeriodTypeOptions, interestPostingPeriodTypeOptions, interestCalculationTypeOptions, interestCalculationDaysInYearTypeOptions, lockinPeriodFrequencyTypeOptions, withdrawalFeeTypeOptions, paymentTypeOptions, accountingRuleOptions, accountingMappingOptions, charges, chargeOptions, penaltyOptions, feeToIncomeAccountMappings, 
penaltyToIncomeAccountMappings, allowOverdraft, overdraftLimit, minRequiredBalance, enforceMinRequiredBalance, maxAllowedLienLimit, lienAllowed, minBalanceForInterestCalculation, nominalAnnualInterestRateOverdraft, minOverdraftForInterestCalculation, withHoldTax, taxGroup, taxGroupOptions, isDormancyTrackingActive, daysToInactive, daysToDormancy, daysToEscheat, accountMappingForPayment); } public static SavingsProductData withCharges(final SavingsProductData product, final Collection<ChargeData> charges) { return new SavingsProductData(product.id, product.name, product.shortName, product.description, product.currency, product.nominalAnnualInterestRate, product.interestCompoundingPeriodType, product.interestPostingPeriodType, product.interestCalculationType, product.interestCalculationDaysInYearType, product.minRequiredOpeningBalance, product.lockinPeriodFrequency, product.lockinPeriodFrequencyType, product.withdrawalFeeForTransfers, product.accountingRule, product.accountingMappings, product.paymentChannelToFundSourceMappings, product.currencyOptions, product.interestCompoundingPeriodTypeOptions, product.interestPostingPeriodTypeOptions, product.interestCalculationTypeOptions, product.interestCalculationDaysInYearTypeOptions, product.lockinPeriodFrequencyTypeOptions, product.withdrawalFeeTypeOptions, product.paymentTypeOptions, product.accountingRuleOptions, product.accountingMappingOptions, charges, product.chargeOptions, product.penaltyOptions, product.feeToIncomeAccountMappings, product.penaltyToIncomeAccountMappings, product.allowOverdraft, product.overdraftLimit, product.minRequiredBalance, product.enforceMinRequiredBalance, product.maxAllowedLienLimit, product.lienAllowed, product.minBalanceForInterestCalculation, product.nominalAnnualInterestRateOverdraft, product.minOverdraftForInterestCalculation, product.withHoldTax, product.taxGroup, product.taxGroupOptions, product.isDormancyTrackingActive, product.daysToInactive, product.daysToDormancy, 
product.daysToEscheat, product.accountMappingForPayment); } /** * Returns a {@link SavingsProductData} that contains and exist {@link SavingsProductData} data with further * template data for dropdowns. * * @param taxGroupOptions * TODO * @param accountMapping */ public static SavingsProductData withTemplate(final SavingsProductData existingProduct, final Collection<CurrencyData> currencyOptions, final Collection<EnumOptionData> interestCompoundingPeriodTypeOptions, final Collection<EnumOptionData> interestPostingPeriodTypeOptions, final Collection<EnumOptionData> interestCalculationTypeOptions, final Collection<EnumOptionData> interestCalculationDaysInYearTypeOptions, final Collection<EnumOptionData> lockinPeriodFrequencyTypeOptions, final Collection<EnumOptionData> withdrawalFeeTypeOptions, final Collection<PaymentTypeData> paymentTypeOptions, final Collection<EnumOptionData> accountingRuleOptions, final Map<String, List<GLAccountData>> accountingMappingOptions, final Collection<ChargeData> chargeOptions, final Collection<ChargeData> penaltyOptions, Collection<TaxGroupData> taxGroupOptions, final String accountMappingForPayment) { return new SavingsProductData(existingProduct.id, existingProduct.name, existingProduct.shortName, existingProduct.description, existingProduct.currency, existingProduct.nominalAnnualInterestRate, existingProduct.interestCompoundingPeriodType, existingProduct.interestPostingPeriodType, existingProduct.interestCalculationType, existingProduct.interestCalculationDaysInYearType, existingProduct.minRequiredOpeningBalance, existingProduct.lockinPeriodFrequency, existingProduct.lockinPeriodFrequencyType, existingProduct.withdrawalFeeForTransfers, existingProduct.accountingRule, existingProduct.accountingMappings, existingProduct.paymentChannelToFundSourceMappings, currencyOptions, interestCompoundingPeriodTypeOptions, interestPostingPeriodTypeOptions, interestCalculationTypeOptions, interestCalculationDaysInYearTypeOptions, 
lockinPeriodFrequencyTypeOptions, withdrawalFeeTypeOptions, paymentTypeOptions, accountingRuleOptions, accountingMappingOptions, existingProduct.charges, chargeOptions, penaltyOptions, existingProduct.feeToIncomeAccountMappings, existingProduct.penaltyToIncomeAccountMappings, existingProduct.allowOverdraft, existingProduct.overdraftLimit, existingProduct.minRequiredBalance, existingProduct.enforceMinRequiredBalance, existingProduct.maxAllowedLienLimit, existingProduct.lienAllowed, existingProduct.minBalanceForInterestCalculation, existingProduct.nominalAnnualInterestRateOverdraft, existingProduct.minOverdraftForInterestCalculation, existingProduct.withHoldTax, existingProduct.taxGroup, taxGroupOptions, existingProduct.isDormancyTrackingActive, existingProduct.daysToInactive, existingProduct.daysToDormancy, existingProduct.daysToEscheat, accountMappingForPayment); } public static SavingsProductData withAccountingDetails(final SavingsProductData existingProduct, final Map<String, Object> accountingMappings, final Collection<PaymentTypeToGLAccountMapper> paymentChannelToFundSourceMappings, final Collection<ChargeToGLAccountMapper> feeToIncomeAccountMappings, final Collection<ChargeToGLAccountMapper> penaltyToIncomeAccountMappings) { final Collection<CurrencyData> currencyOptions = null; final Collection<EnumOptionData> interestCompoundingPeriodTypeOptions = null; final Collection<EnumOptionData> interestPostingPeriodTypeOptions = null; final Collection<EnumOptionData> interestCalculationTypeOptions = null; final Collection<EnumOptionData> interestCalculationDaysInYearTypeOptions = null; final Collection<EnumOptionData> lockinPeriodFrequencyTypeOptions = null; final Collection<EnumOptionData> withdrawalFeeTypeOptions = null; final Collection<PaymentTypeData> paymentTypeOptions = null; final Collection<EnumOptionData> accountingRuleOptions = null; final Map<String, List<GLAccountData>> accountingMappingOptions = null; final Collection<ChargeData> chargeOptions = null; 
final Collection<ChargeData> penaltyOptions = null; final String accountMappingForPayment = null; return new SavingsProductData(existingProduct.id, existingProduct.name, existingProduct.shortName, existingProduct.description, existingProduct.currency, existingProduct.nominalAnnualInterestRate, existingProduct.interestCompoundingPeriodType, existingProduct.interestPostingPeriodType, existingProduct.interestCalculationType, existingProduct.interestCalculationDaysInYearType, existingProduct.minRequiredOpeningBalance, existingProduct.lockinPeriodFrequency, existingProduct.lockinPeriodFrequencyType, existingProduct.withdrawalFeeForTransfers, existingProduct.accountingRule, accountingMappings, paymentChannelToFundSourceMappings, currencyOptions, interestCompoundingPeriodTypeOptions, interestPostingPeriodTypeOptions, interestCalculationTypeOptions, interestCalculationDaysInYearTypeOptions, lockinPeriodFrequencyTypeOptions, withdrawalFeeTypeOptions, paymentTypeOptions, accountingRuleOptions, accountingMappingOptions, existingProduct.charges, chargeOptions, penaltyOptions, feeToIncomeAccountMappings, penaltyToIncomeAccountMappings, existingProduct.allowOverdraft, existingProduct.overdraftLimit, existingProduct.minRequiredBalance, existingProduct.enforceMinRequiredBalance, existingProduct.maxAllowedLienLimit, existingProduct.lienAllowed, existingProduct.minBalanceForInterestCalculation, existingProduct.nominalAnnualInterestRateOverdraft, existingProduct.minOverdraftForInterestCalculation, existingProduct.withHoldTax, existingProduct.taxGroup, existingProduct.taxGroupOptions, existingProduct.isDormancyTrackingActive, existingProduct.daysToInactive, existingProduct.daysToDormancy, existingProduct.daysToEscheat, existingProduct.accountMappingForPayment); } public static SavingsProductData instance(final Long id, final String name, final String shortName, final String description, final CurrencyData currency, final BigDecimal nominalAnnualInterestRate, final EnumOptionData 
interestCompoundingPeriodType, final EnumOptionData interestPostingPeriodType, final EnumOptionData interestCalculationType, final EnumOptionData interestCalculationDaysInYearType, final BigDecimal minRequiredOpeningBalance, final Integer lockinPeriodFrequency, final EnumOptionData lockinPeriodFrequencyType, final boolean withdrawalFeeForTransfers, final EnumOptionData accountingType, final boolean allowOverdraft, final BigDecimal overdraftLimit, final BigDecimal minRequiredBalance, final boolean enforceMinRequiredBalance, final BigDecimal maxAllowedLienLimit, final boolean lienAllowed, final BigDecimal minBalanceForInterestCalculation, final BigDecimal nominalAnnualInterestRateOverdraft, final BigDecimal minOverdraftForInterestCalculation, final boolean withHoldTax, final TaxGroupData taxGroup, final Boolean isDormancyTrackingActive, final Long daysToInactive, final Long daysToDormancy, final Long daysToEscheat) { final Map<String, Object> accountingMappings = null; final Collection<PaymentTypeToGLAccountMapper> paymentChannelToFundSourceMappings = null; final Collection<CurrencyData> currencyOptions = null; final Collection<EnumOptionData> interestCompoundingPeriodTypeOptions = null; final Collection<EnumOptionData> interestPostingPeriodTypeOptions = null; final Collection<EnumOptionData> interestCalculationTypeOptions = null; final Collection<EnumOptionData> interestCalculationDaysInYearTypeOptions = null; final Collection<EnumOptionData> lockinPeriodFrequencyTypeOptions = null; final Collection<EnumOptionData> withdrawalFeeTypeOptions = null; final Collection<PaymentTypeData> paymentTypeOptions = null; final Collection<EnumOptionData> accountingRuleOptions = null; final Map<String, List<GLAccountData>> accountingMappingOptions = null; final Collection<ChargeData> chargeOptions = null; final Collection<ChargeData> penaltyOptions = null; final Collection<ChargeData> charges = null; final Collection<ChargeToGLAccountMapper> feeToIncomeAccountMappings = null; final 
Collection<ChargeToGLAccountMapper> penaltyToIncomeAccountMappings = null; final Collection<TaxGroupData> taxGroupOptions = null; final String accountMappingForPayment = null; return new SavingsProductData(id, name, shortName, description, currency, nominalAnnualInterestRate, interestCompoundingPeriodType, interestPostingPeriodType, interestCalculationType, interestCalculationDaysInYearType, minRequiredOpeningBalance, lockinPeriodFrequency, lockinPeriodFrequencyType, withdrawalFeeForTransfers, accountingType, accountingMappings, paymentChannelToFundSourceMappings, currencyOptions, interestCompoundingPeriodTypeOptions, interestPostingPeriodTypeOptions, interestCalculationTypeOptions, interestCalculationDaysInYearTypeOptions, lockinPeriodFrequencyTypeOptions, withdrawalFeeTypeOptions, paymentTypeOptions, accountingRuleOptions, accountingMappingOptions, charges, chargeOptions, penaltyOptions, feeToIncomeAccountMappings, penaltyToIncomeAccountMappings, allowOverdraft, overdraftLimit, minRequiredBalance, enforceMinRequiredBalance, maxAllowedLienLimit, lienAllowed, minBalanceForInterestCalculation, nominalAnnualInterestRateOverdraft, minOverdraftForInterestCalculation, withHoldTax, taxGroup, taxGroupOptions, isDormancyTrackingActive, daysToInactive, daysToDormancy, daysToEscheat, accountMappingForPayment); } public static SavingsProductData lookup(final Long id, final String name) { final String shortName = null; final CurrencyData currency = null; final String description = null; final BigDecimal nominalAnnualInterestRate = null; final EnumOptionData interestCompoundingPeriodType = null; final EnumOptionData interestPostingPeriodType = null; final EnumOptionData interestCalculationType = null; final EnumOptionData interestCalculationDaysInYearType = null; final BigDecimal minRequiredOpeningBalance = null; final Integer lockinPeriodFrequency = null; final EnumOptionData lockinPeriodFrequencyType = null; final boolean withdrawalFeeForTransfers = false; final 
EnumOptionData accountingType = null; final Map<String, Object> accountingMappings = null; final Collection<PaymentTypeToGLAccountMapper> paymentChannelToFundSourceMappings = null; final boolean allowOverdraft = false; final BigDecimal overdraftLimit = null; final BigDecimal nominalAnnualInterestRateOverdraft = null; final BigDecimal minOverdraftForInterestCalculation = null; final BigDecimal minRequiredBalance = null; final boolean enforceMinRequiredBalance = false; final BigDecimal maxAllowedLienLimit = null; final boolean lienAllowed = false; final BigDecimal minBalanceForInterestCalculation = null; final boolean withHoldTax = false; final TaxGroupData taxGroup = null; final Collection<CurrencyData> currencyOptions = null; final Collection<EnumOptionData> interestCompoundingPeriodTypeOptions = null; final Collection<EnumOptionData> interestPostingPeriodTypeOptions = null; final Collection<EnumOptionData> interestCalculationTypeOptions = null; final Collection<EnumOptionData> interestCalculationDaysInYearTypeOptions = null; final Collection<EnumOptionData> lockinPeriodFrequencyTypeOptions = null; final Collection<EnumOptionData> withdrawalFeeTypeOptions = null; final Collection<PaymentTypeData> paymentTypeOptions = null; final Collection<EnumOptionData> accountingRuleOptions = null; final Map<String, List<GLAccountData>> accountingMappingOptions = null; final Collection<ChargeData> charges = null; final Collection<ChargeData> chargeOptions = null; final Collection<ChargeData> penaltyOptions = null; final Collection<ChargeToGLAccountMapper> feeToIncomeAccountMappings = null; final Collection<ChargeToGLAccountMapper> penaltyToIncomeAccountMappings = null; final Collection<TaxGroupData> taxGroupOptions = null; final Boolean isDormancyTrackingActive = null; final Long daysToInactive = null; final Long daysToDormancy = null; final Long daysToEscheat = null; final String accountMappingForPayment = null; return new SavingsProductData(id, name, shortName, description, 
currency, nominalAnnualInterestRate, interestCompoundingPeriodType, interestPostingPeriodType, interestCalculationType, interestCalculationDaysInYearType, minRequiredOpeningBalance, lockinPeriodFrequency, lockinPeriodFrequencyType, withdrawalFeeForTransfers, accountingType, accountingMappings, paymentChannelToFundSourceMappings, currencyOptions, interestCompoundingPeriodTypeOptions, interestPostingPeriodTypeOptions, interestCalculationTypeOptions, interestCalculationDaysInYearTypeOptions, lockinPeriodFrequencyTypeOptions, withdrawalFeeTypeOptions, paymentTypeOptions, accountingRuleOptions, accountingMappingOptions, charges, chargeOptions, penaltyOptions, feeToIncomeAccountMappings, penaltyToIncomeAccountMappings, allowOverdraft, overdraftLimit, minRequiredBalance, enforceMinRequiredBalance, maxAllowedLienLimit, lienAllowed, minBalanceForInterestCalculation, nominalAnnualInterestRateOverdraft, minOverdraftForInterestCalculation, withHoldTax, taxGroup, taxGroupOptions, isDormancyTrackingActive, daysToInactive, daysToDormancy, daysToEscheat, accountMappingForPayment); } public static SavingsProductData createForInterestPosting(final Long id, final EnumOptionData accountingRule) { return new SavingsProductData(id, accountingRule); } private SavingsProductData(final Long id, final EnumOptionData accountingRule) { this.id = id; this.name = null; this.shortName = null; this.description = null; this.currency = null; this.nominalAnnualInterestRate = null; this.interestCompoundingPeriodType = null; this.interestPostingPeriodType = null; this.interestCalculationType = null; this.interestCalculationDaysInYearType = null; this.accountingRule = accountingRule; this.minRequiredOpeningBalance = null; this.lockinPeriodFrequency = null; this.lockinPeriodFrequencyType = null; this.withdrawalFeeForTransfers = false; this.currencyOptions = null; this.interestCompoundingPeriodTypeOptions = null; this.interestPostingPeriodTypeOptions = null; this.interestCalculationTypeOptions = null; 
this.interestCalculationDaysInYearTypeOptions = null; this.lockinPeriodFrequencyTypeOptions = null; this.withdrawalFeeTypeOptions = null; this.paymentTypeOptions = null; this.accountingMappingOptions = null; this.accountingRuleOptions = null; this.accountingMappings = null; this.paymentChannelToFundSourceMappings = null; this.charges = null;// charges associated with Savings product this.chargeOptions = null;// charges available for adding to // Savings product this.penaltyOptions = null;// penalties available for adding // to Savings product this.feeToIncomeAccountMappings = null; this.penaltyToIncomeAccountMappings = null; this.allowOverdraft = false; this.overdraftLimit = null; this.minRequiredBalance = null; this.enforceMinRequiredBalance = false; this.maxAllowedLienLimit = null; this.lienAllowed = false; this.minBalanceForInterestCalculation = null; this.nominalAnnualInterestRateOverdraft = null; this.minOverdraftForInterestCalculation = null; this.taxGroup = null; this.withHoldTax = false; this.taxGroupOptions = null; this.isDormancyTrackingActive = null; this.daysToInactive = null; this.daysToDormancy = null; this.daysToEscheat = null; this.accountMappingForPayment = null; } private SavingsProductData(final Long id, final String name, final String shortName, final String description, final CurrencyData currency, final BigDecimal nominalAnnualInterestRate, final EnumOptionData interestCompoundingPeriodType, final EnumOptionData interestPostingPeriodType, final EnumOptionData interestCalculationType, final EnumOptionData interestCalculationDaysInYearType, final BigDecimal minRequiredOpeningBalance, final Integer lockinPeriodFrequency, final EnumOptionData lockinPeriodFrequencyType, final boolean withdrawalFeeForTransfers, final EnumOptionData accountingType, final Map<String, Object> accountingMappings, final Collection<PaymentTypeToGLAccountMapper> paymentChannelToFundSourceMappings, final Collection<CurrencyData> currencyOptions, final 
Collection<EnumOptionData> interestCompoundingPeriodTypeOptions, final Collection<EnumOptionData> interestPostingPeriodTypeOptions, final Collection<EnumOptionData> interestCalculationTypeOptions, final Collection<EnumOptionData> interestCalculationDaysInYearTypeOptions, final Collection<EnumOptionData> lockinPeriodFrequencyTypeOptions, final Collection<EnumOptionData> withdrawalFeeTypeOptions, final Collection<PaymentTypeData> paymentTypeOptions, final Collection<EnumOptionData> accountingRuleOptions, final Map<String, List<GLAccountData>> accountingMappingOptions, final Collection<ChargeData> charges, final Collection<ChargeData> chargeOptions, final Collection<ChargeData> penaltyOptions, final Collection<ChargeToGLAccountMapper> feeToIncomeAccountMappings, final Collection<ChargeToGLAccountMapper> penaltyToIncomeAccountMappings, final boolean allowOverdraft, final BigDecimal overdraftLimit, final BigDecimal minRequiredBalance, final boolean enforceMinRequiredBalance, final BigDecimal maxAllowedLienLimit, final boolean lienAllowed, final BigDecimal minBalanceForInterestCalculation, final BigDecimal nominalAnnualInterestRateOverdraft, final BigDecimal minOverdraftForInterestCalculation, final boolean withHoldTax, final TaxGroupData taxGroup, final Collection<TaxGroupData> taxGroupOptions, final Boolean isDormancyTrackingActive, final Long daysToInactive, final Long daysToDormancy, final Long daysToEscheat, final String accountMappingForPayment) { this.id = id; this.name = name; this.shortName = shortName; this.description = description; this.currency = currency; this.nominalAnnualInterestRate = nominalAnnualInterestRate; this.interestCompoundingPeriodType = interestCompoundingPeriodType; this.interestPostingPeriodType = interestPostingPeriodType; this.interestCalculationType = interestCalculationType; this.interestCalculationDaysInYearType = interestCalculationDaysInYearType; this.accountingRule = accountingType; this.minRequiredOpeningBalance = 
// NOTE(review): this span begins mid-constructor; the token below completes the
// "this.minRequiredOpeningBalance = ..." assignment started on the preceding line.
minRequiredOpeningBalance;
        this.lockinPeriodFrequency = lockinPeriodFrequency;
        this.lockinPeriodFrequencyType = lockinPeriodFrequencyType;
        this.withdrawalFeeForTransfers = withdrawalFeeForTransfers;
        // Allowed-value ("options") collections surfaced for product create/edit templates.
        this.currencyOptions = currencyOptions;
        this.interestCompoundingPeriodTypeOptions = interestCompoundingPeriodTypeOptions;
        this.interestPostingPeriodTypeOptions = interestPostingPeriodTypeOptions;
        this.interestCalculationTypeOptions = interestCalculationTypeOptions;
        this.interestCalculationDaysInYearTypeOptions = interestCalculationDaysInYearTypeOptions;
        this.lockinPeriodFrequencyTypeOptions = lockinPeriodFrequencyTypeOptions;
        this.withdrawalFeeTypeOptions = withdrawalFeeTypeOptions;
        this.paymentTypeOptions = paymentTypeOptions;
        this.accountingMappingOptions = accountingMappingOptions;
        this.accountingRuleOptions = accountingRuleOptions;
        // An empty mapping collection is normalized to null so an absent mapping and an
        // empty one look identical to consumers (e.g. serialization omits the field).
        if (accountingMappings == null || accountingMappings.isEmpty()) {
            this.accountingMappings = null;
        } else {
            this.accountingMappings = accountingMappings;
        }
        this.paymentChannelToFundSourceMappings = paymentChannelToFundSourceMappings;
        this.charges = charges;// charges associated with Savings product
        this.chargeOptions = chargeOptions;// charges available for adding to Savings product
        this.penaltyOptions = penaltyOptions;// penalties available for adding to Savings product
        this.feeToIncomeAccountMappings = feeToIncomeAccountMappings;
        this.penaltyToIncomeAccountMappings = penaltyToIncomeAccountMappings;
        this.allowOverdraft = allowOverdraft;
        this.overdraftLimit = overdraftLimit;
        this.minRequiredBalance = minRequiredBalance;
        this.enforceMinRequiredBalance = enforceMinRequiredBalance;
        this.maxAllowedLienLimit = maxAllowedLienLimit;
        this.lienAllowed = lienAllowed;
        this.minBalanceForInterestCalculation = minBalanceForInterestCalculation;
        this.nominalAnnualInterestRateOverdraft = nominalAnnualInterestRateOverdraft;
        this.minOverdraftForInterestCalculation = minOverdraftForInterestCalculation;
        this.taxGroup = taxGroup;
        this.withHoldTax = withHoldTax;
        this.taxGroupOptions = taxGroupOptions;
        // Dormancy tracking: day thresholds for the inactive -> dormant -> escheat transitions.
        this.isDormancyTrackingActive = isDormancyTrackingActive;
        this.daysToInactive = daysToInactive;
        this.daysToDormancy = daysToDormancy;
        this.daysToEscheat = daysToEscheat;
        this.accountMappingForPayment = accountMappingForPayment;
    }

    /** True when any accounting rule other than NONE is configured for this product. */
    public boolean hasAccountingEnabled() {
        return this.accountingRule.getId() > AccountingRuleType.NONE.getValue();
    }

    /** The configured accounting rule's id as a primitive {@code int}. */
    public int accountingRuleTypeId() {
        return this.accountingRule.getId().intValue();
    }

    // Equality is based solely on the product id.
    // NOTE(review): dereferences id unconditionally — throws NPE if either side's id is
    // null (e.g. a template instance); confirm compared instances always carry an id.
    @Override
    public boolean equals(final Object obj) {
        if (!(obj instanceof SavingsProductData)) {
            return false;
        }
        final SavingsProductData productData = (SavingsProductData) obj;
        return productData.id.compareTo(this.id) == 0;
    }

    @Override
    public int hashCode() {
        return this.id.hashCode();
    }

    // --- simple accessors ---

    public String getName() {
        return this.name;
    }

    public String getDepositAccountType() {
        return depositAccountType;
    }

    public void setDepositAccountType(String depositAccountType) {
        this.depositAccountType = depositAccountType;
    }

    public BigDecimal getNominalAnnualInterestRate() {
        return nominalAnnualInterestRate;
    }

    public CurrencyData getCurrency() {
        return currency;
    }

    public Integer getLockinPeriodFrequency() {
        return lockinPeriodFrequency;
    }

    public EnumOptionData getLockinPeriodFrequencyType() {
        return lockinPeriodFrequencyType;
    }

    public BigDecimal getOverdraftLimit() {
        return overdraftLimit;
    }

    public BigDecimal getMinRequiredOpeningBalance() {
        return minRequiredOpeningBalance;
    }

    public EnumOptionData getInterestCompoundingPeriodType() {
        return interestCompoundingPeriodType;
    }

    public EnumOptionData getInterestPostingPeriodType() {
        return interestPostingPeriodType;
    }

    public EnumOptionData getInterestCalculationType() {
        return interestCalculationType;
    }

    public EnumOptionData getInterestCalculationDaysInYearType() {
        return interestCalculationDaysInYearType;
    }

    public boolean isAllowOverdraft() {
        return allowOverdraft;
    }

    public BigDecimal getMinRequiredBalance() {
        return minRequiredBalance;
    }

    public BigDecimal getMaxAllowedLienLimit() {
        return maxAllowedLienLimit;
    }

    public Long getId() {
        return id;
    }

    public boolean isWithdrawalFeeForTransfers() {
        return withdrawalFeeForTransfers;
    }

    // The accounting-rule predicates compare the rule's numeric id rendered as a string
    // against the EnumOptionData value. NOTE(review): this assumes accountingRule.getValue()
    // holds the id string rather than a display name — confirm against EnumOptionData usage.
    public boolean isCashBasedAccountingEnabled() {
        return AccountingRuleType.CASH_BASED.getValue().toString().equals(this.accountingRule.getValue());
    }

    public boolean isAccrualBasedAccountingEnabled() {
        return isUpfrontAccrualAccounting() || isPeriodicAccrualAccounting();
    }

    public boolean isUpfrontAccrualAccounting() {
        return AccountingRuleType.ACCRUAL_UPFRONT.getValue().toString().equals(this.accountingRule.getValue());
    }

    public boolean isPeriodicAccrualAccounting() {
        return AccountingRuleType.ACCRUAL_PERIODIC.getValue().toString().equals(this.accountingRule.getValue());
    }
}
oracle/graal
35,909
espresso/src/com.oracle.truffle.espresso.libjavavm/src/com/oracle/truffle/espresso/libjavavm/jniapi/JNINativeInterface.java
/* * Copyright (c) 2020, 2020, Oracle and/or its affiliates. All rights reserved. * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER. * * This code is free software; you can redistribute it and/or modify it * under the terms of the GNU General Public License version 2 only, as * published by the Free Software Foundation. * * This code is distributed in the hope that it will be useful, but WITHOUT * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License * version 2 for more details (a copy is included in the LICENSE file that * accompanied this code). * * You should have received a copy of the GNU General Public License version * 2 along with this work; if not, write to the Free Software Foundation, * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA. * * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA * or visit www.oracle.com if you need additional information or have any * questions. 
*/ package com.oracle.truffle.espresso.libjavavm.jniapi; import org.graalvm.nativeimage.c.CContext; import org.graalvm.nativeimage.c.function.CFunctionPointer; import org.graalvm.nativeimage.c.struct.CField; import org.graalvm.nativeimage.c.struct.CStruct; import org.graalvm.nativeimage.c.type.WordPointer; import org.graalvm.word.PointerBase; import com.oracle.truffle.espresso.libjavavm.jniapi.JNIFunctionPointerTypes.CallBooleanMethodAFunctionPointer; import com.oracle.truffle.espresso.libjavavm.jniapi.JNIFunctionPointerTypes.CallIntMethodAFunctionPointer; import com.oracle.truffle.espresso.libjavavm.jniapi.JNIFunctionPointerTypes.CallLongMethodAFunctionPointer; import com.oracle.truffle.espresso.libjavavm.jniapi.JNIFunctionPointerTypes.CallObjectMethodAFunctionPointer; import com.oracle.truffle.espresso.libjavavm.jniapi.JNIFunctionPointerTypes.CallStaticLongMethodAFunctionPointer; import com.oracle.truffle.espresso.libjavavm.jniapi.JNIFunctionPointerTypes.CallStaticObjectMethodAFunctionPointer; import com.oracle.truffle.espresso.libjavavm.jniapi.JNIFunctionPointerTypes.CallVoidMethodAFunctionPointer; import com.oracle.truffle.espresso.libjavavm.jniapi.JNIFunctionPointerTypes.DefineClassFunctionPointer; import com.oracle.truffle.espresso.libjavavm.jniapi.JNIFunctionPointerTypes.DeleteGlobalRefFunctionPointer; import com.oracle.truffle.espresso.libjavavm.jniapi.JNIFunctionPointerTypes.ExceptionCheckFunctionPointer; import com.oracle.truffle.espresso.libjavavm.jniapi.JNIFunctionPointerTypes.ExceptionOccurredFunctionPointer; import com.oracle.truffle.espresso.libjavavm.jniapi.JNIFunctionPointerTypes.ExceptionVoidFunctionPointer; import com.oracle.truffle.espresso.libjavavm.jniapi.JNIFunctionPointerTypes.FindClassFunctionPointer; import com.oracle.truffle.espresso.libjavavm.jniapi.JNIFunctionPointerTypes.FromReflectedFieldFunctionPointer; import com.oracle.truffle.espresso.libjavavm.jniapi.JNIFunctionPointerTypes.FromReflectedMethodFunctionPointer; import 
com.oracle.truffle.espresso.libjavavm.jniapi.JNIFunctionPointerTypes.GetArrayLengthFunctionPointer; import com.oracle.truffle.espresso.libjavavm.jniapi.JNIFunctionPointerTypes.GetBooleanFieldFunctionPointer; import com.oracle.truffle.espresso.libjavavm.jniapi.JNIFunctionPointerTypes.GetByteArrayElementsFunctionPointer; import com.oracle.truffle.espresso.libjavavm.jniapi.JNIFunctionPointerTypes.GetFieldIDFunctionPointer; import com.oracle.truffle.espresso.libjavavm.jniapi.JNIFunctionPointerTypes.GetMethodIDFunctionPointer; import com.oracle.truffle.espresso.libjavavm.jniapi.JNIFunctionPointerTypes.GetObjectArrayElementFunctionPointer; import com.oracle.truffle.espresso.libjavavm.jniapi.JNIFunctionPointerTypes.GetObjectClassFunctionPointer; import com.oracle.truffle.espresso.libjavavm.jniapi.JNIFunctionPointerTypes.GetObjectFieldFunctionPointer; import com.oracle.truffle.espresso.libjavavm.jniapi.JNIFunctionPointerTypes.GetStaticObjectFieldFunctionPointer; import com.oracle.truffle.espresso.libjavavm.jniapi.JNIFunctionPointerTypes.GetStringUTFCharsFunctionPointer; import com.oracle.truffle.espresso.libjavavm.jniapi.JNIFunctionPointerTypes.GetSuperclassFunctionPointer; import com.oracle.truffle.espresso.libjavavm.jniapi.JNIFunctionPointerTypes.IsAssignableFromFunctionPointer; import com.oracle.truffle.espresso.libjavavm.jniapi.JNIFunctionPointerTypes.IsSameObjectFunctionPointer; import com.oracle.truffle.espresso.libjavavm.jniapi.JNIFunctionPointerTypes.NewByteArrayFunctionPointer; import com.oracle.truffle.espresso.libjavavm.jniapi.JNIFunctionPointerTypes.NewGlobalRefFunctionPointer; import com.oracle.truffle.espresso.libjavavm.jniapi.JNIFunctionPointerTypes.NewObjectAFunctionPointer; import com.oracle.truffle.espresso.libjavavm.jniapi.JNIFunctionPointerTypes.NewObjectArrayFunctionPointer; import com.oracle.truffle.espresso.libjavavm.jniapi.JNIFunctionPointerTypes.NewStringUTFFunctionPointer; import 
com.oracle.truffle.espresso.libjavavm.jniapi.JNIFunctionPointerTypes.RegisterNativesFunctionPointer; import com.oracle.truffle.espresso.libjavavm.jniapi.JNIFunctionPointerTypes.ReleaseByteArrayElementsFunctionPointer; import com.oracle.truffle.espresso.libjavavm.jniapi.JNIFunctionPointerTypes.ReleaseStringUTFCharsFunctionPointer; import com.oracle.truffle.espresso.libjavavm.jniapi.JNIFunctionPointerTypes.SetObjectArrayElementFunctionPointer; import com.oracle.truffle.espresso.libjavavm.jniapi.JNIFunctionPointerTypes.ThrowFunctionPointer; import com.oracle.truffle.espresso.libjavavm.jniapi.JNIFunctionPointerTypes.ThrowNewFunctionPointer; import com.oracle.truffle.espresso.libjavavm.jniapi.JNIFunctionPointerTypes.ToReflectedFieldFunctionPointer; import com.oracle.truffle.espresso.libjavavm.jniapi.JNIFunctionPointerTypes.ToReflectedMethodFunctionPointer;
/**
 * Native-image C-struct view of the JNI {@code JNINativeInterface_} function table (the
 * table a {@code JNIEnv*} points to). Each JNI function slot is exposed as a getter/setter
 * pair; slots for which this code needs a callable Java signature use a dedicated typed
 * function pointer from {@code JNIFunctionPointerTypes}, all remaining slots use the
 * generic {@link CFunctionPointer}. Declaration order follows the JNI specification's
 * table; binding to the native struct is by field name via {@code @CStruct}/{@code @CField}.
 */
@CContext(JNIHeaderDirectives.class) @CStruct(value = "JNINativeInterface_", addStructKeyword = true) public interface JNINativeInterface extends PointerBase {
    // The first four slots are reserved by the JNI specification.
@CField WordPointer reserved0(); @CField WordPointer reserved1(); @CField WordPointer reserved2(); @CField WordPointer reserved3(); @CField CFunctionPointer getGetVersion(); @CField void setGetVersion(CFunctionPointer p); @CField DefineClassFunctionPointer getDefineClass(); @CField void setDefineClass(DefineClassFunctionPointer p); @CField FindClassFunctionPointer getFindClass(); @CField void setFindClass(FindClassFunctionPointer p); @CField FromReflectedMethodFunctionPointer getFromReflectedMethod(); @CField void setFromReflectedMethod(FromReflectedMethodFunctionPointer p); @CField FromReflectedFieldFunctionPointer getFromReflectedField(); @CField void setFromReflectedField(FromReflectedFieldFunctionPointer p); @CField ToReflectedMethodFunctionPointer getToReflectedMethod(); @CField void setToReflectedMethod(ToReflectedMethodFunctionPointer p); @CField GetSuperclassFunctionPointer getGetSuperclass(); @CField void setGetSuperclass(GetSuperclassFunctionPointer
p); @CField IsAssignableFromFunctionPointer getIsAssignableFrom(); @CField void setIsAssignableFrom(IsAssignableFromFunctionPointer p); @CField ToReflectedFieldFunctionPointer getToReflectedField(); @CField void setToReflectedField(ToReflectedFieldFunctionPointer p); @CField ThrowFunctionPointer getThrow(); @CField void setThrow(ThrowFunctionPointer p); @CField ThrowNewFunctionPointer getThrowNew(); @CField void setThrowNew(ThrowNewFunctionPointer p); @CField ExceptionOccurredFunctionPointer getExceptionOccurred(); @CField void setExceptionOccurred(ExceptionOccurredFunctionPointer p); @CField ExceptionVoidFunctionPointer getExceptionDescribe(); @CField void setExceptionDescribe(ExceptionVoidFunctionPointer p); @CField ExceptionVoidFunctionPointer getExceptionClear(); @CField void setExceptionClear(ExceptionVoidFunctionPointer p); @CField CFunctionPointer getFatalError(); @CField void setFatalError(CFunctionPointer p); @CField CFunctionPointer getPushLocalFrame(); @CField void setPushLocalFrame(CFunctionPointer p); @CField CFunctionPointer getPopLocalFrame(); @CField void setPopLocalFrame(CFunctionPointer p); @CField NewGlobalRefFunctionPointer getNewGlobalRef(); @CField void setNewGlobalRef(NewGlobalRefFunctionPointer p); @CField DeleteGlobalRefFunctionPointer getDeleteGlobalRef(); @CField void setDeleteGlobalRef(DeleteGlobalRefFunctionPointer p); @CField CFunctionPointer getDeleteLocalRef(); @CField void setDeleteLocalRef(CFunctionPointer p); @CField IsSameObjectFunctionPointer getIsSameObject(); @CField void setIsSameObject(IsSameObjectFunctionPointer p); @CField CFunctionPointer getNewLocalRef(); @CField void setNewLocalRef(CFunctionPointer p); @CField CFunctionPointer getEnsureLocalCapacity(); @CField void setEnsureLocalCapacity(CFunctionPointer p); @CField CFunctionPointer getAllocObject(); @CField void setAllocObject(CFunctionPointer p); @CField CFunctionPointer getNewObject(); @CField void setNewObject(CFunctionPointer p); @CField CFunctionPointer
getNewObjectV(); @CField void setNewObjectV(CFunctionPointer p); @CField NewObjectAFunctionPointer getNewObjectA(); @CField void setNewObjectA(NewObjectAFunctionPointer p); @CField GetObjectClassFunctionPointer getGetObjectClass(); @CField void setGetObjectClass(GetObjectClassFunctionPointer p); @CField CFunctionPointer getIsInstanceOf(); @CField void setIsInstanceOf(CFunctionPointer p); @CField GetMethodIDFunctionPointer getGetMethodID(); @CField void setGetMethodID(GetMethodIDFunctionPointer p); @CField CFunctionPointer getCallObjectMethod(); @CField void setCallObjectMethod(CFunctionPointer p); @CField CFunctionPointer getCallObjectMethodV(); @CField void setCallObjectMethodV(CFunctionPointer p); @CField CallObjectMethodAFunctionPointer getCallObjectMethodA(); @CField void setCallObjectMethodA(CallObjectMethodAFunctionPointer p); @CField CFunctionPointer getCallBooleanMethod(); @CField void setCallBooleanMethod(CFunctionPointer p); @CField CFunctionPointer getCallBooleanMethodV(); @CField void setCallBooleanMethodV(CFunctionPointer p); @CField CallBooleanMethodAFunctionPointer getCallBooleanMethodA(); @CField void setCallBooleanMethodA(CallBooleanMethodAFunctionPointer p); @CField CFunctionPointer getCallByteMethod(); @CField void setCallByteMethod(CFunctionPointer p); @CField CFunctionPointer getCallByteMethodV(); @CField void setCallByteMethodV(CFunctionPointer p); @CField CFunctionPointer getCallByteMethodA(); @CField void setCallByteMethodA(CFunctionPointer p); @CField CFunctionPointer getCallCharMethod(); @CField void setCallCharMethod(CFunctionPointer p); @CField CFunctionPointer getCallCharMethodV(); @CField void setCallCharMethodV(CFunctionPointer p); @CField CFunctionPointer getCallCharMethodA(); @CField void setCallCharMethodA(CFunctionPointer p); @CField CFunctionPointer getCallShortMethod(); @CField void setCallShortMethod(CFunctionPointer p); @CField CFunctionPointer getCallShortMethodV(); @CField void setCallShortMethodV(CFunctionPointer p);
@CField CFunctionPointer getCallShortMethodA(); @CField void setCallShortMethodA(CFunctionPointer p); @CField CFunctionPointer getCallIntMethod(); @CField void setCallIntMethod(CFunctionPointer p); @CField CFunctionPointer getCallIntMethodV(); @CField void setCallIntMethodV(CFunctionPointer p); @CField CallIntMethodAFunctionPointer getCallIntMethodA(); @CField void setCallIntMethodA(CallIntMethodAFunctionPointer p); @CField CFunctionPointer getCallLongMethod(); @CField void setCallLongMethod(CFunctionPointer p); @CField CFunctionPointer getCallLongMethodV(); @CField void setCallLongMethodV(CFunctionPointer p); @CField CallLongMethodAFunctionPointer getCallLongMethodA(); @CField void setCallLongMethodA(CallLongMethodAFunctionPointer p); @CField CFunctionPointer getCallFloatMethod(); @CField void setCallFloatMethod(CFunctionPointer p); @CField CFunctionPointer getCallFloatMethodV(); @CField void setCallFloatMethodV(CFunctionPointer p); @CField CFunctionPointer getCallFloatMethodA(); @CField void setCallFloatMethodA(CFunctionPointer p); @CField CFunctionPointer getCallDoubleMethod(); @CField void setCallDoubleMethod(CFunctionPointer p); @CField CFunctionPointer getCallDoubleMethodV(); @CField void setCallDoubleMethodV(CFunctionPointer p); @CField CFunctionPointer getCallDoubleMethodA(); @CField void setCallDoubleMethodA(CFunctionPointer p); @CField CFunctionPointer getCallVoidMethod(); @CField void setCallVoidMethod(CFunctionPointer p); @CField CFunctionPointer getCallVoidMethodV(); @CField void setCallVoidMethodV(CFunctionPointer p); @CField CallVoidMethodAFunctionPointer getCallVoidMethodA(); @CField void setCallVoidMethodA(CallVoidMethodAFunctionPointer p); @CField CFunctionPointer getCallNonvirtualObjectMethod(); @CField void setCallNonvirtualObjectMethod(CFunctionPointer p); @CField CFunctionPointer getCallNonvirtualObjectMethodV(); @CField void setCallNonvirtualObjectMethodV(CFunctionPointer p); @CField CFunctionPointer getCallNonvirtualObjectMethodA(); @CField
void setCallNonvirtualObjectMethodA(CFunctionPointer p); @CField CFunctionPointer getCallNonvirtualBooleanMethod(); @CField void setCallNonvirtualBooleanMethod(CFunctionPointer p); @CField CFunctionPointer getCallNonvirtualBooleanMethodV(); @CField void setCallNonvirtualBooleanMethodV(CFunctionPointer p); @CField CFunctionPointer getCallNonvirtualBooleanMethodA(); @CField void setCallNonvirtualBooleanMethodA(CFunctionPointer p); @CField CFunctionPointer getCallNonvirtualByteMethod(); @CField void setCallNonvirtualByteMethod(CFunctionPointer p); @CField CFunctionPointer getCallNonvirtualByteMethodV(); @CField void setCallNonvirtualByteMethodV(CFunctionPointer p); @CField CFunctionPointer getCallNonvirtualByteMethodA(); @CField void setCallNonvirtualByteMethodA(CFunctionPointer p); @CField CFunctionPointer getCallNonvirtualCharMethod(); @CField void setCallNonvirtualCharMethod(CFunctionPointer p); @CField CFunctionPointer getCallNonvirtualCharMethodV(); @CField void setCallNonvirtualCharMethodV(CFunctionPointer p); @CField CFunctionPointer getCallNonvirtualCharMethodA(); @CField void setCallNonvirtualCharMethodA(CFunctionPointer p); @CField CFunctionPointer getCallNonvirtualShortMethod(); @CField void setCallNonvirtualShortMethod(CFunctionPointer p); @CField CFunctionPointer getCallNonvirtualShortMethodV(); @CField void setCallNonvirtualShortMethodV(CFunctionPointer p); @CField CFunctionPointer getCallNonvirtualShortMethodA(); @CField void setCallNonvirtualShortMethodA(CFunctionPointer p); @CField CFunctionPointer getCallNonvirtualIntMethod(); @CField void setCallNonvirtualIntMethod(CFunctionPointer p); @CField CFunctionPointer getCallNonvirtualIntMethodV(); @CField void setCallNonvirtualIntMethodV(CFunctionPointer p); @CField CFunctionPointer getCallNonvirtualIntMethodA(); @CField void setCallNonvirtualIntMethodA(CFunctionPointer p); @CField CFunctionPointer getCallNonvirtualLongMethod(); @CField void setCallNonvirtualLongMethod(CFunctionPointer p); @CField
CFunctionPointer getCallNonvirtualLongMethodV(); @CField void setCallNonvirtualLongMethodV(CFunctionPointer p); @CField CFunctionPointer getCallNonvirtualLongMethodA(); @CField void setCallNonvirtualLongMethodA(CFunctionPointer p); @CField CFunctionPointer getCallNonvirtualFloatMethod(); @CField void setCallNonvirtualFloatMethod(CFunctionPointer p); @CField CFunctionPointer getCallNonvirtualFloatMethodV(); @CField void setCallNonvirtualFloatMethodV(CFunctionPointer p); @CField CFunctionPointer getCallNonvirtualFloatMethodA(); @CField void setCallNonvirtualFloatMethodA(CFunctionPointer p); @CField CFunctionPointer getCallNonvirtualDoubleMethod(); @CField void setCallNonvirtualDoubleMethod(CFunctionPointer p); @CField CFunctionPointer getCallNonvirtualDoubleMethodV(); @CField void setCallNonvirtualDoubleMethodV(CFunctionPointer p); @CField CFunctionPointer getCallNonvirtualDoubleMethodA(); @CField void setCallNonvirtualDoubleMethodA(CFunctionPointer p); @CField CFunctionPointer getCallNonvirtualVoidMethod(); @CField void setCallNonvirtualVoidMethod(CFunctionPointer p); @CField CFunctionPointer getCallNonvirtualVoidMethodV(); @CField void setCallNonvirtualVoidMethodV(CFunctionPointer p); @CField CFunctionPointer getCallNonvirtualVoidMethodA(); @CField void setCallNonvirtualVoidMethodA(CFunctionPointer p);
// Instance field access.
@CField GetFieldIDFunctionPointer getGetFieldID(); @CField void setGetFieldID(GetFieldIDFunctionPointer p); @CField GetObjectFieldFunctionPointer getGetObjectField(); @CField void setGetObjectField(GetObjectFieldFunctionPointer p); @CField GetBooleanFieldFunctionPointer getGetBooleanField(); @CField void setGetBooleanField(GetBooleanFieldFunctionPointer p); @CField CFunctionPointer getGetByteField(); @CField void setGetByteField(CFunctionPointer p); @CField CFunctionPointer getGetCharField(); @CField void setGetCharField(CFunctionPointer p); @CField CFunctionPointer getGetShortField(); @CField void setGetShortField(CFunctionPointer p); @CField CFunctionPointer
getGetIntField(); @CField void setGetIntField(CFunctionPointer p); @CField CFunctionPointer getGetLongField(); @CField void setGetLongField(CFunctionPointer p); @CField CFunctionPointer getGetFloatField(); @CField void setGetFloatField(CFunctionPointer p); @CField CFunctionPointer getGetDoubleField(); @CField void setGetDoubleField(CFunctionPointer p); @CField CFunctionPointer getSetObjectField(); @CField void setSetObjectField(CFunctionPointer p); @CField CFunctionPointer getSetBooleanField(); @CField void setSetBooleanField(CFunctionPointer p); @CField CFunctionPointer getSetByteField(); @CField void setSetByteField(CFunctionPointer p); @CField CFunctionPointer getSetCharField(); @CField void setSetCharField(CFunctionPointer p); @CField CFunctionPointer getSetShortField(); @CField void setSetShortField(CFunctionPointer p); @CField CFunctionPointer getSetIntField(); @CField void setSetIntField(CFunctionPointer p); @CField CFunctionPointer getSetLongField(); @CField void setSetLongField(CFunctionPointer p); @CField CFunctionPointer getSetFloatField(); @CField void setSetFloatField(CFunctionPointer p); @CField CFunctionPointer getSetDoubleField(); @CField void setSetDoubleField(CFunctionPointer p);
// Static method calls and static field access.
@CField GetMethodIDFunctionPointer getGetStaticMethodID(); @CField void setGetStaticMethodID(GetMethodIDFunctionPointer p); @CField CFunctionPointer getCallStaticObjectMethod(); @CField void setCallStaticObjectMethod(CFunctionPointer p); @CField CFunctionPointer getCallStaticObjectMethodV(); @CField void setCallStaticObjectMethodV(CFunctionPointer p); @CField CallStaticObjectMethodAFunctionPointer getCallStaticObjectMethodA(); @CField void setCallStaticObjectMethodA(CallStaticObjectMethodAFunctionPointer p); @CField CFunctionPointer getCallStaticBooleanMethod(); @CField void setCallStaticBooleanMethod(CFunctionPointer p); @CField CFunctionPointer getCallStaticBooleanMethodV(); @CField void setCallStaticBooleanMethodV(CFunctionPointer p); @CField
CallBooleanMethodAFunctionPointer getCallStaticBooleanMethodA(); @CField void setCallStaticBooleanMethodA(CallBooleanMethodAFunctionPointer p); @CField CFunctionPointer getCallStaticByteMethod(); @CField void setCallStaticByteMethod(CFunctionPointer p); @CField CFunctionPointer getCallStaticByteMethodV(); @CField void setCallStaticByteMethodV(CFunctionPointer p); @CField CFunctionPointer getCallStaticByteMethodA(); @CField void setCallStaticByteMethodA(CFunctionPointer p); @CField CFunctionPointer getCallStaticCharMethod(); @CField void setCallStaticCharMethod(CFunctionPointer p); @CField CFunctionPointer getCallStaticCharMethodV(); @CField void setCallStaticCharMethodV(CFunctionPointer p); @CField CFunctionPointer getCallStaticCharMethodA(); @CField void setCallStaticCharMethodA(CFunctionPointer p); @CField CFunctionPointer getCallStaticShortMethod(); @CField void setCallStaticShortMethod(CFunctionPointer p); @CField CFunctionPointer getCallStaticShortMethodV(); @CField void setCallStaticShortMethodV(CFunctionPointer p); @CField CFunctionPointer getCallStaticShortMethodA(); @CField void setCallStaticShortMethodA(CFunctionPointer p); @CField CFunctionPointer getCallStaticIntMethod(); @CField void setCallStaticIntMethod(CFunctionPointer p); @CField CFunctionPointer getCallStaticIntMethodV(); @CField void setCallStaticIntMethodV(CFunctionPointer p); @CField CFunctionPointer getCallStaticIntMethodA(); @CField void setCallStaticIntMethodA(CFunctionPointer p); @CField CFunctionPointer getCallStaticLongMethod(); @CField void setCallStaticLongMethod(CFunctionPointer p); @CField CFunctionPointer getCallStaticLongMethodV(); @CField void setCallStaticLongMethodV(CFunctionPointer p); @CField CallStaticLongMethodAFunctionPointer getCallStaticLongMethodA(); @CField void setCallStaticLongMethodA(CallStaticLongMethodAFunctionPointer p); @CField CFunctionPointer getCallStaticFloatMethod(); @CField void setCallStaticFloatMethod(CFunctionPointer p); @CField CFunctionPointer
getCallStaticFloatMethodV(); @CField void setCallStaticFloatMethodV(CFunctionPointer p); @CField CFunctionPointer getCallStaticFloatMethodA(); @CField void setCallStaticFloatMethodA(CFunctionPointer p); @CField CFunctionPointer getCallStaticDoubleMethod(); @CField void setCallStaticDoubleMethod(CFunctionPointer p); @CField CFunctionPointer getCallStaticDoubleMethodV(); @CField void setCallStaticDoubleMethodV(CFunctionPointer p); @CField CFunctionPointer getCallStaticDoubleMethodA(); @CField void setCallStaticDoubleMethodA(CFunctionPointer p); @CField CFunctionPointer getCallStaticVoidMethod(); @CField void setCallStaticVoidMethod(CFunctionPointer p); @CField CFunctionPointer getCallStaticVoidMethodV(); @CField void setCallStaticVoidMethodV(CFunctionPointer p); @CField CallVoidMethodAFunctionPointer getCallStaticVoidMethodA(); @CField void setCallStaticVoidMethodA(CallVoidMethodAFunctionPointer p); @CField GetFieldIDFunctionPointer getGetStaticFieldID(); @CField void setGetStaticFieldID(GetFieldIDFunctionPointer p); @CField GetStaticObjectFieldFunctionPointer getGetStaticObjectField();
// NOTE(review): this setter takes the generic CFunctionPointer while its getter returns
// the typed GetStaticObjectFieldFunctionPointer — inconsistent with the sibling pairs.
// Widening the parameter is harmless to callers, but confirm it is intentional.
@CField void setGetStaticObjectField(CFunctionPointer p); @CField CFunctionPointer getGetStaticBooleanField(); @CField void setGetStaticBooleanField(CFunctionPointer p); @CField CFunctionPointer getGetStaticByteField(); @CField void setGetStaticByteField(CFunctionPointer p); @CField CFunctionPointer getGetStaticCharField(); @CField void setGetStaticCharField(CFunctionPointer p); @CField CFunctionPointer getGetStaticShortField(); @CField void setGetStaticShortField(CFunctionPointer p); @CField CFunctionPointer getGetStaticIntField(); @CField void setGetStaticIntField(CFunctionPointer p); @CField CFunctionPointer getGetStaticLongField(); @CField void setGetStaticLongField(CFunctionPointer p); @CField CFunctionPointer getGetStaticFloatField(); @CField void setGetStaticFloatField(CFunctionPointer p); @CField CFunctionPointer getGetStaticDoubleField(); @CField void
setGetStaticDoubleField(CFunctionPointer p); @CField CFunctionPointer getSetStaticObjectField(); @CField void setSetStaticObjectField(CFunctionPointer p); @CField CFunctionPointer getSetStaticBooleanField(); @CField void setSetStaticBooleanField(CFunctionPointer p); @CField CFunctionPointer getSetStaticByteField(); @CField void setSetStaticByteField(CFunctionPointer p); @CField CFunctionPointer getSetStaticCharField(); @CField void setSetStaticCharField(CFunctionPointer p); @CField CFunctionPointer getSetStaticShortField(); @CField void setSetStaticShortField(CFunctionPointer p); @CField CFunctionPointer getSetStaticIntField(); @CField void setSetStaticIntField(CFunctionPointer p); @CField CFunctionPointer getSetStaticLongField(); @CField void setSetStaticLongField(CFunctionPointer p); @CField CFunctionPointer getSetStaticFloatField(); @CField void setSetStaticFloatField(CFunctionPointer p); @CField CFunctionPointer getSetStaticDoubleField(); @CField void setSetStaticDoubleField(CFunctionPointer p);
// String operations.
@CField CFunctionPointer getNewString(); @CField void setNewString(CFunctionPointer p); @CField CFunctionPointer getGetStringLength(); @CField void setGetStringLength(CFunctionPointer p); @CField CFunctionPointer getGetStringChars(); @CField void setGetStringChars(CFunctionPointer p); @CField CFunctionPointer getReleaseStringChars(); @CField void setReleaseStringChars(CFunctionPointer p); @CField NewStringUTFFunctionPointer getNewStringUTF(); @CField void setNewStringUTF(NewStringUTFFunctionPointer p); @CField CFunctionPointer getGetStringUTFLength(); @CField void setGetStringUTFLength(CFunctionPointer p); @CField GetStringUTFCharsFunctionPointer getGetStringUTFChars(); @CField void setGetStringUTFChars(GetStringUTFCharsFunctionPointer p); @CField ReleaseStringUTFCharsFunctionPointer getReleaseStringUTFChars(); @CField void setReleaseStringUTFChars(ReleaseStringUTFCharsFunctionPointer p);
// Array operations.
@CField GetArrayLengthFunctionPointer getGetArrayLength(); @CField void
setGetArrayLength(GetArrayLengthFunctionPointer p); @CField NewObjectArrayFunctionPointer getNewObjectArray(); @CField void setNewObjectArray(NewObjectArrayFunctionPointer p); @CField GetObjectArrayElementFunctionPointer getGetObjectArrayElement(); @CField void setGetObjectArrayElement(GetObjectArrayElementFunctionPointer p); @CField SetObjectArrayElementFunctionPointer getSetObjectArrayElement(); @CField void setSetObjectArrayElement(SetObjectArrayElementFunctionPointer p); @CField CFunctionPointer getNewBooleanArray(); @CField void setNewBooleanArray(CFunctionPointer p); @CField NewByteArrayFunctionPointer getNewByteArray(); @CField void setNewByteArray(NewByteArrayFunctionPointer p); @CField CFunctionPointer getNewCharArray(); @CField void setNewCharArray(CFunctionPointer p); @CField CFunctionPointer getNewShortArray(); @CField void setNewShortArray(CFunctionPointer p); @CField CFunctionPointer getNewIntArray(); @CField void setNewIntArray(CFunctionPointer p); @CField CFunctionPointer getNewLongArray(); @CField void setNewLongArray(CFunctionPointer p); @CField CFunctionPointer getNewFloatArray(); @CField void setNewFloatArray(CFunctionPointer p); @CField CFunctionPointer getNewDoubleArray(); @CField void setNewDoubleArray(CFunctionPointer p); @CField CFunctionPointer getGetBooleanArrayElements(); @CField void setGetBooleanArrayElements(CFunctionPointer p); @CField GetByteArrayElementsFunctionPointer getGetByteArrayElements(); @CField void setGetByteArrayElements(GetByteArrayElementsFunctionPointer p); @CField CFunctionPointer getGetCharArrayElements(); @CField void setGetCharArrayElements(CFunctionPointer p); @CField CFunctionPointer getGetShortArrayElements(); @CField void setGetShortArrayElements(CFunctionPointer p); @CField CFunctionPointer getGetIntArrayElements(); @CField void setGetIntArrayElements(CFunctionPointer p); @CField CFunctionPointer getGetLongArrayElements(); @CField void setGetLongArrayElements(CFunctionPointer p); @CField CFunctionPointer
getGetFloatArrayElements(); @CField void setGetFloatArrayElements(CFunctionPointer p); @CField CFunctionPointer getGetDoubleArrayElements(); @CField void setGetDoubleArrayElements(CFunctionPointer p); @CField CFunctionPointer getReleaseBooleanArrayElements(); @CField void setReleaseBooleanArrayElements(CFunctionPointer p); @CField ReleaseByteArrayElementsFunctionPointer getReleaseByteArrayElements(); @CField void setReleaseByteArrayElements(ReleaseByteArrayElementsFunctionPointer p); @CField CFunctionPointer getReleaseCharArrayElements(); @CField void setReleaseCharArrayElements(CFunctionPointer p); @CField CFunctionPointer getReleaseShortArrayElements(); @CField void setReleaseShortArrayElements(CFunctionPointer p); @CField CFunctionPointer getReleaseIntArrayElements(); @CField void setReleaseIntArrayElements(CFunctionPointer p); @CField CFunctionPointer getReleaseLongArrayElements(); @CField void setReleaseLongArrayElements(CFunctionPointer p); @CField CFunctionPointer getReleaseFloatArrayElements(); @CField void setReleaseFloatArrayElements(CFunctionPointer p); @CField CFunctionPointer getReleaseDoubleArrayElements(); @CField void setReleaseDoubleArrayElements(CFunctionPointer p); @CField CFunctionPointer getGetBooleanArrayRegion(); @CField void setGetBooleanArrayRegion(CFunctionPointer p); @CField CFunctionPointer getGetByteArrayRegion(); @CField void setGetByteArrayRegion(CFunctionPointer p); @CField CFunctionPointer getGetCharArrayRegion(); @CField void setGetCharArrayRegion(CFunctionPointer p); @CField CFunctionPointer getGetShortArrayRegion(); @CField void setGetShortArrayRegion(CFunctionPointer p); @CField CFunctionPointer getGetIntArrayRegion(); @CField void setGetIntArrayRegion(CFunctionPointer p); @CField CFunctionPointer getGetLongArrayRegion(); @CField void setGetLongArrayRegion(CFunctionPointer p); @CField CFunctionPointer getGetFloatArrayRegion(); @CField void setGetFloatArrayRegion(CFunctionPointer p); @CField CFunctionPointer
getGetDoubleArrayRegion(); @CField void setGetDoubleArrayRegion(CFunctionPointer p); @CField CFunctionPointer getSetBooleanArrayRegion(); @CField void setSetBooleanArrayRegion(CFunctionPointer p); @CField CFunctionPointer getSetByteArrayRegion(); @CField void setSetByteArrayRegion(CFunctionPointer p); @CField CFunctionPointer getSetCharArrayRegion(); @CField void setSetCharArrayRegion(CFunctionPointer p); @CField CFunctionPointer getSetShortArrayRegion(); @CField void setSetShortArrayRegion(CFunctionPointer p); @CField CFunctionPointer getSetIntArrayRegion(); @CField void setSetIntArrayRegion(CFunctionPointer p); @CField CFunctionPointer getSetLongArrayRegion(); @CField void setSetLongArrayRegion(CFunctionPointer p); @CField CFunctionPointer getSetFloatArrayRegion(); @CField void setSetFloatArrayRegion(CFunctionPointer p); @CField CFunctionPointer getSetDoubleArrayRegion(); @CField void setSetDoubleArrayRegion(CFunctionPointer p);
// Native-method registration, monitors, JavaVM access, regions, and critical sections.
@CField RegisterNativesFunctionPointer getRegisterNatives(); @CField void setRegisterNatives(RegisterNativesFunctionPointer p); @CField CFunctionPointer getUnregisterNatives(); @CField void setUnregisterNatives(CFunctionPointer p); @CField CFunctionPointer getMonitorEnter(); @CField void setMonitorEnter(CFunctionPointer p); @CField CFunctionPointer getMonitorExit(); @CField void setMonitorExit(CFunctionPointer p); @CField CFunctionPointer getGetJavaVM(); @CField void setGetJavaVM(CFunctionPointer p); @CField CFunctionPointer getGetStringRegion(); @CField void setGetStringRegion(CFunctionPointer p); @CField CFunctionPointer getGetStringUTFRegion(); @CField void setGetStringUTFRegion(CFunctionPointer p); @CField CFunctionPointer getGetPrimitiveArrayCritical(); @CField void setGetPrimitiveArrayCritical(CFunctionPointer p); @CField CFunctionPointer getReleasePrimitiveArrayCritical(); @CField void setReleasePrimitiveArrayCritical(CFunctionPointer p); @CField CFunctionPointer getGetStringCritical(); @CField void
setGetStringCritical(CFunctionPointer p); @CField CFunctionPointer getReleaseStringCritical(); @CField void setReleaseStringCritical(CFunctionPointer p); @CField CFunctionPointer getNewWeakGlobalRef(); @CField void setNewWeakGlobalRef(CFunctionPointer p); @CField CFunctionPointer getDeleteWeakGlobalRef(); @CField void setDeleteWeakGlobalRef(CFunctionPointer p); @CField ExceptionCheckFunctionPointer getExceptionCheck(); @CField void setExceptionCheck(ExceptionCheckFunctionPointer p); @CField CFunctionPointer getNewDirectByteBuffer(); @CField void setNewDirectByteBuffer(CFunctionPointer p); @CField CFunctionPointer getGetDirectBufferAddress(); @CField void setGetDirectBufferAddress(CFunctionPointer p); @CField CFunctionPointer getGetDirectBufferCapacity(); @CField void setGetDirectBufferCapacity(CFunctionPointer p); @CField CFunctionPointer getGetObjectRefType(); // JNI 1.6
@CField void setGetObjectRefType(CFunctionPointer p); }
googleapis/google-cloud-java
36,139
java-websecurityscanner/proto-google-cloud-websecurityscanner-v1/src/main/java/com/google/cloud/websecurityscanner/v1/ListFindingsRequest.java
/* * Copyright 2025 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ // Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/cloud/websecurityscanner/v1/web_security_scanner.proto // Protobuf Java Version: 3.25.8 package com.google.cloud.websecurityscanner.v1; /** * * * <pre> * Request for the `ListFindings` method. * </pre> * * Protobuf type {@code google.cloud.websecurityscanner.v1.ListFindingsRequest} */ public final class ListFindingsRequest extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.cloud.websecurityscanner.v1.ListFindingsRequest) ListFindingsRequestOrBuilder { private static final long serialVersionUID = 0L; // Use ListFindingsRequest.newBuilder() to construct. 
private ListFindingsRequest(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private ListFindingsRequest() { parent_ = ""; filter_ = ""; pageToken_ = ""; } @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance(UnusedPrivateParameter unused) { return new ListFindingsRequest(); } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.websecurityscanner.v1.WebSecurityScannerProto .internal_static_google_cloud_websecurityscanner_v1_ListFindingsRequest_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.websecurityscanner.v1.WebSecurityScannerProto .internal_static_google_cloud_websecurityscanner_v1_ListFindingsRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.websecurityscanner.v1.ListFindingsRequest.class, com.google.cloud.websecurityscanner.v1.ListFindingsRequest.Builder.class); } public static final int PARENT_FIELD_NUMBER = 1; @SuppressWarnings("serial") private volatile java.lang.Object parent_ = ""; /** * * * <pre> * Required. The parent resource name, which should be a scan run resource name in the * format * 'projects/{projectId}/scanConfigs/{scanConfigId}/scanRuns/{scanRunId}'. * </pre> * * <code>string parent = 1;</code> * * @return The parent. */ @java.lang.Override public java.lang.String getParent() { java.lang.Object ref = parent_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); parent_ = s; return s; } } /** * * * <pre> * Required. The parent resource name, which should be a scan run resource name in the * format * 'projects/{projectId}/scanConfigs/{scanConfigId}/scanRuns/{scanRunId}'. * </pre> * * <code>string parent = 1;</code> * * @return The bytes for parent. 
*/ @java.lang.Override public com.google.protobuf.ByteString getParentBytes() { java.lang.Object ref = parent_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); parent_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } public static final int FILTER_FIELD_NUMBER = 2; @SuppressWarnings("serial") private volatile java.lang.Object filter_ = ""; /** * * * <pre> * The filter expression. The expression must be in the format: &lt;field&gt; * &lt;operator&gt; &lt;value&gt;. * Supported field: 'finding_type'. * Supported operator: '='. * </pre> * * <code>string filter = 2;</code> * * @return The filter. */ @java.lang.Override public java.lang.String getFilter() { java.lang.Object ref = filter_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); filter_ = s; return s; } } /** * * * <pre> * The filter expression. The expression must be in the format: &lt;field&gt; * &lt;operator&gt; &lt;value&gt;. * Supported field: 'finding_type'. * Supported operator: '='. * </pre> * * <code>string filter = 2;</code> * * @return The bytes for filter. */ @java.lang.Override public com.google.protobuf.ByteString getFilterBytes() { java.lang.Object ref = filter_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); filter_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } public static final int PAGE_TOKEN_FIELD_NUMBER = 3; @SuppressWarnings("serial") private volatile java.lang.Object pageToken_ = ""; /** * * * <pre> * A token identifying a page of results to be returned. This should be a * `next_page_token` value returned from a previous List request. * If unspecified, the first page of results is returned. 
* </pre> * * <code>string page_token = 3;</code> * * @return The pageToken. */ @java.lang.Override public java.lang.String getPageToken() { java.lang.Object ref = pageToken_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); pageToken_ = s; return s; } } /** * * * <pre> * A token identifying a page of results to be returned. This should be a * `next_page_token` value returned from a previous List request. * If unspecified, the first page of results is returned. * </pre> * * <code>string page_token = 3;</code> * * @return The bytes for pageToken. */ @java.lang.Override public com.google.protobuf.ByteString getPageTokenBytes() { java.lang.Object ref = pageToken_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); pageToken_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } public static final int PAGE_SIZE_FIELD_NUMBER = 4; private int pageSize_ = 0; /** * * * <pre> * The maximum number of Findings to return, can be limited by server. * If not specified or not positive, the implementation will select a * reasonable value. * </pre> * * <code>int32 page_size = 4;</code> * * @return The pageSize. 
*/ @java.lang.Override public int getPageSize() { return pageSize_; } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(parent_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 1, parent_); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(filter_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 2, filter_); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(pageToken_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 3, pageToken_); } if (pageSize_ != 0) { output.writeInt32(4, pageSize_); } getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(parent_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, parent_); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(filter_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, filter_); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(pageToken_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(3, pageToken_); } if (pageSize_ != 0) { size += com.google.protobuf.CodedOutputStream.computeInt32Size(4, pageSize_); } size += getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof com.google.cloud.websecurityscanner.v1.ListFindingsRequest)) { return super.equals(obj); } 
com.google.cloud.websecurityscanner.v1.ListFindingsRequest other = (com.google.cloud.websecurityscanner.v1.ListFindingsRequest) obj; if (!getParent().equals(other.getParent())) return false; if (!getFilter().equals(other.getFilter())) return false; if (!getPageToken().equals(other.getPageToken())) return false; if (getPageSize() != other.getPageSize()) return false; if (!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); hash = (37 * hash) + PARENT_FIELD_NUMBER; hash = (53 * hash) + getParent().hashCode(); hash = (37 * hash) + FILTER_FIELD_NUMBER; hash = (53 * hash) + getFilter().hashCode(); hash = (37 * hash) + PAGE_TOKEN_FIELD_NUMBER; hash = (53 * hash) + getPageToken().hashCode(); hash = (37 * hash) + PAGE_SIZE_FIELD_NUMBER; hash = (53 * hash) + getPageSize(); hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static com.google.cloud.websecurityscanner.v1.ListFindingsRequest parseFrom( java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.websecurityscanner.v1.ListFindingsRequest parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.websecurityscanner.v1.ListFindingsRequest parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.websecurityscanner.v1.ListFindingsRequest parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws 
com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.websecurityscanner.v1.ListFindingsRequest parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.websecurityscanner.v1.ListFindingsRequest parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.websecurityscanner.v1.ListFindingsRequest parseFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.websecurityscanner.v1.ListFindingsRequest parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.websecurityscanner.v1.ListFindingsRequest parseDelimitedFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); } public static com.google.cloud.websecurityscanner.v1.ListFindingsRequest parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.websecurityscanner.v1.ListFindingsRequest parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.websecurityscanner.v1.ListFindingsRequest parseFrom( 
com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder( com.google.cloud.websecurityscanner.v1.ListFindingsRequest prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * * * <pre> * Request for the `ListFindings` method. * </pre> * * Protobuf type {@code google.cloud.websecurityscanner.v1.ListFindingsRequest} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:google.cloud.websecurityscanner.v1.ListFindingsRequest) com.google.cloud.websecurityscanner.v1.ListFindingsRequestOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.websecurityscanner.v1.WebSecurityScannerProto .internal_static_google_cloud_websecurityscanner_v1_ListFindingsRequest_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.websecurityscanner.v1.WebSecurityScannerProto .internal_static_google_cloud_websecurityscanner_v1_ListFindingsRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.websecurityscanner.v1.ListFindingsRequest.class, com.google.cloud.websecurityscanner.v1.ListFindingsRequest.Builder.class); } 
// Construct using com.google.cloud.websecurityscanner.v1.ListFindingsRequest.newBuilder() private Builder() {} private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); } @java.lang.Override public Builder clear() { super.clear(); bitField0_ = 0; parent_ = ""; filter_ = ""; pageToken_ = ""; pageSize_ = 0; return this; } @java.lang.Override public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return com.google.cloud.websecurityscanner.v1.WebSecurityScannerProto .internal_static_google_cloud_websecurityscanner_v1_ListFindingsRequest_descriptor; } @java.lang.Override public com.google.cloud.websecurityscanner.v1.ListFindingsRequest getDefaultInstanceForType() { return com.google.cloud.websecurityscanner.v1.ListFindingsRequest.getDefaultInstance(); } @java.lang.Override public com.google.cloud.websecurityscanner.v1.ListFindingsRequest build() { com.google.cloud.websecurityscanner.v1.ListFindingsRequest result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public com.google.cloud.websecurityscanner.v1.ListFindingsRequest buildPartial() { com.google.cloud.websecurityscanner.v1.ListFindingsRequest result = new com.google.cloud.websecurityscanner.v1.ListFindingsRequest(this); if (bitField0_ != 0) { buildPartial0(result); } onBuilt(); return result; } private void buildPartial0(com.google.cloud.websecurityscanner.v1.ListFindingsRequest result) { int from_bitField0_ = bitField0_; if (((from_bitField0_ & 0x00000001) != 0)) { result.parent_ = parent_; } if (((from_bitField0_ & 0x00000002) != 0)) { result.filter_ = filter_; } if (((from_bitField0_ & 0x00000004) != 0)) { result.pageToken_ = pageToken_; } if (((from_bitField0_ & 0x00000008) != 0)) { result.pageSize_ = pageSize_; } } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( 
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof com.google.cloud.websecurityscanner.v1.ListFindingsRequest) { return mergeFrom((com.google.cloud.websecurityscanner.v1.ListFindingsRequest) other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(com.google.cloud.websecurityscanner.v1.ListFindingsRequest other) { if (other == com.google.cloud.websecurityscanner.v1.ListFindingsRequest.getDefaultInstance()) return this; if (!other.getParent().isEmpty()) { parent_ = other.parent_; bitField0_ |= 0x00000001; onChanged(); } if (!other.getFilter().isEmpty()) { filter_ = other.filter_; bitField0_ |= 0x00000002; onChanged(); } if (!other.getPageToken().isEmpty()) { pageToken_ = other.pageToken_; bitField0_ |= 0x00000004; onChanged(); } if (other.getPageSize() != 0) { setPageSize(other.getPageSize()); } this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { if 
(extensionRegistry == null) { throw new java.lang.NullPointerException(); } try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { parent_ = input.readStringRequireUtf8(); bitField0_ |= 0x00000001; break; } // case 10 case 18: { filter_ = input.readStringRequireUtf8(); bitField0_ |= 0x00000002; break; } // case 18 case 26: { pageToken_ = input.readStringRequireUtf8(); bitField0_ |= 0x00000004; break; } // case 26 case 32: { pageSize_ = input.readInt32(); bitField0_ |= 0x00000008; break; } // case 32 default: { if (!super.parseUnknownField(input, extensionRegistry, tag)) { done = true; // was an endgroup tag } break; } // default: } // switch (tag) } // while (!done) } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.unwrapIOException(); } finally { onChanged(); } // finally return this; } private int bitField0_; private java.lang.Object parent_ = ""; /** * * * <pre> * Required. The parent resource name, which should be a scan run resource name in the * format * 'projects/{projectId}/scanConfigs/{scanConfigId}/scanRuns/{scanRunId}'. * </pre> * * <code>string parent = 1;</code> * * @return The parent. */ public java.lang.String getParent() { java.lang.Object ref = parent_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); parent_ = s; return s; } else { return (java.lang.String) ref; } } /** * * * <pre> * Required. The parent resource name, which should be a scan run resource name in the * format * 'projects/{projectId}/scanConfigs/{scanConfigId}/scanRuns/{scanRunId}'. * </pre> * * <code>string parent = 1;</code> * * @return The bytes for parent. 
*/ public com.google.protobuf.ByteString getParentBytes() { java.lang.Object ref = parent_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); parent_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * * * <pre> * Required. The parent resource name, which should be a scan run resource name in the * format * 'projects/{projectId}/scanConfigs/{scanConfigId}/scanRuns/{scanRunId}'. * </pre> * * <code>string parent = 1;</code> * * @param value The parent to set. * @return This builder for chaining. */ public Builder setParent(java.lang.String value) { if (value == null) { throw new NullPointerException(); } parent_ = value; bitField0_ |= 0x00000001; onChanged(); return this; } /** * * * <pre> * Required. The parent resource name, which should be a scan run resource name in the * format * 'projects/{projectId}/scanConfigs/{scanConfigId}/scanRuns/{scanRunId}'. * </pre> * * <code>string parent = 1;</code> * * @return This builder for chaining. */ public Builder clearParent() { parent_ = getDefaultInstance().getParent(); bitField0_ = (bitField0_ & ~0x00000001); onChanged(); return this; } /** * * * <pre> * Required. The parent resource name, which should be a scan run resource name in the * format * 'projects/{projectId}/scanConfigs/{scanConfigId}/scanRuns/{scanRunId}'. * </pre> * * <code>string parent = 1;</code> * * @param value The bytes for parent to set. * @return This builder for chaining. */ public Builder setParentBytes(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); parent_ = value; bitField0_ |= 0x00000001; onChanged(); return this; } private java.lang.Object filter_ = ""; /** * * * <pre> * The filter expression. The expression must be in the format: &lt;field&gt; * &lt;operator&gt; &lt;value&gt;. * Supported field: 'finding_type'. * Supported operator: '='. 
* </pre> * * <code>string filter = 2;</code> * * @return The filter. */ public java.lang.String getFilter() { java.lang.Object ref = filter_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); filter_ = s; return s; } else { return (java.lang.String) ref; } } /** * * * <pre> * The filter expression. The expression must be in the format: &lt;field&gt; * &lt;operator&gt; &lt;value&gt;. * Supported field: 'finding_type'. * Supported operator: '='. * </pre> * * <code>string filter = 2;</code> * * @return The bytes for filter. */ public com.google.protobuf.ByteString getFilterBytes() { java.lang.Object ref = filter_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); filter_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * * * <pre> * The filter expression. The expression must be in the format: &lt;field&gt; * &lt;operator&gt; &lt;value&gt;. * Supported field: 'finding_type'. * Supported operator: '='. * </pre> * * <code>string filter = 2;</code> * * @param value The filter to set. * @return This builder for chaining. */ public Builder setFilter(java.lang.String value) { if (value == null) { throw new NullPointerException(); } filter_ = value; bitField0_ |= 0x00000002; onChanged(); return this; } /** * * * <pre> * The filter expression. The expression must be in the format: &lt;field&gt; * &lt;operator&gt; &lt;value&gt;. * Supported field: 'finding_type'. * Supported operator: '='. * </pre> * * <code>string filter = 2;</code> * * @return This builder for chaining. */ public Builder clearFilter() { filter_ = getDefaultInstance().getFilter(); bitField0_ = (bitField0_ & ~0x00000002); onChanged(); return this; } /** * * * <pre> * The filter expression. The expression must be in the format: &lt;field&gt; * &lt;operator&gt; &lt;value&gt;. 
* Supported field: 'finding_type'. * Supported operator: '='. * </pre> * * <code>string filter = 2;</code> * * @param value The bytes for filter to set. * @return This builder for chaining. */ public Builder setFilterBytes(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); filter_ = value; bitField0_ |= 0x00000002; onChanged(); return this; } private java.lang.Object pageToken_ = ""; /** * * * <pre> * A token identifying a page of results to be returned. This should be a * `next_page_token` value returned from a previous List request. * If unspecified, the first page of results is returned. * </pre> * * <code>string page_token = 3;</code> * * @return The pageToken. */ public java.lang.String getPageToken() { java.lang.Object ref = pageToken_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); pageToken_ = s; return s; } else { return (java.lang.String) ref; } } /** * * * <pre> * A token identifying a page of results to be returned. This should be a * `next_page_token` value returned from a previous List request. * If unspecified, the first page of results is returned. * </pre> * * <code>string page_token = 3;</code> * * @return The bytes for pageToken. */ public com.google.protobuf.ByteString getPageTokenBytes() { java.lang.Object ref = pageToken_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); pageToken_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * * * <pre> * A token identifying a page of results to be returned. This should be a * `next_page_token` value returned from a previous List request. * If unspecified, the first page of results is returned. * </pre> * * <code>string page_token = 3;</code> * * @param value The pageToken to set. 
* @return This builder for chaining. */ public Builder setPageToken(java.lang.String value) { if (value == null) { throw new NullPointerException(); } pageToken_ = value; bitField0_ |= 0x00000004; onChanged(); return this; } /** * * * <pre> * A token identifying a page of results to be returned. This should be a * `next_page_token` value returned from a previous List request. * If unspecified, the first page of results is returned. * </pre> * * <code>string page_token = 3;</code> * * @return This builder for chaining. */ public Builder clearPageToken() { pageToken_ = getDefaultInstance().getPageToken(); bitField0_ = (bitField0_ & ~0x00000004); onChanged(); return this; } /** * * * <pre> * A token identifying a page of results to be returned. This should be a * `next_page_token` value returned from a previous List request. * If unspecified, the first page of results is returned. * </pre> * * <code>string page_token = 3;</code> * * @param value The bytes for pageToken to set. * @return This builder for chaining. */ public Builder setPageTokenBytes(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); pageToken_ = value; bitField0_ |= 0x00000004; onChanged(); return this; } private int pageSize_; /** * * * <pre> * The maximum number of Findings to return, can be limited by server. * If not specified or not positive, the implementation will select a * reasonable value. * </pre> * * <code>int32 page_size = 4;</code> * * @return The pageSize. */ @java.lang.Override public int getPageSize() { return pageSize_; } /** * * * <pre> * The maximum number of Findings to return, can be limited by server. * If not specified or not positive, the implementation will select a * reasonable value. * </pre> * * <code>int32 page_size = 4;</code> * * @param value The pageSize to set. * @return This builder for chaining. 
*/ public Builder setPageSize(int value) { pageSize_ = value; bitField0_ |= 0x00000008; onChanged(); return this; } /** * * * <pre> * The maximum number of Findings to return, can be limited by server. * If not specified or not positive, the implementation will select a * reasonable value. * </pre> * * <code>int32 page_size = 4;</code> * * @return This builder for chaining. */ public Builder clearPageSize() { bitField0_ = (bitField0_ & ~0x00000008); pageSize_ = 0; onChanged(); return this; } @java.lang.Override public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:google.cloud.websecurityscanner.v1.ListFindingsRequest) } // @@protoc_insertion_point(class_scope:google.cloud.websecurityscanner.v1.ListFindingsRequest) private static final com.google.cloud.websecurityscanner.v1.ListFindingsRequest DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new com.google.cloud.websecurityscanner.v1.ListFindingsRequest(); } public static com.google.cloud.websecurityscanner.v1.ListFindingsRequest getDefaultInstance() { return DEFAULT_INSTANCE; } private static final com.google.protobuf.Parser<ListFindingsRequest> PARSER = new com.google.protobuf.AbstractParser<ListFindingsRequest>() { @java.lang.Override public ListFindingsRequest parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { Builder builder = newBuilder(); try { builder.mergeFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(builder.buildPartial()); } catch (com.google.protobuf.UninitializedMessageException e) { throw 
e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException(e) .setUnfinishedMessage(builder.buildPartial()); } return builder.buildPartial(); } }; public static com.google.protobuf.Parser<ListFindingsRequest> parser() { return PARSER; } @java.lang.Override public com.google.protobuf.Parser<ListFindingsRequest> getParserForType() { return PARSER; } @java.lang.Override public com.google.cloud.websecurityscanner.v1.ListFindingsRequest getDefaultInstanceForType() { return DEFAULT_INSTANCE; } }
googleapis/google-cloud-java
36,154
java-cloudsupport/proto-google-cloud-cloudsupport-v2beta/src/main/java/com/google/cloud/support/v2beta/ShowFeedRequest.java
/* * Copyright 2025 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ // Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/cloud/support/v2beta/feed_service.proto // Protobuf Java Version: 3.25.8 package com.google.cloud.support.v2beta; /** * * * <pre> * The request message for the ShowFeed endpoint. * </pre> * * Protobuf type {@code google.cloud.support.v2beta.ShowFeedRequest} */ public final class ShowFeedRequest extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.cloud.support.v2beta.ShowFeedRequest) ShowFeedRequestOrBuilder { private static final long serialVersionUID = 0L; // Use ShowFeedRequest.newBuilder() to construct. 
private ShowFeedRequest(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private ShowFeedRequest() { parent_ = ""; orderBy_ = ""; pageToken_ = ""; } @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance(UnusedPrivateParameter unused) { return new ShowFeedRequest(); } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.support.v2beta.FeedServiceProto .internal_static_google_cloud_support_v2beta_ShowFeedRequest_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.support.v2beta.FeedServiceProto .internal_static_google_cloud_support_v2beta_ShowFeedRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.support.v2beta.ShowFeedRequest.class, com.google.cloud.support.v2beta.ShowFeedRequest.Builder.class); } public static final int PARENT_FIELD_NUMBER = 1; @SuppressWarnings("serial") private volatile java.lang.Object parent_ = ""; /** * * * <pre> * Required. The resource name of the case for which feed items should be * listed. * </pre> * * <code> * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @return The parent. */ @java.lang.Override public java.lang.String getParent() { java.lang.Object ref = parent_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); parent_ = s; return s; } } /** * * * <pre> * Required. The resource name of the case for which feed items should be * listed. * </pre> * * <code> * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @return The bytes for parent. 
*/ @java.lang.Override public com.google.protobuf.ByteString getParentBytes() { java.lang.Object ref = parent_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); parent_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } public static final int ORDER_BY_FIELD_NUMBER = 2; @SuppressWarnings("serial") private volatile java.lang.Object orderBy_ = ""; /** * * * <pre> * Optional. Field to order feed items by, followed by `asc` or `desc` * postfix. The only valid field is * `creation_time`. This list is case-insensitive, default sorting order is * ascending, and the redundant space characters are insignificant. * * Example: `creation_time desc` * </pre> * * <code>string order_by = 2 [(.google.api.field_behavior) = OPTIONAL];</code> * * @return The orderBy. */ @java.lang.Override public java.lang.String getOrderBy() { java.lang.Object ref = orderBy_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); orderBy_ = s; return s; } } /** * * * <pre> * Optional. Field to order feed items by, followed by `asc` or `desc` * postfix. The only valid field is * `creation_time`. This list is case-insensitive, default sorting order is * ascending, and the redundant space characters are insignificant. * * Example: `creation_time desc` * </pre> * * <code>string order_by = 2 [(.google.api.field_behavior) = OPTIONAL];</code> * * @return The bytes for orderBy. 
*/ @java.lang.Override public com.google.protobuf.ByteString getOrderByBytes() { java.lang.Object ref = orderBy_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); orderBy_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } public static final int PAGE_SIZE_FIELD_NUMBER = 3; private int pageSize_ = 0; /** * * * <pre> * Optional. The maximum number of feed items fetched with each request. * </pre> * * <code>int32 page_size = 3 [(.google.api.field_behavior) = OPTIONAL];</code> * * @return The pageSize. */ @java.lang.Override public int getPageSize() { return pageSize_; } public static final int PAGE_TOKEN_FIELD_NUMBER = 4; @SuppressWarnings("serial") private volatile java.lang.Object pageToken_ = ""; /** * * * <pre> * Optional. A token identifying the page of results to return. If * unspecified, it retrieves the first page. * </pre> * * <code>string page_token = 4 [(.google.api.field_behavior) = OPTIONAL];</code> * * @return The pageToken. */ @java.lang.Override public java.lang.String getPageToken() { java.lang.Object ref = pageToken_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); pageToken_ = s; return s; } } /** * * * <pre> * Optional. A token identifying the page of results to return. If * unspecified, it retrieves the first page. * </pre> * * <code>string page_token = 4 [(.google.api.field_behavior) = OPTIONAL];</code> * * @return The bytes for pageToken. 
*/ @java.lang.Override public com.google.protobuf.ByteString getPageTokenBytes() { java.lang.Object ref = pageToken_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); pageToken_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(parent_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 1, parent_); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(orderBy_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 2, orderBy_); } if (pageSize_ != 0) { output.writeInt32(3, pageSize_); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(pageToken_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 4, pageToken_); } getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(parent_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, parent_); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(orderBy_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, orderBy_); } if (pageSize_ != 0) { size += com.google.protobuf.CodedOutputStream.computeInt32Size(3, pageSize_); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(pageToken_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(4, pageToken_); } size += getUnknownFields().getSerializedSize(); memoizedSize = 
size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof com.google.cloud.support.v2beta.ShowFeedRequest)) { return super.equals(obj); } com.google.cloud.support.v2beta.ShowFeedRequest other = (com.google.cloud.support.v2beta.ShowFeedRequest) obj; if (!getParent().equals(other.getParent())) return false; if (!getOrderBy().equals(other.getOrderBy())) return false; if (getPageSize() != other.getPageSize()) return false; if (!getPageToken().equals(other.getPageToken())) return false; if (!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); hash = (37 * hash) + PARENT_FIELD_NUMBER; hash = (53 * hash) + getParent().hashCode(); hash = (37 * hash) + ORDER_BY_FIELD_NUMBER; hash = (53 * hash) + getOrderBy().hashCode(); hash = (37 * hash) + PAGE_SIZE_FIELD_NUMBER; hash = (53 * hash) + getPageSize(); hash = (37 * hash) + PAGE_TOKEN_FIELD_NUMBER; hash = (53 * hash) + getPageToken().hashCode(); hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static com.google.cloud.support.v2beta.ShowFeedRequest parseFrom(java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.support.v2beta.ShowFeedRequest parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.support.v2beta.ShowFeedRequest parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.support.v2beta.ShowFeedRequest 
parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.support.v2beta.ShowFeedRequest parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.support.v2beta.ShowFeedRequest parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.support.v2beta.ShowFeedRequest parseFrom(java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.support.v2beta.ShowFeedRequest parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.support.v2beta.ShowFeedRequest parseDelimitedFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); } public static com.google.cloud.support.v2beta.ShowFeedRequest parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.support.v2beta.ShowFeedRequest parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static 
com.google.cloud.support.v2beta.ShowFeedRequest parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder(com.google.cloud.support.v2beta.ShowFeedRequest prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * * * <pre> * The request message for the ShowFeed endpoint. * </pre> * * Protobuf type {@code google.cloud.support.v2beta.ShowFeedRequest} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:google.cloud.support.v2beta.ShowFeedRequest) com.google.cloud.support.v2beta.ShowFeedRequestOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.support.v2beta.FeedServiceProto .internal_static_google_cloud_support_v2beta_ShowFeedRequest_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.support.v2beta.FeedServiceProto .internal_static_google_cloud_support_v2beta_ShowFeedRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.support.v2beta.ShowFeedRequest.class, com.google.cloud.support.v2beta.ShowFeedRequest.Builder.class); } // Construct using 
com.google.cloud.support.v2beta.ShowFeedRequest.newBuilder() private Builder() {} private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); } @java.lang.Override public Builder clear() { super.clear(); bitField0_ = 0; parent_ = ""; orderBy_ = ""; pageSize_ = 0; pageToken_ = ""; return this; } @java.lang.Override public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return com.google.cloud.support.v2beta.FeedServiceProto .internal_static_google_cloud_support_v2beta_ShowFeedRequest_descriptor; } @java.lang.Override public com.google.cloud.support.v2beta.ShowFeedRequest getDefaultInstanceForType() { return com.google.cloud.support.v2beta.ShowFeedRequest.getDefaultInstance(); } @java.lang.Override public com.google.cloud.support.v2beta.ShowFeedRequest build() { com.google.cloud.support.v2beta.ShowFeedRequest result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public com.google.cloud.support.v2beta.ShowFeedRequest buildPartial() { com.google.cloud.support.v2beta.ShowFeedRequest result = new com.google.cloud.support.v2beta.ShowFeedRequest(this); if (bitField0_ != 0) { buildPartial0(result); } onBuilt(); return result; } private void buildPartial0(com.google.cloud.support.v2beta.ShowFeedRequest result) { int from_bitField0_ = bitField0_; if (((from_bitField0_ & 0x00000001) != 0)) { result.parent_ = parent_; } if (((from_bitField0_ & 0x00000002) != 0)) { result.orderBy_ = orderBy_; } if (((from_bitField0_ & 0x00000004) != 0)) { result.pageSize_ = pageSize_; } if (((from_bitField0_ & 0x00000008) != 0)) { result.pageToken_ = pageToken_; } } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder 
clearField(com.google.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof com.google.cloud.support.v2beta.ShowFeedRequest) { return mergeFrom((com.google.cloud.support.v2beta.ShowFeedRequest) other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(com.google.cloud.support.v2beta.ShowFeedRequest other) { if (other == com.google.cloud.support.v2beta.ShowFeedRequest.getDefaultInstance()) return this; if (!other.getParent().isEmpty()) { parent_ = other.parent_; bitField0_ |= 0x00000001; onChanged(); } if (!other.getOrderBy().isEmpty()) { orderBy_ = other.orderBy_; bitField0_ |= 0x00000002; onChanged(); } if (other.getPageSize() != 0) { setPageSize(other.getPageSize()); } if (!other.getPageToken().isEmpty()) { pageToken_ = other.pageToken_; bitField0_ |= 0x00000008; onChanged(); } this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { 
parent_ = input.readStringRequireUtf8(); bitField0_ |= 0x00000001; break; } // case 10 case 18: { orderBy_ = input.readStringRequireUtf8(); bitField0_ |= 0x00000002; break; } // case 18 case 24: { pageSize_ = input.readInt32(); bitField0_ |= 0x00000004; break; } // case 24 case 34: { pageToken_ = input.readStringRequireUtf8(); bitField0_ |= 0x00000008; break; } // case 34 default: { if (!super.parseUnknownField(input, extensionRegistry, tag)) { done = true; // was an endgroup tag } break; } // default: } // switch (tag) } // while (!done) } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.unwrapIOException(); } finally { onChanged(); } // finally return this; } private int bitField0_; private java.lang.Object parent_ = ""; /** * * * <pre> * Required. The resource name of the case for which feed items should be * listed. * </pre> * * <code> * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @return The parent. */ public java.lang.String getParent() { java.lang.Object ref = parent_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); parent_ = s; return s; } else { return (java.lang.String) ref; } } /** * * * <pre> * Required. The resource name of the case for which feed items should be * listed. * </pre> * * <code> * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @return The bytes for parent. */ public com.google.protobuf.ByteString getParentBytes() { java.lang.Object ref = parent_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); parent_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * * * <pre> * Required. The resource name of the case for which feed items should be * listed. 
* </pre> * * <code> * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @param value The parent to set. * @return This builder for chaining. */ public Builder setParent(java.lang.String value) { if (value == null) { throw new NullPointerException(); } parent_ = value; bitField0_ |= 0x00000001; onChanged(); return this; } /** * * * <pre> * Required. The resource name of the case for which feed items should be * listed. * </pre> * * <code> * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @return This builder for chaining. */ public Builder clearParent() { parent_ = getDefaultInstance().getParent(); bitField0_ = (bitField0_ & ~0x00000001); onChanged(); return this; } /** * * * <pre> * Required. The resource name of the case for which feed items should be * listed. * </pre> * * <code> * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @param value The bytes for parent to set. * @return This builder for chaining. */ public Builder setParentBytes(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); parent_ = value; bitField0_ |= 0x00000001; onChanged(); return this; } private java.lang.Object orderBy_ = ""; /** * * * <pre> * Optional. Field to order feed items by, followed by `asc` or `desc` * postfix. The only valid field is * `creation_time`. This list is case-insensitive, default sorting order is * ascending, and the redundant space characters are insignificant. * * Example: `creation_time desc` * </pre> * * <code>string order_by = 2 [(.google.api.field_behavior) = OPTIONAL];</code> * * @return The orderBy. 
*/ public java.lang.String getOrderBy() { java.lang.Object ref = orderBy_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); orderBy_ = s; return s; } else { return (java.lang.String) ref; } } /** * * * <pre> * Optional. Field to order feed items by, followed by `asc` or `desc` * postfix. The only valid field is * `creation_time`. This list is case-insensitive, default sorting order is * ascending, and the redundant space characters are insignificant. * * Example: `creation_time desc` * </pre> * * <code>string order_by = 2 [(.google.api.field_behavior) = OPTIONAL];</code> * * @return The bytes for orderBy. */ public com.google.protobuf.ByteString getOrderByBytes() { java.lang.Object ref = orderBy_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); orderBy_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * * * <pre> * Optional. Field to order feed items by, followed by `asc` or `desc` * postfix. The only valid field is * `creation_time`. This list is case-insensitive, default sorting order is * ascending, and the redundant space characters are insignificant. * * Example: `creation_time desc` * </pre> * * <code>string order_by = 2 [(.google.api.field_behavior) = OPTIONAL];</code> * * @param value The orderBy to set. * @return This builder for chaining. */ public Builder setOrderBy(java.lang.String value) { if (value == null) { throw new NullPointerException(); } orderBy_ = value; bitField0_ |= 0x00000002; onChanged(); return this; } /** * * * <pre> * Optional. Field to order feed items by, followed by `asc` or `desc` * postfix. The only valid field is * `creation_time`. This list is case-insensitive, default sorting order is * ascending, and the redundant space characters are insignificant. 
* * Example: `creation_time desc` * </pre> * * <code>string order_by = 2 [(.google.api.field_behavior) = OPTIONAL];</code> * * @return This builder for chaining. */ public Builder clearOrderBy() { orderBy_ = getDefaultInstance().getOrderBy(); bitField0_ = (bitField0_ & ~0x00000002); onChanged(); return this; } /** * * * <pre> * Optional. Field to order feed items by, followed by `asc` or `desc` * postfix. The only valid field is * `creation_time`. This list is case-insensitive, default sorting order is * ascending, and the redundant space characters are insignificant. * * Example: `creation_time desc` * </pre> * * <code>string order_by = 2 [(.google.api.field_behavior) = OPTIONAL];</code> * * @param value The bytes for orderBy to set. * @return This builder for chaining. */ public Builder setOrderByBytes(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); orderBy_ = value; bitField0_ |= 0x00000002; onChanged(); return this; } private int pageSize_; /** * * * <pre> * Optional. The maximum number of feed items fetched with each request. * </pre> * * <code>int32 page_size = 3 [(.google.api.field_behavior) = OPTIONAL];</code> * * @return The pageSize. */ @java.lang.Override public int getPageSize() { return pageSize_; } /** * * * <pre> * Optional. The maximum number of feed items fetched with each request. * </pre> * * <code>int32 page_size = 3 [(.google.api.field_behavior) = OPTIONAL];</code> * * @param value The pageSize to set. * @return This builder for chaining. */ public Builder setPageSize(int value) { pageSize_ = value; bitField0_ |= 0x00000004; onChanged(); return this; } /** * * * <pre> * Optional. The maximum number of feed items fetched with each request. * </pre> * * <code>int32 page_size = 3 [(.google.api.field_behavior) = OPTIONAL];</code> * * @return This builder for chaining. 
*/ public Builder clearPageSize() { bitField0_ = (bitField0_ & ~0x00000004); pageSize_ = 0; onChanged(); return this; } private java.lang.Object pageToken_ = ""; /** * * * <pre> * Optional. A token identifying the page of results to return. If * unspecified, it retrieves the first page. * </pre> * * <code>string page_token = 4 [(.google.api.field_behavior) = OPTIONAL];</code> * * @return The pageToken. */ public java.lang.String getPageToken() { java.lang.Object ref = pageToken_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); pageToken_ = s; return s; } else { return (java.lang.String) ref; } } /** * * * <pre> * Optional. A token identifying the page of results to return. If * unspecified, it retrieves the first page. * </pre> * * <code>string page_token = 4 [(.google.api.field_behavior) = OPTIONAL];</code> * * @return The bytes for pageToken. */ public com.google.protobuf.ByteString getPageTokenBytes() { java.lang.Object ref = pageToken_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); pageToken_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * * * <pre> * Optional. A token identifying the page of results to return. If * unspecified, it retrieves the first page. * </pre> * * <code>string page_token = 4 [(.google.api.field_behavior) = OPTIONAL];</code> * * @param value The pageToken to set. * @return This builder for chaining. */ public Builder setPageToken(java.lang.String value) { if (value == null) { throw new NullPointerException(); } pageToken_ = value; bitField0_ |= 0x00000008; onChanged(); return this; } /** * * * <pre> * Optional. A token identifying the page of results to return. If * unspecified, it retrieves the first page. 
* </pre> * * <code>string page_token = 4 [(.google.api.field_behavior) = OPTIONAL];</code> * * @return This builder for chaining. */ public Builder clearPageToken() { pageToken_ = getDefaultInstance().getPageToken(); bitField0_ = (bitField0_ & ~0x00000008); onChanged(); return this; } /** * * * <pre> * Optional. A token identifying the page of results to return. If * unspecified, it retrieves the first page. * </pre> * * <code>string page_token = 4 [(.google.api.field_behavior) = OPTIONAL];</code> * * @param value The bytes for pageToken to set. * @return This builder for chaining. */ public Builder setPageTokenBytes(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); pageToken_ = value; bitField0_ |= 0x00000008; onChanged(); return this; } @java.lang.Override public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:google.cloud.support.v2beta.ShowFeedRequest) } // @@protoc_insertion_point(class_scope:google.cloud.support.v2beta.ShowFeedRequest) private static final com.google.cloud.support.v2beta.ShowFeedRequest DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new com.google.cloud.support.v2beta.ShowFeedRequest(); } public static com.google.cloud.support.v2beta.ShowFeedRequest getDefaultInstance() { return DEFAULT_INSTANCE; } private static final com.google.protobuf.Parser<ShowFeedRequest> PARSER = new com.google.protobuf.AbstractParser<ShowFeedRequest>() { @java.lang.Override public ShowFeedRequest parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { Builder builder = 
newBuilder(); try { builder.mergeFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(builder.buildPartial()); } catch (com.google.protobuf.UninitializedMessageException e) { throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException(e) .setUnfinishedMessage(builder.buildPartial()); } return builder.buildPartial(); } }; public static com.google.protobuf.Parser<ShowFeedRequest> parser() { return PARSER; } @java.lang.Override public com.google.protobuf.Parser<ShowFeedRequest> getParserForType() { return PARSER; } @java.lang.Override public com.google.cloud.support.v2beta.ShowFeedRequest getDefaultInstanceForType() { return DEFAULT_INSTANCE; } }
apache/derby
36,403
java/org.apache.derby.client/org/apache/derby/client/net/NetXAResource.java
/* Derby - Class org.apache.derby.client.net.NetXAResource Licensed to the Apache Software Foundation (ASF) under one or more contributor license agreements. See the NOTICE file distributed with this work for additional information regarding copyright ownership. The ASF licenses this file to You under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ /********************************************************************** * * * Component Name = * * Package Name = org.apache.derby.client.net * * Descriptive Name = class implements XAResource * * Status = New code * * Function = Handle XA methods * * List of Classes * - NetXAResource * * Restrictions : None * **********************************************************************/ package org.apache.derby.client.net; import java.net.InetAddress; import java.net.UnknownHostException; import javax.sql.XAConnection; import javax.transaction.xa.XAException; import javax.transaction.xa.XAResource; import javax.transaction.xa.Xid; import org.apache.derby.client.ClientXid; import org.apache.derby.client.am.ClientConnection; import org.apache.derby.client.am.SqlException; import org.apache.derby.client.am.ClientMessageId; import org.apache.derby.client.am.SqlCode; import org.apache.derby.client.am.Utils; import org.apache.derby.client.am.XaException; import org.apache.derby.shared.common.reference.SQLState; public class NetXAResource implements XAResource { private static final int INITIAL_CALLINFO_ELEMENTS = 1; static final ClientXid nullXid = new ClientXid(); // xaFunction 
defines, shows which queued XA function is being performed static final int XAFUNC_COMMIT = 1; private static final int XAFUNC_END = 2; private static final int XAFUNC_FORGET = 3; private static final int XAFUNC_PREPARE = 4; private static final int XAFUNC_RECOVER = 5; static final int XAFUNC_ROLLBACK = 6; private static final int XAFUNC_START = 7; private static final String XAFUNCSTR_NONE = "No XA Function"; private static final String XAFUNCSTR_COMMIT = "XAResource.commit()"; private static final String XAFUNCSTR_END = "XAResource.end()"; private static final String XAFUNCSTR_FORGET = "XAResource.forget()"; private static final String XAFUNCSTR_PREPARE = "XAResource.prepare()"; private static final String XAFUNCSTR_RECOVER = "XAResource.recover()"; private static final String XAFUNCSTR_ROLLBACK = "XAResource.rollback()"; private static final String XAFUNCSTR_START = "XAResource.start()"; SqlException exceptionsOnXA = null; NetXAConnection netXAConn_; NetConnection conn_; private boolean keepIsolationLevel; // TODO: change to a single callInfo field (not an array) NetXACallInfo callInfoArray_[] = new NetXACallInfo[INITIAL_CALLINFO_ELEMENTS]; /** The value of the transaction timeout in seconds. */ private int timeoutSeconds = 0; public NetXAResource(XAConnection xaconn, NetXAConnection conn) { conn_ = conn.getNetConnection(); netXAConn_ = conn; conn.setNetXAResource(this); // link the primary connection to the first XACallInfo element conn_.currXACallInfoOffset_ = 0; // construct the NetXACallInfo object for the array. 
for (int i = 0; i < INITIAL_CALLINFO_ELEMENTS; ++i) { callInfoArray_[i] = new NetXACallInfo(null, XAResource.TMNOFLAGS, null); } // initialize the first XACallInfo element with the information from the // primary connection callInfoArray_[0].actualConn_ = conn; // ~~~ save conn_ connection variables in callInfoArray_[0] callInfoArray_[0].saveConnectionVariables(); } public void commit(Xid xid, boolean onePhase) throws XAException { NetAgent netAgent = conn_.netAgent_; int rc = XAResource.XA_OK; exceptionsOnXA = null; if (conn_.agent_.loggingEnabled()) { conn_.agent_.logWriter_.traceEntry(this, "commit", xid, onePhase); } if (conn_.isPhysicalConnClosed()) { connectionClosedFailure(); } // update the XACallInfo NetXACallInfo callInfo = callInfoArray_[conn_.currXACallInfoOffset_]; callInfo.xaFlags_ = (onePhase ? XAResource.TMONEPHASE : XAResource.TMNOFLAGS); callInfo.xid_ = xid; callInfo.xaRetVal_ = XAResource.XA_OK; // initialize XARETVAL try { netAgent.beginWriteChainOutsideUOW(); netAgent.netConnectionRequest_.writeXaCommit(conn_, xid); netAgent.flowOutsideUOW(); netAgent.netConnectionReply_.readXaCommit(conn_); if (callInfo.xaRetVal_ != XAResource.XA_OK) { // xaRetVal has possible error, format it callInfo.xaFunction_ = XAFUNC_COMMIT; rc = xaRetValErrorAccumSQL(callInfo, rc); callInfo.xaRetVal_ = XAResource.XA_OK; // re-initialize XARETVAL } netAgent.endReadChain(); } catch (SqlException sqle) { rc = getSqlExceptionXAErrorCode(sqle); exceptionsOnXA = Utils.accumulateSQLException(sqle, exceptionsOnXA); } if (rc != XAResource.XA_OK) { throwXAException(rc); } } /** * Get XAException.errorCode from SqlException * For disconnect exception, return XAER_RMFAIL * For other exceptions return XAER_RMERR * * For server side SQLExceptions during * XA operations the errorCode has already been determined * and wrapped in an XAException for return to the client. * see EmbedXAResource.wrapInXAException * * @param sqle SqlException to evaluate. 
* @return XAException.XAER_RMFAIL for disconnect exception, * XAException.XAER_RMERR for other exceptions. */ private int getSqlExceptionXAErrorCode(SqlException sqle) { int seErrorCode = sqle.getErrorCode(); return (seErrorCode == 40000 ? XAException.XAER_RMFAIL : XAException.XAER_RMERR); } /** * <p> * Ends the work performed on behalf of a transaction branch. The resource manager dissociates the XA resource from * the transaction branch specified and let the transaction be completed. * </p> * <p> * If TMSUSPEND is specified in flags, the transaction branch is temporarily suspended in incomplete state. The * transaction context is in suspened state and must be resumed via start with TMRESUME specified. * </p> * <p> * If TMFAIL is specified, the portion of work has failed. The resource manager may mark the transaction as * rollback-only * </p> * <p> * If TMSUCCESS is specified, the portion of work has completed successfully. * </p> * * @param xid A global transaction identifier that is the same as what was used previously in the start method. * @param flags One of TMSUCCESS, TMFAIL, or TMSUSPEND * * @throws XAException An error has occurred. Possible XAException values are XAER_RMERR, XAER_RMFAILED, XAER_NOTA, * XAER_INVAL, XAER_PROTO, or XA_RB*. 
*/ public void end(Xid xid, int flags) throws XAException { NetAgent netAgent = conn_.netAgent_; int rc = XAResource.XA_OK; exceptionsOnXA = null; if (conn_.agent_.loggingEnabled()) { conn_.agent_.logWriter_.traceEntry(this, "end", xid, flags); } if (conn_.isPhysicalConnClosed()) { connectionClosedFailure(); } NetXACallInfo callInfo = callInfoArray_[conn_.currXACallInfoOffset_]; callInfo.xaFlags_ = flags; callInfo.xid_ = xid; callInfo.xaRetVal_ = XAResource.XA_OK; // initialize XARETVAL try { netAgent.beginWriteChainOutsideUOW(); netAgent.netConnectionRequest_.writeXaEndUnitOfWork(conn_); netAgent.flowOutsideUOW(); rc = netAgent.netConnectionReply_.readXaEndUnitOfWork(conn_); if (callInfo.xaRetVal_ != XAResource.XA_OK) { // xaRetVal has possible error, format it callInfo.xaFunction_ = XAFUNC_END; rc = xaRetValErrorAccumSQL(callInfo, rc); callInfo.xaRetVal_ = XAResource.XA_OK; // re-initialize XARETVAL } netAgent.endReadChain(); } catch (SqlException sqle) { rc = getSqlExceptionXAErrorCode(sqle); exceptionsOnXA = Utils.accumulateSQLException(sqle, exceptionsOnXA); } if (rc != XAResource.XA_OK) { // The corresponding XA connection association state // is changed by setXaStateForXAException inside the call // to throwXAException according the error code of the XAException // to be thrown. throwXAException(rc); }else { conn_.setXAState(ClientConnection.XA_T0_NOT_ASSOCIATED); } } /** * Tell the resource manager to forget about a heuristically (MANUALLY) completed transaction branch. * * @param xid A global transaction identifier * * @throws XAException An error has occurred. Possible exception values are XAER_RMERR, XAER_RMFAIL, XAER_NOTA, * XAER_INVAL, or XAER_PROTO. 
*/ public void forget(Xid xid) throws XAException { NetAgent netAgent = conn_.netAgent_; int rc = XAResource.XA_OK; exceptionsOnXA = null; if (conn_.agent_.loggingEnabled()) { conn_.agent_.logWriter_.traceEntry(this, "forget", xid); } if (conn_.isPhysicalConnClosed()) { connectionClosedFailure(); } NetXACallInfo callInfo = callInfoArray_[conn_.currXACallInfoOffset_]; callInfo.xid_ = xid; callInfo.xaRetVal_ = XAResource.XA_OK; // initialize XARETVAL try { // flow the required PROTOCOL to the server netAgent.beginWriteChainOutsideUOW(); // sent the commit PROTOCOL netAgent.netConnectionRequest_.writeXaForget(netAgent.netConnection_, xid); netAgent.flowOutsideUOW(); // read the reply to the commit netAgent.netConnectionReply_.readXaForget(netAgent.netConnection_); netAgent.endReadChain(); if (callInfo.xaRetVal_ != XAResource.XA_OK) { // xaRetVal has possible error, format it callInfo.xaFunction_ = XAFUNC_FORGET; rc = xaRetValErrorAccumSQL(callInfo, rc); callInfo.xaRetVal_ = XAResource.XA_OK; // re-initialize XARETVAL } } catch (SqlException sqle) { exceptionsOnXA = Utils.accumulateSQLException(sqle, exceptionsOnXA); throwXAException(getSqlExceptionXAErrorCode(sqle)); } if (rc != XAResource.XA_OK) { throwXAException(rc); } } /** * Obtain the current transaction timeout value set for this XAResource * instance. If XAResource.setTransactionTimeout was not use prior to * invoking this method, the return value is 0; otherwise, the value * used in the previous setTransactionTimeout call is returned. * * @return the transaction timeout value in seconds. If the returned value * is equal to Integer.MAX_VALUE it means no timeout. 
*/ public int getTransactionTimeout() throws XAException { if (conn_.agent_.loggingEnabled()) { conn_.agent_.logWriter_.traceEntry(this, "getTransactionTimeout"); } exceptionsOnXA = null; if (conn_.isPhysicalConnClosed()) { connectionClosedFailure(); } if (conn_.agent_.loggingEnabled()) { conn_.agent_.logWriter_.traceExit(this, "getTransactionTimeout", timeoutSeconds); } return timeoutSeconds; } /** * Ask the resource manager to prepare for a transaction commit of the transaction specified in xid. * * @param xid A global transaction identifier * * @return A value indicating the resource manager's vote on the outcome of the transaction. The possible values * are: XA_RDONLY or XA_OK. If the resource manager wants to roll back the transaction, it should do so by * raising an appropriate XAException in the prepare method. * * @throws XAException An error has occurred. Possible exception values are: XA_RB*, XAER_RMERR, XAER_RMFAIL, * XAER_NOTA, XAER_INVAL, or XAER_PROTO. */ public int prepare(Xid xid) throws XAException { // public interface for prepare // just call prepareX with the recursion flag set to true exceptionsOnXA = null; if (conn_.agent_.loggingEnabled()) { conn_.agent_.logWriter_.traceEntry(this, "prepare", xid); } if (conn_.isPhysicalConnClosed()) { connectionClosedFailure(); } /// update the XACallInfo NetAgent netAgent = conn_.netAgent_; int rc = XAResource.XA_OK; NetXACallInfo callInfo = callInfoArray_[conn_.currXACallInfoOffset_]; callInfo.xid_ = xid; callInfo.xaRetVal_ = XAResource.XA_OK; // initialize XARETVAL try { netAgent.beginWriteChainOutsideUOW(); // sent the prepare PROTOCOL netAgent.netConnectionRequest_.writeXaPrepare(conn_); netAgent.flowOutsideUOW(); // read the reply to the prepare rc = netAgent.netConnectionReply_.readXaPrepare(conn_); if ((callInfo.xaRetVal_ != XAResource.XA_OK) && (callInfo.xaRetVal_ != XAException.XA_RDONLY)) { // xaRetVal has possible error, format it callInfo.xaFunction_ = XAFUNC_PREPARE; rc = 
xaRetValErrorAccumSQL(callInfo, rc); callInfo.xaRetVal_ = XAResource.XA_OK; // re-initialize XARETVAL } netAgent.endReadChain(); } catch (SqlException sqle) { rc = getSqlExceptionXAErrorCode(sqle); exceptionsOnXA = Utils.accumulateSQLException(sqle, exceptionsOnXA); } if ((rc != XAResource.XA_OK ) && (rc != XAResource.XA_RDONLY)) { throwXAException(rc); } if (conn_.agent_.loggingEnabled()) { conn_.agent_.logWriter_.traceExit(this, "prepare", rc); } return rc; } /** * Obtain a list of prepared transaction branches from a resource manager. The transaction manager calls this method * during recovery to obtain the list of transaction branches that are currently in prepared or heuristically * completed states. * * @param flag One of TMSTARTRSCAN, TMENDRSCAN, TMNOFLAGS. TMNOFLAGS must be used when no other flags are set in * flags. * * @return The resource manager returns zero or more XIDs for the transaction branches that are currently in a * prepared or heuristically completed state. If an error occurs during the operation, the resource manager * should raise the appropriate XAException. * * @throws XAException An error has occurred. Possible values are XAER_RMERR, XAER_RMFAIL, XAER_INVAL, and * XAER_PROTO. 
*/ public Xid[] recover(int flag) throws XAException { int rc = XAResource.XA_OK; NetAgent netAgent = conn_.netAgent_; if (conn_.agent_.loggingEnabled()) { conn_.agent_.logWriter_.traceEntry(this, "recover", flag); } exceptionsOnXA = null; if (conn_.isPhysicalConnClosed()) { connectionClosedFailure(); } Xid[] xidList = null; NetXACallInfo callInfo = callInfoArray_[conn_.currXACallInfoOffset_]; callInfo.xaFlags_ = flag; callInfo.xaRetVal_ = XAResource.XA_OK; // initialize XARETVAL try { netAgent.beginWriteChainOutsideUOW(); // sent the recover PROTOCOL netAgent.netConnectionRequest_.writeXaRecover(conn_, flag); netAgent.flowOutsideUOW(); netAgent.netConnectionReply_.readXaRecover(conn_); if (callInfo.xaRetVal_ != XAResource.XA_OK) { // xaRetVal has possible error, format it callInfo.xaFunction_ = XAFUNC_RECOVER; rc = xaRetValErrorAccumSQL(callInfo, rc); callInfo.xaRetVal_ = XAResource.XA_OK; // re-initialize XARETVAL } netAgent.endReadChain(); xidList = conn_.getIndoubtTransactionIds(); } catch (SqlException sqle) { rc = getSqlExceptionXAErrorCode(sqle); exceptionsOnXA = Utils.accumulateSQLException(sqle, exceptionsOnXA); } if (rc != XAResource.XA_OK) { throwXAException(rc); } if (conn_.agent_.loggingEnabled()) { conn_.agent_.logWriter_.traceExit(this, "recover", xidList); } return xidList; } /** * Inform the resource manager to roll back work done on behalf of a transaction branch * * @param xid A global transaction identifier * * @throws XAException An error has occurred */ public void rollback(Xid xid) throws XAException { NetAgent netAgent = conn_.netAgent_; int rc = XAResource.XA_OK; exceptionsOnXA = null; if (conn_.agent_.loggingEnabled()) { conn_.agent_.logWriter_.traceEntry(this, "rollback", xid); } if (conn_.isPhysicalConnClosed()) { connectionClosedFailure(); } // update the XACallInfo NetXACallInfo callInfo = callInfoArray_[conn_.currXACallInfoOffset_]; callInfo.xid_ = xid; callInfo.xaRetVal_ = XAResource.XA_OK; // initialize XARETVAL try { 
netAgent.beginWriteChainOutsideUOW(); netAgent.netConnectionRequest_.writeXaRollback(conn_, xid); netAgent.flowOutsideUOW(); // read the reply to the rollback rc = netAgent.netConnectionReply_.readXaRollback(conn_); netAgent.endReadChain(); if (callInfo.xaRetVal_ != XAResource.XA_OK) { // xaRetVal has possible error, format it callInfo.xaFunction_ = XAFUNC_END; rc = xaRetValErrorAccumSQL(callInfo, rc); callInfo.xaRetVal_ = XAResource.XA_OK; // re-initialize XARETVAL } } catch (SqlException sqle) { rc = getSqlExceptionXAErrorCode(sqle); exceptionsOnXA = Utils.accumulateSQLException(sqle, exceptionsOnXA); } if (rc != XAResource.XA_OK) { throwXAException(rc); } } /** * Set the current transaction timeout value for this XAResource * instance. Once set, this timeout value is effective until * setTransactionTimeout is invoked again with a different value. To reset * the timeout value to the default value used by the resource manager, * set the value to zero. If the timeout operation is performed * successfully, the method returns true; otherwise false. If a resource * manager does not support transaction timeout value to be set * explicitly, this method returns false. * * @param seconds the transaction timeout value in seconds. * Value of 0 means the reasource manager's default value. * Value of Integer.MAX_VALUE means no timeout. * @return true if transaction timeout value is set successfully; * otherwise false. * * @exception XAException - An error has occurred. Possible exception * values are XAER_RMERR, XAER_RMFAIL, or XAER_INVAL. 
*/ public boolean setTransactionTimeout(int seconds) throws XAException { if (conn_.agent_.loggingEnabled()) { conn_.agent_.logWriter_.traceEntry(this, "setTransactionTimeout"); } if (seconds < 0) { // throw an exception if invalid value was specified throw new XAException(XAException.XAER_INVAL); } exceptionsOnXA = null; timeoutSeconds = seconds; if (conn_.agent_.loggingEnabled()) { conn_.agent_.logWriter_.traceExit(this, "setTransactionTimeout", true); } return true; } public void setKeepCurrentIsolationLevel(boolean flag) { keepIsolationLevel = flag; } public boolean keepCurrentIsolationLevel() { return keepIsolationLevel; } /** * Start work on behalf of a transaction branch specified in xid * * @param xid A global transaction identifier to be associated with the resource * @param flags One of TMNOFLAGS, TMJOIN, or TMRESUME * * @throws XAException An error has occurred. Possible exceptions * are XA_RB*, XAER_RMERR, XAER_RMFAIL, * XAER_DUPID, XAER_OUTSIDE, XAER_NOTA, XAER_INVAL, or XAER_PROTO. */ public synchronized void start(Xid xid, int flags) throws XAException { NetAgent netAgent = conn_.netAgent_; int rc = XAResource.XA_OK; exceptionsOnXA = null; if (conn_.agent_.loggingEnabled()) { conn_.agent_.logWriter_.traceEntry(this, "start", xid, flags); } if (conn_.isPhysicalConnClosed()) { connectionClosedFailure(); } // DERBY-1025 - Flow an auto-commit if in auto-commit mode before // entering a global transaction try { if(conn_.autoCommit_) conn_.flowAutoCommit(); } catch (SqlException sqle) { rc = getSqlExceptionXAErrorCode(sqle); exceptionsOnXA = Utils.accumulateSQLException(sqle, exceptionsOnXA); } // update the XACallInfo NetXACallInfo callInfo = callInfoArray_[conn_.currXACallInfoOffset_]; callInfo.xaFlags_ = flags; callInfo.xid_ = xid; callInfo.xaRetVal_ = XAResource.XA_OK; // initialize XARETVAL // check and setup the transaction timeout settings if (flags == TMNOFLAGS) { if (timeoutSeconds == Integer.MAX_VALUE) { // Disable the transaction timeout. 
callInfo.xaTimeoutMillis_ = 0; } else if (timeoutSeconds > 0) { // Use the timeout value specified. callInfo.xaTimeoutMillis_ = 1000*timeoutSeconds; } else if (timeoutSeconds == 0) { // The -1 value means that the timeout codepoint // will not be sent in the request and thus the server // will use the default value. callInfo.xaTimeoutMillis_ = -1; } else { // This should not ever happen due that setTransactionTimeout // does not allow a negative value throwXAException(XAException.XAER_RMERR); } } try { netAgent.beginWriteChainOutsideUOW(); netAgent.netConnectionRequest_.writeXaStartUnitOfWork(conn_); netAgent.flowOutsideUOW(); netAgent.netConnectionReply_.readXaStartUnitOfWork(conn_); if (callInfo.xaRetVal_ != XAResource.XA_OK) { // xaRetVal has possible error, format it callInfo.xaFunction_ = XAFUNC_START; rc = xaRetValErrorAccumSQL(callInfo, rc); callInfo.xaRetVal_ = XAResource.XA_OK; // re-initialize XARETVAL } // Setting this is currently required to avoid client from sending // commit for autocommit. 
if (rc == XAResource.XA_OK) { conn_.setXAState(ClientConnection.XA_T1_ASSOCIATED); } } catch (SqlException sqle) { rc = getSqlExceptionXAErrorCode(sqle); exceptionsOnXA = Utils.accumulateSQLException(sqle, exceptionsOnXA); } if (rc != XAResource.XA_OK) { throwXAException(rc); } } private String getXAExceptionText(int rc) { String xaExceptionText; switch (rc) { case XAException.XA_RBROLLBACK: xaExceptionText = "XA_RBROLLBACK"; break; case XAException.XA_RBCOMMFAIL: xaExceptionText = "XA_RBCOMMFAIL"; break; case XAException.XA_RBDEADLOCK: xaExceptionText = "XA_RBDEADLOCK"; break; case XAException.XA_RBINTEGRITY: xaExceptionText = "XA_RBINTEGRITY"; break; case XAException.XA_RBOTHER: xaExceptionText = "XA_RBOTHER"; break; case XAException.XA_RBPROTO: xaExceptionText = "XA_RBPROTO"; break; case XAException.XA_RBTIMEOUT: xaExceptionText = "XA_RBTIMEOUT"; break; case XAException.XA_RBTRANSIENT: xaExceptionText = "XA_RBTRANSIENT"; break; case XAException.XA_NOMIGRATE: xaExceptionText = "XA_NOMIGRATE"; break; case XAException.XA_HEURHAZ: xaExceptionText = "XA_HEURHAZ"; break; case XAException.XA_HEURCOM: xaExceptionText = "XA_HEURCOM"; break; case XAException.XA_HEURRB: xaExceptionText = "XA_HEURRB"; break; case XAException.XA_HEURMIX: xaExceptionText = "XA_HEURMIX"; break; case XAException.XA_RETRY: xaExceptionText = "XA_RETRY"; break; case XAException.XA_RDONLY: xaExceptionText = "XA_RDONLY"; break; case XAException.XAER_ASYNC: xaExceptionText = "XAER_ASYNC"; break; case XAException.XAER_RMERR: xaExceptionText = "XAER_RMERR"; break; case XAException.XAER_NOTA: xaExceptionText = "XAER_NOTA"; break; case XAException.XAER_INVAL: xaExceptionText = "XAER_INVAL"; break; case XAException.XAER_PROTO: xaExceptionText = "XAER_PROTO"; break; case XAException.XAER_RMFAIL: xaExceptionText = "XAER_RMFAIL"; break; case XAException.XAER_DUPID: xaExceptionText = "XAER_DUPID"; break; case XAException.XAER_OUTSIDE: xaExceptionText = "XAER_OUTSIDE"; break; case XAResource.XA_OK: 
xaExceptionText = "XA_OK"; break; default: xaExceptionText = "Unknown Error"; break; } return xaExceptionText; } private void throwXAException(int rc) throws XAException { StringBuilder xaExceptionText = new StringBuilder(64); xaExceptionText.append(getXAExceptionText(rc)); // save the SqlException chain to add it to the XAException SqlException sqlExceptions = exceptionsOnXA; while (exceptionsOnXA != null) { // one or more SqlExceptions received, format them xaExceptionText.append(" : ").append(exceptionsOnXA.getMessage()); exceptionsOnXA = (SqlException) exceptionsOnXA.getNextException(); } XaException xaException = new XaException( conn_.agent_.logWriter_, sqlExceptions, xaExceptionText.toString()); xaException.errorCode = rc; setXaStateForXAException(rc); throw xaException; } /** * Reset the transaction branch association state to XA_T0_NOT_ASSOCIATED * for XAER_RM* and XA_RB* Exceptions. All other exceptions leave the state * unchanged * * @param rc // return code from XAException * @throws XAException */ private void setXaStateForXAException(int rc) { switch (rc) { // Reset to T0, not associated for XA_RB*, RM* // XAER_RMFAIL and XAER_RMERR will be fatal to the connection // but that is not dealt with here case XAException.XAER_RMFAIL: case XAException.XAER_RMERR: case XAException.XA_RBROLLBACK: case XAException.XA_RBCOMMFAIL: case XAException.XA_RBDEADLOCK: case XAException.XA_RBINTEGRITY: case XAException.XA_RBOTHER: case XAException.XA_RBPROTO: case XAException.XA_RBTIMEOUT: case XAException.XA_RBTRANSIENT: conn_.setXAState(ClientConnection.XA_T0_NOT_ASSOCIATED); break; // No change for other XAExceptions // XAException.XA_NOMIGRATE //XAException.XA_HEURHAZ // XAException.XA_HEURCOM // XAException.XA_HEURRB // XAException.XA_HEURMIX // XAException.XA_RETRY // XAException.XA_RDONLY // XAException.XAER_ASYNC // XAException.XAER_NOTA // XAException.XAER_INVAL // XAException.XAER_PROTO // XAException.XAER_DUPID // XAException.XAER_OUTSIDE default: return; } } 
public boolean isSameRM(XAResource xares) throws XAException { boolean isSame = false; // preset that the RMs are NOT the same exceptionsOnXA = null; if (conn_.agent_.loggingEnabled()) { conn_.agent_.logWriter_.traceEntry(this, "isSameRM", xares); } if (conn_.isPhysicalConnClosed()) { connectionClosedFailure(); } if (xares instanceof NetXAResource) { // Both are NetXAResource so check to see if this is the same RM. // Remember, isSame is initialized to false NetXAResource derbyxares = (NetXAResource) xares; while (true) { if (!conn_.databaseName_.equalsIgnoreCase(derbyxares.conn_.databaseName_)) { break; // database names are not equal, not same RM } if (!conn_.netAgent_.server_.equalsIgnoreCase (derbyxares.conn_.netAgent_.server_)) { // server name strings not equal, compare IP addresses try { // 1st convert "localhost" to actual server name String server1 = this.processLocalHost(conn_.netAgent_.server_); String server2 = this.processLocalHost(derbyxares.conn_.netAgent_.server_); // now convert the server name to ip address InetAddress serverIP1 = InetAddress.getByName(server1); InetAddress serverIP2 = InetAddress.getByName(server2); if (!serverIP1.equals(serverIP2)) { break; // server IPs are not equal, not same RM } } catch (UnknownHostException ue) { break; } } if (conn_.netAgent_.port_ != derbyxares.conn_.netAgent_.port_) { break; // ports are not equal, not same RM } isSame = true; // everything the same, set RMs are the same break; } } if (conn_.agent_.loggingEnabled()) { conn_.agent_.logWriter_.traceExit (this, "isSameRM", isSame); } return isSame; } public static boolean xidsEqual(Xid xid1, Xid xid2) { // determine if the 2 xids contain the same values even if not same object // comapre the format ids if (xid1.getFormatId() != xid2.getFormatId()) { return false; // format ids are not the same } // compare the global transaction ids int xid1Length = xid1.getGlobalTransactionId().length; if (xid1Length != xid2.getGlobalTransactionId().length) { return false; 
// length of the global trans ids are not the same } byte[] xid1Bytes = xid1.getGlobalTransactionId(); byte[] xid2Bytes = xid2.getGlobalTransactionId(); int i; for (i = 0; i < xid1Length; ++i) { // check all bytes are the same if (xid1Bytes[i] != xid2Bytes[i]) { return false; // bytes in the global trans ids are not the same } } // compare the branch qualifiers xid1Length = xid1.getBranchQualifier().length; if (xid1Length != xid2.getBranchQualifier().length) { return false; // length of the global trans ids are not the same } xid1Bytes = xid1.getBranchQualifier(); xid2Bytes = xid2.getBranchQualifier(); for (i = 0; i < xid1Length; ++i) { // check all bytes are the same if (xid1Bytes[i] != xid2Bytes[i]) { return false; // bytes in the global trans ids are not the same } } return true; // all of the fields are the same, xid1 == xid2 } private void connectionClosedFailure() throws XAException { // throw an XAException XAER_RMFAIL, with a chained SqlException - closed exceptionsOnXA = Utils.accumulateSQLException (new SqlException(null, new ClientMessageId(SQLState.NO_CURRENT_CONNECTION)), exceptionsOnXA); throwXAException(XAException.XAER_RMFAIL); } private String getXAFuncStr(int xaFunc) { switch (xaFunc) { case XAFUNC_COMMIT: return XAFUNCSTR_COMMIT; case XAFUNC_END: return XAFUNCSTR_END; case XAFUNC_FORGET: return XAFUNCSTR_FORGET; case XAFUNC_PREPARE: return XAFUNCSTR_PREPARE; case XAFUNC_RECOVER: return XAFUNCSTR_RECOVER; case XAFUNC_ROLLBACK: return XAFUNCSTR_ROLLBACK; case XAFUNC_START: return XAFUNCSTR_START; } return XAFUNCSTR_NONE; } protected int xaRetValErrorAccumSQL(NetXACallInfo callInfo, int currentRC) { // xaRetVal_ is set by the server to be one of the // standard constants from XAException. 
int rc = callInfo.xaRetVal_; if (rc != XAResource.XA_OK) { // error was detected // create an SqlException to report this error within SqlException accumSql = new SqlException(conn_.netAgent_.logWriter_, new ClientMessageId(SQLState.NET_XARETVAL_ERROR), SqlCode.queuedXAError, getXAFuncStr(callInfo.xaFunction_), getXAExceptionText(rc)); exceptionsOnXA = Utils.accumulateSQLException (accumSql, exceptionsOnXA); if (currentRC != XAResource.XA_OK) { // the rc passed into this function had an error also, prioritize error if (currentRC < 0) { // rc passed in was a major error use it instead of current error return currentRC; } } } return rc; } private String processLocalHost(String serverName) { if (serverName.equalsIgnoreCase("localhost")) { // this is a localhost, find hostname try { InetAddress localhostNameIA = InetAddress.getLocalHost(); String localhostName = localhostNameIA.getHostName(); return localhostName; } catch (SecurityException se) { return serverName; } catch (UnknownHostException ue) { return serverName; } } // not "localhost", return original server name return serverName; } }
apache/shindig
36,055
java/social-api/src/main/java/org/apache/shindig/social/opensocial/model/Person.java
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.apache.shindig.social.opensocial.model; import org.apache.shindig.protocol.model.Enum; import org.apache.shindig.protocol.model.Exportablebean; import org.apache.shindig.social.core.model.PersonImpl; import com.google.common.base.Functions; import com.google.common.collect.ImmutableSet; import com.google.common.collect.Maps; import com.google.inject.ImplementedBy; import java.util.Date; import java.util.EnumSet; import java.util.List; import java.util.Map; import java.util.Set; /** * see <a href="http://www.opensocial.org/Technical-Resources/opensocial-spec-v081/opensocial-reference#opensocial.Person.Field"> * http://www.opensocial.org/Technical-Resources/opensocial-spec-v081/opensocial-reference#opensocial.Person.Field</a> * for all field meanings. All fields are represented in the js api at this time except for lastUpdated. * This field is currently only in the RESTful spec. * */ @ImplementedBy(PersonImpl.class) @Exportablebean public interface Person { /** * The type of a profile url when represented as a list field. */ String PROFILE_URL_TYPE = "profile"; /** * The type of thumbnail photo types when represented as list fields. 
*/ String THUMBNAIL_PHOTO_TYPE = "thumbnail"; /** * The display name for the user. * @return the display name */ String getDisplayName(); /** * Set the display name. * @param displayName the new display name. */ void setDisplayName(String displayName); /** * Enumeration of genders. */ public enum Gender { /** * Female. */ female, /** * Male. */ male } /** * The fields that represent the person object in json form. */ public static enum Field { /** the json field for aboutMe. */ ABOUT_ME("aboutMe"), /** the json field for accounts. */ ACCOUNTS("accounts"), /** the json field for activities. */ ACTIVITIES("activities"), /** the json field for addresses. */ ADDRESSES("addresses"), /** the json field for age. */ AGE("age"), /** the json field for appData. */ APP_DATA("appData"), /** the json field for bodyType. */ BODY_TYPE("bodyType"), /** the json field for books. */ BOOKS("books"), /** the json field for cars. */ CARS("cars"), /** the json field for children. */ CHILDREN("children"), /** the json field for currentLocation. */ CURRENT_LOCATION("currentLocation"), /** the json field for birthday. */ BIRTHDAY("birthday"), /** the json field for display name. */ DISPLAY_NAME("displayName"), /** Needed to support the RESTful api. */ /** the json field for drinker. */ DRINKER("drinker"), /** the json field for emails. */ EMAILS("emails"), /** the json field for ethnicity. */ ETHNICITY("ethnicity"), /** the json field for fashion. */ FASHION("fashion"), /** the json field for food. */ FOOD("food"), /** the json field for gender. */ GENDER("gender"), /** the json field for happiestWhen. */ HAPPIEST_WHEN("happiestWhen"), /** the json field for hasApp. */ HAS_APP("hasApp"), /** the json field for heroes. */ HEROES("heroes"), /** the json field for humor. */ HUMOR("humor"), /** the json field for id. */ ID("id"), /** the json field for IM accounts. */ IMS("ims"), /** the json field for interests. */ INTERESTS("interests"), /** the json field for jobInterests. 
*/ JOB_INTERESTS("jobInterests"), /** the json field for languagesSpoken. */ LANGUAGES_SPOKEN("languagesSpoken"), /** the json field for updated. */ LAST_UPDATED("updated"), /** Needed to support the RESTful api. */ /** the json field for livingArrangement. */ LIVING_ARRANGEMENT("livingArrangement"), /** the json field for lookingFor. */ LOOKING_FOR("lookingFor"), /** the json field for movies. */ MOVIES("movies"), /** the json field for music. */ MUSIC("music"), /** the json field for name. */ NAME("name"), /** the json field for networkPresence. */ NETWORKPRESENCE("networkPresence"), /** the json field for nickname. */ NICKNAME("nickname"), /** the json field for organiztions. */ ORGANIZATIONS("organizations"), /** the json field for pets. */ PETS("pets"), /** the json field for phoneNumbers. */ PHONE_NUMBERS("phoneNumbers"), /** the json field for photos. */ PHOTOS("photos"), /** the json field for politicalViews. */ POLITICAL_VIEWS("politicalViews"), /** the json field for preferredUsername */ PREFERRED_USERNAME("preferredUsername"), /** the json field for profileSong. */ PROFILE_SONG("profileSong"), /** the json field for profileUrl. */ PROFILE_URL("profileUrl"), /** the json field for profileVideo. */ PROFILE_VIDEO("profileVideo"), /** the json field for quotes. */ QUOTES("quotes"), /** the json field for relationshipStatus. */ RELATIONSHIP_STATUS("relationshipStatus"), /** the json field for religion. */ RELIGION("religion"), /** the json field for romance. */ ROMANCE("romance"), /** the json field for scaredOf. */ SCARED_OF("scaredOf"), /** the json field for sexualOrientation. */ SEXUAL_ORIENTATION("sexualOrientation"), /** the json field for smoker. */ SMOKER("smoker"), /** the json field for sports. */ SPORTS("sports"), /** the json field for status. */ STATUS("status"), /** the json field for tags. */ TAGS("tags"), /** the json field for thumbnailUrl. */ THUMBNAIL_URL("thumbnailUrl"), /** the json field for utcOffset. 
*/ UTC_OFFSET("utcOffset"), /** the json field for turnOffs. */ TURN_OFFS("turnOffs"), /** the json field for turnOns. */ TURN_ONS("turnOns"), /** the json field for tvShows. */ TV_SHOWS("tvShows"), /** the json field for urls. */ URLS("urls"); /** * a Map to convert json string to Field representations. */ private static final Map<String,Field> LOOKUP = Maps.uniqueIndex(EnumSet.allOf(Field.class), Functions.toStringFunction()); /** * The json field that the instance represents. */ private final String urlString; /** * The set of all fields. */ public static final Set<String> ALL_FIELDS = LOOKUP.keySet(); /** * The set of default fields returned fields. */ public static final Set<String> DEFAULT_FIELDS = ImmutableSet.of( ID.toString(), NAME.toString(), THUMBNAIL_URL.toString()); /** * create a field base on the a json element. * * @param urlString the name of the element */ private Field(String urlString) { this.urlString = urlString; } /** * emit the field as a json element. * * @return the field name */ @Override public String toString() { return this.urlString; } public static Field getField(String jsonString) { return LOOKUP.get(jsonString); } /** * Converts from a url string (usually passed in the fields= parameter) into the * corresponding field enum. * @param urlString The string to translate. * @return The corresponding person field. */ public static Person.Field fromUrlString(String urlString) { return LOOKUP.get(urlString); } } /** * Get a general statement about the person, specified as a string. Container support for this * field is OPTIONAL. * * @return the value of aboutMe */ String getAboutMe(); /** * Set a general statement about the person, specified as a string. Container support for this * field is OPTIONAL. * * @param aboutMe the value of aboutMe */ void setAboutMe(String aboutMe); /** * Get the list of online accounts held by this person. 
* @return a list of Account objects */ List<Account> getAccounts(); /** * Set the list of online accounts held by this person. * @param accounts a list of Account objects */ void setAccounts(List<Account> accounts); /** * Get the person's favorite activities, specified as an List of strings. Container support for * this field is OPTIONAL. * * @return list of activities. */ List<String> getActivities(); /** * Set the person's favorite activities, specified as an List of strings. * * @param activities a list of activities */ void setActivities(List<String> activities); /** * Get addresses associated with the person, specified as an List of Address objects. Container * support for this field is OPTIONAL. * * @return a List of address objects */ List<Address> getAddresses(); /** * Set addresses associated with the person, specified as an List of Address objects. Container * support for this field is OPTIONAL. * * @param addresses a list of address objects */ void setAddresses(List<Address> addresses); /** * Get the person's age, specified as a number. Container support for this field is OPTIONAL. * * @return the persons age */ Integer getAge(); /** * Set the person's age, specified as a number. Container support for this field is OPTIONAL. * * @param age the persons age */ void setAge(Integer age); /** * Get app data for the person. * * @return the app data, possibly a subset. */ Map<String, ?> getAppData(); /** * Sets app data for the person. * * @param appData the app data, possibly a subset */ void setAppData(Map<String, ?> appData); /** * Get the person's date of birth, specified as a {@link Date} object. Container support for this * field is OPTIONAL. * * @return the person's data of birth */ Date getBirthday(); /** * Set the person's date of birth, specified as a {@link Date} object. Container support for this * field is OPTIONAL. 
* * @param birthday the person's data of birth */ void setBirthday(Date birthday); /** * Get the person's body characteristics, specified as an BodyType. Container support for this * field is OPTIONAL. * * @return the BodyType */ BodyType getBodyType(); /** * Set the person's body characteristics, specified as an BodyType. Container support for this * field is OPTIONAL. * * @param bodyType the person's BodyType */ void setBodyType(BodyType bodyType); /** * Get the person's favorite books, specified as an List of strings. Container support for this * field is OPTIONAL. * * @return list of books as strings */ List<String> getBooks(); /** * Set the person's favorite books, specified as an List of strings. Container support for this * field is OPTIONAL. * * @param books a list of the person's books */ void setBooks(List<String> books); /** * Get the person's favorite cars, specified as an List of strings. Container support for this * field is OPTIONAL. * * @return the persons favorite cars */ List<String> getCars(); /** * Set the person's favorite cars, specified as an List of strings. Container support for this * field is OPTIONAL. * * @param cars a list of the persons favorite cars */ void setCars(List<String> cars); /** * Get a description of the person's children, specified as a string. Container support for this * field is OPTIONAL. * * @return the persons children */ String getChildren(); /** * Set a description of the person's children, specified as a string. Container support for this * field is OPTIONAL. * * @param children the persons children */ void setChildren(String children); /** * Get the person's current location, specified as an {@link Address}. Container support for this * field is OPTIONAL. * * @return the persons current location */ Address getCurrentLocation(); /** * Set the person's current location, specified as an {@link Address}. Container support for this * field is OPTIONAL. 
* * @param currentLocation the persons current location */ void setCurrentLocation(Address currentLocation); /** * Get the person's drinking status, specified as an {@link Enum} with the enum's key referencing * {@link Drinker}. Container support for this field is OPTIONAL. * * @return the persons drinking status */ Enum<Drinker> getDrinker(); /** * Get the person's drinking status, specified as an {@link Enum} with the enum's key referencing * {@link Drinker}. Container support for this field is OPTIONAL. * * @param newDrinker the persons drinking status */ void setDrinker(Enum<Drinker> newDrinker); /** * Get the person's Emails associated with the person. * Container support for this field is OPTIONAL. * * @return a list of the person's emails */ List<ListField> getEmails(); /** * Set the person's Emails associated with the person. * Container support for this field is OPTIONAL. * * @param emails a list of the person's emails */ void setEmails(List<ListField> emails); /** * Get the person's ethnicity, specified as a string. Container support for this field is * OPTIONAL. * * @return the person's ethnicity */ String getEthnicity(); /** * Set the person's ethnicity, specified as a string. Container support for this field is * OPTIONAL. * * @param ethnicity the person's ethnicity */ void setEthnicity(String ethnicity); /** * Get the person's thoughts on fashion, specified as a string. Container support for this field * is OPTIONAL. * * @return the person's thoughts on fashion */ String getFashion(); /** * Set the person's thoughts on fashion, specified as a string. Container support for this field * is OPTIONAL. * * @param fashion the person's thoughts on fashion */ void setFashion(String fashion); /** * Get the person's favorite food, specified as an List of strings. Container support for this * field is OPTIONAL. * * @return the person's favorite food */ List<String> getFood(); /** * Set the person's favorite food, specified as an List of strings. 
Container support for this * field is OPTIONAL. * * @param food the person's favorite food */ void setFood(List<String> food); /** * Get a person's gender, specified as an {@link Gender}. * * @return the person's gender */ Gender getGender(); /** * Set a person's gender, specified as an {@link Gender}. * * @param newGender the person's gender */ void setGender(Gender newGender); /** * Get a description of when the person is happiest, specified as a string. Container support for * this field is OPTIONAL. * * @return a description of when the person is happiest */ String getHappiestWhen(); /** * Set a description of when the person is happiest, specified as a string. Container support for * this field is OPTIONAL. * * @param happiestWhen a description of when the person is happiest */ void setHappiestWhen(String happiestWhen); /** * Get if the person has used the current app. Container support for this field is OPTIONAL. * Has app needs to take account of the context of the application that is performing the * query on this person object. * @return true the current app has been used */ Boolean getHasApp(); /** * Set if the person has used the current app. Container support for this field is OPTIONAL. * * @param hasApp set true the current app has been used */ void setHasApp(Boolean hasApp); /** * Get a person's favorite heroes, specified as an Array of strings. Container support for this * field is OPTIONAL. * * @return a list of the person's favorite heroes */ List<String> getHeroes(); /** * Set a person's favorite heroes, specified as an Array of strings. Container support for this * field is OPTIONAL. * * @param heroes a list of the person's favorite heroes */ void setHeroes(List<String> heroes); /** * Get the person's thoughts on humor, specified as a string. Container support for this field is * OPTIONAL. * * @return the person's thoughts on humor */ String getHumor(); /** * Set the person's thoughts on humor, specified as a string. 
Container support for this field is * OPTIONAL. * * @param humor the person's thoughts on humor */ void setHumor(String humor); /** * Get A string ID that can be permanently associated with this person. Container support for this * field is REQUIRED. * * @return the permanent ID of the person */ String getId(); /** * Set A string ID that can be permanently associated with this person. Container support for this * field is REQUIRED. * * @param id the permanent ID of the person */ void setId(String id); /** * Get a list of Instant messaging address for this Person. No official canonicalization rules * exist for all instant messaging addresses, but Service Providers SHOULD remove all whitespace * and convert the address to lowercase, if this is appropriate for the service this IM address is * used for. Instead of the standard Canonical Values for type, this field defines the following * Canonical Values to represent currently popular IM services: aim, gtalk, icq, xmpp, msn, skype, * qq, and yahoo. * * @return A list of IM addresses */ List<ListField> getIms(); /** * Set a list of Instant messaging address for this Person. No official canonicalization rules * exist for all instant messaging addresses, but Service Providers SHOULD remove all whitespace * and convert the address to lowercase, if this is appropriate for the service this IM address is * used for. Instead of the standard Canonical Values for type, this field defines the following * Canonical Values to represent currently popular IM services: aim, gtalk, icq, xmpp, msn, skype, * qq, and yahoo. * * @param ims a list ListFields representing IM addresses. */ void setIms(List<ListField> ims); /** * Get the person's interests, hobbies or passions, specified as an List of strings. Container * support for this field is OPTIONAL. * * @return the person's interests, hobbies or passions */ List<String> getInterests(); /** * Set the person's interests, hobbies or passions, specified as an List of strings. 
Container * support for this field is OPTIONAL. * * @param interests the person's interests, hobbies or passions */ void setInterests(List<String> interests); /** * Get the Person's favorite jobs, or job interests and skills, specified as a string. Container * support for this field is OPTIONAL * * @return the Person's favorite jobs, or job interests and skills */ String getJobInterests(); /** * Set the Person's favorite jobs, or job interests and skills, specified as a string. Container * support for this field is OPTIONAL * * @param jobInterests the Person's favorite jobs, or job interests and skills */ void setJobInterests(String jobInterests); /** * Get a List of the languages that the person speaks as ISO 639-1 codes, specified as an List of * strings. Container support for this field is OPTIONAL. * * @return a List of the languages that the person speaks */ List<String> getLanguagesSpoken(); /** * Set a List of the languages that the person speaks as ISO 639-1 codes, specified as an List of * strings. Container support for this field is OPTIONAL. * * @param languagesSpoken a List of the languages that the person speaks */ void setLanguagesSpoken(List<String> languagesSpoken); /** * The time this person was last updated. * * @return the last update time */ Date getUpdated(); /** * Set the time this record was last updated. * * @param updated the last update time */ void setUpdated(Date updated); /** * Get a description of the person's living arrangement, specified as a string. Container support * for this field is OPTIONAL. * * @return a description of the person's living arrangement */ String getLivingArrangement(); /** * Set a description of the person's living arrangement, specified as a string. Container support * for this field is OPTIONAL. 
* * @param livingArrangement a description of the person's living arrangement */ void setLivingArrangement(String livingArrangement); /** * Get a person's statement about who or what they are looking for, or what they are interested in * meeting people for. Specified as an List of {@link org.apache.shindig.protocol.model.Enum} with the enum's key referencing * {@link LookingFor} Container support for this field is OPTIONAL. * * @return person's statement about who or what they are looking for */ List<Enum<LookingFor>> getLookingFor(); /** * Get a person's statement about who or what they are looking for, or what they are interested in * meeting people for. Specified as an List of {@link Enum} with the enum's key referencing * {@link LookingFor} Container support for this field is OPTIONAL. * * @param lookingFor person's statement about who or what they are looking for */ void setLookingFor(List<Enum<LookingFor>> lookingFor); /** * Get the Person's favorite movies, specified as an List of strings. Container support for this * field is OPTIONAL. * * @return the Person's favorite movies */ List<String> getMovies(); /** * Set the Person's favorite movies, specified as an List of strings. Container support for this * field is OPTIONAL. * * @param movies the Person's favorite movies */ void setMovies(List<String> movies); /** * Get the Person's favorite music, specified as an List of strings Container support for this * field is OPTIONAL. * * @return Person's favorite music */ List<String> getMusic(); /** * Set the Person's favorite music, specified as an List of strings Container support for this * field is OPTIONAL. * * @param music Person's favorite music */ void setMusic(List<String> music); /** * Get the person's name Container support for this field is REQUIRED. * * @return the person's name */ Name getName(); /** * Set the person's name Container support for this field is REQUIRED. 
* * @param name the person's name */ void setName(Name name); /** * Get the person's current network status. Specified as an {@link Enum} with the enum's key * referencing {@link NetworkPresence}. Container support for this field is OPTIONAL. * * @return the person's current network status */ Enum<NetworkPresence> getNetworkPresence(); /** * Set the person's current network status. Specified as an {@link org.apache.shindig.protocol.model.Enum} with the enum's key * referencing {@link NetworkPresence}. Container support for this field is OPTIONAL. * * @param networkPresence the person's current network status */ void setNetworkPresence(org.apache.shindig.protocol.model.Enum<NetworkPresence> networkPresence); /** * Get the person's nickname. Container support for this field is REQUIRED. * * @return the person's nickname. */ String getNickname(); /** * Set the the person's nickname. Container support for this field is REQUIRED. * * @param nickname the person's nickname. */ void setNickname(String nickname); /** * Get a list of current or past organizational affiliations of this Person. * @return a list of Organization objects */ List<Organization> getOrganizations(); /** * Set a list of current or past organizational affiliations of this Person. * @param organizations a list of Organisation objects */ void setOrganizations(List<Organization> organizations); /** * Get a description of the person's pets Container support for this field is OPTIONAL. * * @return a description of the person's pets */ String getPets(); /** * Set a description of the person's pets Container support for this field is OPTIONAL. * * @param pets a description of the person's pets */ void setPets(String pets); /** * Get the Phone numbers associated with the person. * * @return the Phone numbers associated with the person */ List<ListField> getPhoneNumbers(); /** * Set the Phone numbers associated with the person. 
* * @param phoneNumbers the Phone numbers associated with the person */ void setPhoneNumbers(List<ListField> phoneNumbers); /** * URL of a photo of this person. The value SHOULD be a canonicalized URL, and MUST point to an * actual image file (e.g. a GIF, JPEG, or PNG image file) rather than to a web page containing an * image. Service Providers MAY return the same image at different sizes, though it is recognized * that no standard for describing images of various sizes currently exists. Note that this field * SHOULD NOT be used to send down arbitrary photos taken by this user, but specifically profile * photos of the contact suitable for display when describing the contact. * * @return a list of Photos */ List<ListField> getPhotos(); /** * Set a list of Photos for the person. * @see Person#getPhotos() * * @param photos a list of photos. */ void setPhotos(List<ListField> photos); /** * Get the Person's political views, specified as a string. Container support for this field is * OPTIONAL. * * @return the Person's political views */ String getPoliticalViews(); /** * Set the Person's political views, specified as a string. Container support for this field is * OPTIONAL. * * @param politicalViews the Person's political views */ void setPoliticalViews(String politicalViews); /** * Get the Person's preferred username, specified as a string. Container support for this field is OPTIONAL * * @return the Person's preferred username */ String getPreferredUsername(); /** * Set the Person's preferred username, specified as a string. Container support for this field is OPTIONAL * * @param preferredString the Person's preferred username */ void setPreferredUsername(String preferredString); /** * Get the Person's profile song, specified as an {@link Url}. Container support for this field * is OPTIONAL. * * @return the Person's profile song */ Url getProfileSong(); /** * Set the Person's profile song, specified as an {@link Url}. Container support for this field * is OPTIONAL. 
* * @param profileSong the Person's profile song */ void setProfileSong(Url profileSong); /** * Get the Person's profile video. Container support for this field is OPTIONAL. * * @return the Person's profile video */ Url getProfileVideo(); /** * Set the Person's profile video. Container support for this field is OPTIONAL. * * @param profileVideo the Person's profile video */ void setProfileVideo(Url profileVideo); /** * Get the person's favorite quotes Container support for this field is OPTIONAL. * * @return the person's favorite quotes */ List<String> getQuotes(); /** * Set the person's favorite quotes. Container support for this field is OPTIONAL. * * @param quotes the person's favorite quotes */ void setQuotes(List<String> quotes); /** * Get the person's relationship status. Container support for this field is OPTIONAL. * * @return the person's relationship status */ String getRelationshipStatus(); /** * Set the person's relationship status. Container support for this field is OPTIONAL. * * @param relationshipStatus the person's relationship status */ void setRelationshipStatus(String relationshipStatus); /** * Get the person's relgion or religious views. Container support for this field is OPTIONAL. * * @return the person's relgion or religious views */ String getReligion(); /** * Set the person's relgion or religious views. Container support for this field is OPTIONAL. * * @param religion the person's relgion or religious views */ void setReligion(String religion); /** * Get the person's comments about romance. Container support for this field is OPTIONAL. * * @return the person's comments about romance, */ String getRomance(); /** * Set a the person's comments about romance, Container support for this field is OPTIONAL. * * @param romance the person's comments about romance, */ void setRomance(String romance); /** * Get what the person is scared of Container support for this field is OPTIONAL. 
* * @return what the person is scared of */ String getScaredOf(); /** * Set what the person is scared of Container support for this field is OPTIONAL. * * @param scaredOf what the person is scared of */ void setScaredOf(String scaredOf); /** * Get the person's sexual orientation. Container support for this field is OPTIONAL. * * @return the person's sexual orientation */ String getSexualOrientation(); /** * Set the person's sexual orientation Container support for this field is OPTIONAL. * * @param sexualOrientation the person's sexual orientation */ void setSexualOrientation(String sexualOrientation); /** * Get the person's smoking status. Container support for this field is OPTIONAL. * * @return the person's smoking status */ Enum<Smoker> getSmoker(); /** * Set the person's smoking status. Container support for this field is OPTIONAL. * * @param newSmoker the person's smoking status */ void setSmoker(Enum<Smoker> newSmoker); /** * Get the person's favorite sports. Container support for this field is OPTIONAL. * * @return the person's favorite sports */ List<String> getSports(); /** * Set the person's favorite sports. Container support for this field is OPTIONAL. * * @param sports the person's favorite sports */ void setSports(List<String> sports); /** * Get the person's status, headline or shoutout. Container support for this field is OPTIONAL. * * @return the person's status, headline or shoutout */ String getStatus(); /** * Set the person's status, headline or shoutout. Container support for this field is OPTIONAL. * * @param status the person's status, headline or shoutout */ void setStatus(String status); /** * Get arbitrary tags about the person. Container support for this field is OPTIONAL. * * @return arbitrary tags about the person. */ List<String> getTags(); /** * Set arbitrary tags about the person. Container support for this field is OPTIONAL. * * @param tags arbitrary tags about the person. 
*/ void setTags(List<String> tags); /** * Get the Person's time zone, specified as the difference in minutes between Greenwich Mean Time * (GMT) and the user's local time. Container support for this field is OPTIONAL. * * @return the Person's time zone */ Long getUtcOffset(); /** * Set the Person's time zone, specified as the difference in minutes between Greenwich Mean Time * (GMT) and the user's local time. Container support for this field is OPTIONAL. * * @param utcOffset the Person's time zone */ void setUtcOffset(Long utcOffset); /** * Get the person's turn offs. Container support for this field is OPTIONAL. * * @return the person's turn offs */ List<String> getTurnOffs(); /** * Set the person's turn offs. Container support for this field is OPTIONAL. * * @param turnOffs the person's turn offs */ void setTurnOffs(List<String> turnOffs); /** * Get the person's turn ons. Container support for this field is OPTIONAL. * * @return the person's turn ons */ List<String> getTurnOns(); /** * Set the person's turn ons. Container support for this field is OPTIONAL. * * @param turnOns the person's turn ons */ void setTurnOns(List<String> turnOns); /** * Get the person's favorite TV shows. Container support for this field is OPTIONAL. * * @return the person's favorite TV shows. */ List<String> getTvShows(); /** * Set the person's favorite TV shows. Container support for this field is OPTIONAL. * * @param tvShows the person's favorite TV shows. */ void setTvShows(List<String> tvShows); /** * Get the URLs related to the person, their webpages, or feeds Container support for this field * is OPTIONAL. * * @return the URLs related to the person, their webpages, or feeds */ List<Url> getUrls(); /** * Set the URLs related to the person, their webpages, or feeds Container support for this field * is OPTIONAL. 
* * @param urls the URLs related to the person, their webpages, or feeds */ void setUrls(List<Url> urls); /** * @return true if this person object represents the owner of the current page. */ boolean getIsOwner(); /** * Set the owner flag. * @param isOwner the isOwnerflag */ void setIsOwner(boolean isOwner); /** * Returns true if this person object represents the currently logged in user. * @return true if the person accessing this object is a viewer. */ boolean getIsViewer(); /** * Returns true if this person object represents the currently logged in user. * @param isViewer the isViewer Flag */ void setIsViewer(boolean isViewer); // Proxied fields /** * Get the person's profile URL. This URL must be fully qualified. Relative URLs will not work in * gadgets. This field MUST be stored in the urls list with a type of "profile". * * Container support for this field is OPTIONAL. * * @return the person's profile URL */ String getProfileUrl(); /** * Set the person's profile URL. This URL must be fully qualified. Relative URLs will not work in * gadgets. This field MUST be stored in the urls list with a type of "profile". * * Container support for this field is OPTIONAL. * * @param profileUrl the person's profile URL */ void setProfileUrl(String profileUrl); /** * Get the person's photo thumbnail URL, specified as a string. This URL must be fully qualified. * Relative URLs will not work in gadgets. * This field MUST be stored in the photos list with a type of "thumbnail". * * Container support for this field is OPTIONAL. * * @return the person's photo thumbnail URL */ String getThumbnailUrl(); /** * Set the person's photo thumbnail URL, specified as a string. This URL must be fully qualified. * Relative URLs will not work in gadgets. * This field MUST be stored in the photos list with a type of "thumbnail". * * Container support for this field is OPTIONAL. * * @param thumbnailUrl the person's photo thumbnail URL */ void setThumbnailUrl(String thumbnailUrl); }
googleapis/google-cloud-java
36,588
java-vision/google-cloud-vision/src/main/java/com/google/cloud/vision/v1p4beta1/ImageAnnotatorClient.java
/* * Copyright 2025 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.google.cloud.vision.v1p4beta1; import com.google.api.core.BetaApi; import com.google.api.gax.core.BackgroundResource; import com.google.api.gax.httpjson.longrunning.OperationsClient; import com.google.api.gax.longrunning.OperationFuture; import com.google.api.gax.rpc.OperationCallable; import com.google.api.gax.rpc.UnaryCallable; import com.google.cloud.vision.v1p4beta1.stub.ImageAnnotatorStub; import com.google.cloud.vision.v1p4beta1.stub.ImageAnnotatorStubSettings; import com.google.longrunning.Operation; import java.io.IOException; import java.util.List; import java.util.concurrent.TimeUnit; import javax.annotation.Generated; // AUTO-GENERATED DOCUMENTATION AND CLASS. /** * Service Description: Service that performs Google Cloud Vision API detection tasks over client * images, such as face, landmark, logo, label, and text detection. The ImageAnnotator service * returns detected entities from the images. * * <p>This class provides the ability to make remote calls to the backing service through method * calls that map to API methods. Sample code to get started: * * <pre>{@code * // This snippet has been automatically generated and should be regarded as a code template only. * // It will require modifications to work: * // - It may require correct/in-range values for request initialization. 
* // - It may require specifying regional endpoints when creating the service client as shown in * // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library * try (ImageAnnotatorClient imageAnnotatorClient = ImageAnnotatorClient.create()) { * List<AnnotateImageRequest> requests = new ArrayList<>(); * BatchAnnotateImagesResponse response = imageAnnotatorClient.batchAnnotateImages(requests); * } * }</pre> * * <p>Note: close() needs to be called on the ImageAnnotatorClient object to clean up resources such * as threads. In the example above, try-with-resources is used, which automatically calls close(). * * <table> * <caption>Methods</caption> * <tr> * <th>Method</th> * <th>Description</th> * <th>Method Variants</th> * </tr> * <tr> * <td><p> BatchAnnotateImages</td> * <td><p> Run image detection and annotation for a batch of images.</td> * <td> * <p>Request object method variants only take one parameter, a request object, which must be constructed before the call.</p> * <ul> * <li><p> batchAnnotateImages(List&lt;AnnotateImageRequest&gt; requests) * <li><p> batchAnnotateImages(BatchAnnotateImagesRequest request) * </ul> * <p>Callable method variants take no parameters and return an immutable API callable object, which can be used to initiate calls to the service.</p> * <ul> * <li><p> batchAnnotateImagesCallable() * </ul> * </td> * </tr> * <tr> * <td><p> BatchAnnotateFiles</td> * <td><p> Service that performs image detection and annotation for a batch of files. Now only "application/pdf", "image/tiff" and "image/gif" are supported. 
* <p> This service will extract at most 5 (customers can specify which 5 in AnnotateFileRequest.pages) frames (gif) or pages (pdf or tiff) from each file provided and perform detection and annotation for each image extracted.</td> * <td> * <p>Request object method variants only take one parameter, a request object, which must be constructed before the call.</p> * <ul> * <li><p> batchAnnotateFiles(List&lt;AnnotateFileRequest&gt; requests) * <li><p> batchAnnotateFiles(BatchAnnotateFilesRequest request) * </ul> * <p>Callable method variants take no parameters and return an immutable API callable object, which can be used to initiate calls to the service.</p> * <ul> * <li><p> batchAnnotateFilesCallable() * </ul> * </td> * </tr> * <tr> * <td><p> AsyncBatchAnnotateImages</td> * <td><p> Run asynchronous image detection and annotation for a list of images. * <p> Progress and results can be retrieved through the `google.longrunning.Operations` interface. `Operation.metadata` contains `OperationMetadata` (metadata). `Operation.response` contains `AsyncBatchAnnotateImagesResponse` (results). 
* <p> This service will write image annotation outputs to json files in customer GCS bucket, each json file containing BatchAnnotateImagesResponse proto.</td> * <td> * <p>Request object method variants only take one parameter, a request object, which must be constructed before the call.</p> * <ul> * <li><p> asyncBatchAnnotateImagesAsync(List&lt;AnnotateImageRequest&gt; requests, OutputConfig outputConfig) * <li><p> asyncBatchAnnotateImagesAsync(AsyncBatchAnnotateImagesRequest request) * </ul> * <p>Callable method variants take no parameters and return an immutable API callable object, which can be used to initiate calls to the service.</p> * <ul> * <li><p> asyncBatchAnnotateImagesOperationCallable() * <li><p> asyncBatchAnnotateImagesCallable() * </ul> * </td> * </tr> * <tr> * <td><p> AsyncBatchAnnotateFiles</td> * <td><p> Run asynchronous image detection and annotation for a list of generic files, such as PDF files, which may contain multiple pages and multiple images per page. Progress and results can be retrieved through the `google.longrunning.Operations` interface. `Operation.metadata` contains `OperationMetadata` (metadata). `Operation.response` contains `AsyncBatchAnnotateFilesResponse` (results).</td> * <td> * <p>Request object method variants only take one parameter, a request object, which must be constructed before the call.</p> * <ul> * <li><p> asyncBatchAnnotateFilesAsync(List&lt;AsyncAnnotateFileRequest&gt; requests) * <li><p> asyncBatchAnnotateFilesAsync(AsyncBatchAnnotateFilesRequest request) * </ul> * <p>Callable method variants take no parameters and return an immutable API callable object, which can be used to initiate calls to the service.</p> * <ul> * <li><p> asyncBatchAnnotateFilesOperationCallable() * <li><p> asyncBatchAnnotateFilesCallable() * </ul> * </td> * </tr> * </table> * * <p>See the individual methods for example code. * * <p>Many parameters require resource names to be formatted in a particular way. 
To assist with * these names, this class includes a format method for each type of name, and additionally a parse * method to extract the individual identifiers contained within names that are returned. * * <p>This class can be customized by passing in a custom instance of ImageAnnotatorSettings to * create(). For example: * * <p>To customize credentials: * * <pre>{@code * // This snippet has been automatically generated and should be regarded as a code template only. * // It will require modifications to work: * // - It may require correct/in-range values for request initialization. * // - It may require specifying regional endpoints when creating the service client as shown in * // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library * ImageAnnotatorSettings imageAnnotatorSettings = * ImageAnnotatorSettings.newBuilder() * .setCredentialsProvider(FixedCredentialsProvider.create(myCredentials)) * .build(); * ImageAnnotatorClient imageAnnotatorClient = ImageAnnotatorClient.create(imageAnnotatorSettings); * }</pre> * * <p>To customize the endpoint: * * <pre>{@code * // This snippet has been automatically generated and should be regarded as a code template only. * // It will require modifications to work: * // - It may require correct/in-range values for request initialization. * // - It may require specifying regional endpoints when creating the service client as shown in * // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library * ImageAnnotatorSettings imageAnnotatorSettings = * ImageAnnotatorSettings.newBuilder().setEndpoint(myEndpoint).build(); * ImageAnnotatorClient imageAnnotatorClient = ImageAnnotatorClient.create(imageAnnotatorSettings); * }</pre> * * <p>To use REST (HTTP1.1/JSON) transport (instead of gRPC) for sending and receiving requests over * the wire: * * <pre>{@code * // This snippet has been automatically generated and should be regarded as a code template only. 
* // It will require modifications to work: * // - It may require correct/in-range values for request initialization. * // - It may require specifying regional endpoints when creating the service client as shown in * // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library * ImageAnnotatorSettings imageAnnotatorSettings = * ImageAnnotatorSettings.newHttpJsonBuilder().build(); * ImageAnnotatorClient imageAnnotatorClient = ImageAnnotatorClient.create(imageAnnotatorSettings); * }</pre> * * <p>Please refer to the GitHub repository's samples for more quickstart code snippets. */ @BetaApi @Generated("by gapic-generator-java") public class ImageAnnotatorClient implements BackgroundResource { private final ImageAnnotatorSettings settings; private final ImageAnnotatorStub stub; private final OperationsClient httpJsonOperationsClient; private final com.google.longrunning.OperationsClient operationsClient; /** Constructs an instance of ImageAnnotatorClient with default settings. */ public static final ImageAnnotatorClient create() throws IOException { return create(ImageAnnotatorSettings.newBuilder().build()); } /** * Constructs an instance of ImageAnnotatorClient, using the given settings. The channels are * created based on the settings passed in, or defaults for any settings that are not set. */ public static final ImageAnnotatorClient create(ImageAnnotatorSettings settings) throws IOException { return new ImageAnnotatorClient(settings); } /** * Constructs an instance of ImageAnnotatorClient, using the given stub for making calls. This is * for advanced usage - prefer using create(ImageAnnotatorSettings). */ public static final ImageAnnotatorClient create(ImageAnnotatorStub stub) { return new ImageAnnotatorClient(stub); } /** * Constructs an instance of ImageAnnotatorClient, using the given settings. This is protected so * that it is easy to make a subclass, but otherwise, the static factory methods should be * preferred. 
*/ protected ImageAnnotatorClient(ImageAnnotatorSettings settings) throws IOException { this.settings = settings; this.stub = ((ImageAnnotatorStubSettings) settings.getStubSettings()).createStub(); this.operationsClient = com.google.longrunning.OperationsClient.create(this.stub.getOperationsStub()); this.httpJsonOperationsClient = OperationsClient.create(this.stub.getHttpJsonOperationsStub()); } protected ImageAnnotatorClient(ImageAnnotatorStub stub) { this.settings = null; this.stub = stub; this.operationsClient = com.google.longrunning.OperationsClient.create(this.stub.getOperationsStub()); this.httpJsonOperationsClient = OperationsClient.create(this.stub.getHttpJsonOperationsStub()); } public final ImageAnnotatorSettings getSettings() { return settings; } public ImageAnnotatorStub getStub() { return stub; } /** * Returns the OperationsClient that can be used to query the status of a long-running operation * returned by another API method call. */ public final com.google.longrunning.OperationsClient getOperationsClient() { return operationsClient; } /** * Returns the OperationsClient that can be used to query the status of a long-running operation * returned by another API method call. */ @BetaApi public final OperationsClient getHttpJsonOperationsClient() { return httpJsonOperationsClient; } // AUTO-GENERATED DOCUMENTATION AND METHOD. /** * Run image detection and annotation for a batch of images. * * <p>Sample code: * * <pre>{@code * // This snippet has been automatically generated and should be regarded as a code template only. * // It will require modifications to work: * // - It may require correct/in-range values for request initialization. 
* // - It may require specifying regional endpoints when creating the service client as shown in * // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library * try (ImageAnnotatorClient imageAnnotatorClient = ImageAnnotatorClient.create()) { * List<AnnotateImageRequest> requests = new ArrayList<>(); * BatchAnnotateImagesResponse response = imageAnnotatorClient.batchAnnotateImages(requests); * } * }</pre> * * @param requests Required. Individual image annotation requests for this batch. * @throws com.google.api.gax.rpc.ApiException if the remote call fails */ public final BatchAnnotateImagesResponse batchAnnotateImages( List<AnnotateImageRequest> requests) { BatchAnnotateImagesRequest request = BatchAnnotateImagesRequest.newBuilder().addAllRequests(requests).build(); return batchAnnotateImages(request); } // AUTO-GENERATED DOCUMENTATION AND METHOD. /** * Run image detection and annotation for a batch of images. * * <p>Sample code: * * <pre>{@code * // This snippet has been automatically generated and should be regarded as a code template only. * // It will require modifications to work: * // - It may require correct/in-range values for request initialization. * // - It may require specifying regional endpoints when creating the service client as shown in * // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library * try (ImageAnnotatorClient imageAnnotatorClient = ImageAnnotatorClient.create()) { * BatchAnnotateImagesRequest request = * BatchAnnotateImagesRequest.newBuilder() * .addAllRequests(new ArrayList<AnnotateImageRequest>()) * .build(); * BatchAnnotateImagesResponse response = imageAnnotatorClient.batchAnnotateImages(request); * } * }</pre> * * @param request The request object containing all of the parameters for the API call. 
* @throws com.google.api.gax.rpc.ApiException if the remote call fails */ public final BatchAnnotateImagesResponse batchAnnotateImages(BatchAnnotateImagesRequest request) { return batchAnnotateImagesCallable().call(request); } // AUTO-GENERATED DOCUMENTATION AND METHOD. /** * Run image detection and annotation for a batch of images. * * <p>Sample code: * * <pre>{@code * // This snippet has been automatically generated and should be regarded as a code template only. * // It will require modifications to work: * // - It may require correct/in-range values for request initialization. * // - It may require specifying regional endpoints when creating the service client as shown in * // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library * try (ImageAnnotatorClient imageAnnotatorClient = ImageAnnotatorClient.create()) { * BatchAnnotateImagesRequest request = * BatchAnnotateImagesRequest.newBuilder() * .addAllRequests(new ArrayList<AnnotateImageRequest>()) * .build(); * ApiFuture<BatchAnnotateImagesResponse> future = * imageAnnotatorClient.batchAnnotateImagesCallable().futureCall(request); * // Do something. * BatchAnnotateImagesResponse response = future.get(); * } * }</pre> */ public final UnaryCallable<BatchAnnotateImagesRequest, BatchAnnotateImagesResponse> batchAnnotateImagesCallable() { return stub.batchAnnotateImagesCallable(); } // AUTO-GENERATED DOCUMENTATION AND METHOD. /** * Service that performs image detection and annotation for a batch of files. Now only * "application/pdf", "image/tiff" and "image/gif" are supported. * * <p>This service will extract at most 5 (customers can specify which 5 in * AnnotateFileRequest.pages) frames (gif) or pages (pdf or tiff) from each file provided and * perform detection and annotation for each image extracted. * * <p>Sample code: * * <pre>{@code * // This snippet has been automatically generated and should be regarded as a code template only. 
* // It will require modifications to work: * // - It may require correct/in-range values for request initialization. * // - It may require specifying regional endpoints when creating the service client as shown in * // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library * try (ImageAnnotatorClient imageAnnotatorClient = ImageAnnotatorClient.create()) { * List<AnnotateFileRequest> requests = new ArrayList<>(); * BatchAnnotateFilesResponse response = imageAnnotatorClient.batchAnnotateFiles(requests); * } * }</pre> * * @param requests Required. The list of file annotation requests. Right now we support only one * AnnotateFileRequest in BatchAnnotateFilesRequest. * @throws com.google.api.gax.rpc.ApiException if the remote call fails */ public final BatchAnnotateFilesResponse batchAnnotateFiles(List<AnnotateFileRequest> requests) { BatchAnnotateFilesRequest request = BatchAnnotateFilesRequest.newBuilder().addAllRequests(requests).build(); return batchAnnotateFiles(request); } // AUTO-GENERATED DOCUMENTATION AND METHOD. /** * Service that performs image detection and annotation for a batch of files. Now only * "application/pdf", "image/tiff" and "image/gif" are supported. * * <p>This service will extract at most 5 (customers can specify which 5 in * AnnotateFileRequest.pages) frames (gif) or pages (pdf or tiff) from each file provided and * perform detection and annotation for each image extracted. * * <p>Sample code: * * <pre>{@code * // This snippet has been automatically generated and should be regarded as a code template only. * // It will require modifications to work: * // - It may require correct/in-range values for request initialization. 
* // - It may require specifying regional endpoints when creating the service client as shown in * // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library * try (ImageAnnotatorClient imageAnnotatorClient = ImageAnnotatorClient.create()) { * BatchAnnotateFilesRequest request = * BatchAnnotateFilesRequest.newBuilder() * .addAllRequests(new ArrayList<AnnotateFileRequest>()) * .build(); * BatchAnnotateFilesResponse response = imageAnnotatorClient.batchAnnotateFiles(request); * } * }</pre> * * @param request The request object containing all of the parameters for the API call. * @throws com.google.api.gax.rpc.ApiException if the remote call fails */ public final BatchAnnotateFilesResponse batchAnnotateFiles(BatchAnnotateFilesRequest request) { return batchAnnotateFilesCallable().call(request); } // AUTO-GENERATED DOCUMENTATION AND METHOD. /** * Service that performs image detection and annotation for a batch of files. Now only * "application/pdf", "image/tiff" and "image/gif" are supported. * * <p>This service will extract at most 5 (customers can specify which 5 in * AnnotateFileRequest.pages) frames (gif) or pages (pdf or tiff) from each file provided and * perform detection and annotation for each image extracted. * * <p>Sample code: * * <pre>{@code * // This snippet has been automatically generated and should be regarded as a code template only. * // It will require modifications to work: * // - It may require correct/in-range values for request initialization. 
* // - It may require specifying regional endpoints when creating the service client as shown in * // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library * try (ImageAnnotatorClient imageAnnotatorClient = ImageAnnotatorClient.create()) { * BatchAnnotateFilesRequest request = * BatchAnnotateFilesRequest.newBuilder() * .addAllRequests(new ArrayList<AnnotateFileRequest>()) * .build(); * ApiFuture<BatchAnnotateFilesResponse> future = * imageAnnotatorClient.batchAnnotateFilesCallable().futureCall(request); * // Do something. * BatchAnnotateFilesResponse response = future.get(); * } * }</pre> */ public final UnaryCallable<BatchAnnotateFilesRequest, BatchAnnotateFilesResponse> batchAnnotateFilesCallable() { return stub.batchAnnotateFilesCallable(); } // AUTO-GENERATED DOCUMENTATION AND METHOD. /** * Run asynchronous image detection and annotation for a list of images. * * <p>Progress and results can be retrieved through the `google.longrunning.Operations` interface. * `Operation.metadata` contains `OperationMetadata` (metadata). `Operation.response` contains * `AsyncBatchAnnotateImagesResponse` (results). * * <p>This service will write image annotation outputs to json files in customer GCS bucket, each * json file containing BatchAnnotateImagesResponse proto. * * <p>Sample code: * * <pre>{@code * // This snippet has been automatically generated and should be regarded as a code template only. * // It will require modifications to work: * // - It may require correct/in-range values for request initialization. 
* // - It may require specifying regional endpoints when creating the service client as shown in * // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library * try (ImageAnnotatorClient imageAnnotatorClient = ImageAnnotatorClient.create()) { * List<AnnotateImageRequest> requests = new ArrayList<>(); * OutputConfig outputConfig = OutputConfig.newBuilder().build(); * AsyncBatchAnnotateImagesResponse response = * imageAnnotatorClient.asyncBatchAnnotateImagesAsync(requests, outputConfig).get(); * } * }</pre> * * @param requests Required. Individual image annotation requests for this batch. * @param outputConfig Required. The desired output location and metadata (e.g. format). * @throws com.google.api.gax.rpc.ApiException if the remote call fails */ public final OperationFuture<AsyncBatchAnnotateImagesResponse, OperationMetadata> asyncBatchAnnotateImagesAsync( List<AnnotateImageRequest> requests, OutputConfig outputConfig) { AsyncBatchAnnotateImagesRequest request = AsyncBatchAnnotateImagesRequest.newBuilder() .addAllRequests(requests) .setOutputConfig(outputConfig) .build(); return asyncBatchAnnotateImagesAsync(request); } // AUTO-GENERATED DOCUMENTATION AND METHOD. /** * Run asynchronous image detection and annotation for a list of images. * * <p>Progress and results can be retrieved through the `google.longrunning.Operations` interface. * `Operation.metadata` contains `OperationMetadata` (metadata). `Operation.response` contains * `AsyncBatchAnnotateImagesResponse` (results). * * <p>This service will write image annotation outputs to json files in customer GCS bucket, each * json file containing BatchAnnotateImagesResponse proto. * * <p>Sample code: * * <pre>{@code * // This snippet has been automatically generated and should be regarded as a code template only. * // It will require modifications to work: * // - It may require correct/in-range values for request initialization. 
* // - It may require specifying regional endpoints when creating the service client as shown in * // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library * try (ImageAnnotatorClient imageAnnotatorClient = ImageAnnotatorClient.create()) { * AsyncBatchAnnotateImagesRequest request = * AsyncBatchAnnotateImagesRequest.newBuilder() * .addAllRequests(new ArrayList<AnnotateImageRequest>()) * .setOutputConfig(OutputConfig.newBuilder().build()) * .build(); * AsyncBatchAnnotateImagesResponse response = * imageAnnotatorClient.asyncBatchAnnotateImagesAsync(request).get(); * } * }</pre> * * @param request The request object containing all of the parameters for the API call. * @throws com.google.api.gax.rpc.ApiException if the remote call fails */ public final OperationFuture<AsyncBatchAnnotateImagesResponse, OperationMetadata> asyncBatchAnnotateImagesAsync(AsyncBatchAnnotateImagesRequest request) { return asyncBatchAnnotateImagesOperationCallable().futureCall(request); } // AUTO-GENERATED DOCUMENTATION AND METHOD. /** * Run asynchronous image detection and annotation for a list of images. * * <p>Progress and results can be retrieved through the `google.longrunning.Operations` interface. * `Operation.metadata` contains `OperationMetadata` (metadata). `Operation.response` contains * `AsyncBatchAnnotateImagesResponse` (results). * * <p>This service will write image annotation outputs to json files in customer GCS bucket, each * json file containing BatchAnnotateImagesResponse proto. * * <p>Sample code: * * <pre>{@code * // This snippet has been automatically generated and should be regarded as a code template only. * // It will require modifications to work: * // - It may require correct/in-range values for request initialization. 
* // - It may require specifying regional endpoints when creating the service client as shown in * // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library * try (ImageAnnotatorClient imageAnnotatorClient = ImageAnnotatorClient.create()) { * AsyncBatchAnnotateImagesRequest request = * AsyncBatchAnnotateImagesRequest.newBuilder() * .addAllRequests(new ArrayList<AnnotateImageRequest>()) * .setOutputConfig(OutputConfig.newBuilder().build()) * .build(); * OperationFuture<AsyncBatchAnnotateImagesResponse, OperationMetadata> future = * imageAnnotatorClient.asyncBatchAnnotateImagesOperationCallable().futureCall(request); * // Do something. * AsyncBatchAnnotateImagesResponse response = future.get(); * } * }</pre> */ public final OperationCallable< AsyncBatchAnnotateImagesRequest, AsyncBatchAnnotateImagesResponse, OperationMetadata> asyncBatchAnnotateImagesOperationCallable() { return stub.asyncBatchAnnotateImagesOperationCallable(); } // AUTO-GENERATED DOCUMENTATION AND METHOD. /** * Run asynchronous image detection and annotation for a list of images. * * <p>Progress and results can be retrieved through the `google.longrunning.Operations` interface. * `Operation.metadata` contains `OperationMetadata` (metadata). `Operation.response` contains * `AsyncBatchAnnotateImagesResponse` (results). * * <p>This service will write image annotation outputs to json files in customer GCS bucket, each * json file containing BatchAnnotateImagesResponse proto. * * <p>Sample code: * * <pre>{@code * // This snippet has been automatically generated and should be regarded as a code template only. * // It will require modifications to work: * // - It may require correct/in-range values for request initialization. 
* // - It may require specifying regional endpoints when creating the service client as shown in * // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library * try (ImageAnnotatorClient imageAnnotatorClient = ImageAnnotatorClient.create()) { * AsyncBatchAnnotateImagesRequest request = * AsyncBatchAnnotateImagesRequest.newBuilder() * .addAllRequests(new ArrayList<AnnotateImageRequest>()) * .setOutputConfig(OutputConfig.newBuilder().build()) * .build(); * ApiFuture<Operation> future = * imageAnnotatorClient.asyncBatchAnnotateImagesCallable().futureCall(request); * // Do something. * Operation response = future.get(); * } * }</pre> */ public final UnaryCallable<AsyncBatchAnnotateImagesRequest, Operation> asyncBatchAnnotateImagesCallable() { return stub.asyncBatchAnnotateImagesCallable(); } // AUTO-GENERATED DOCUMENTATION AND METHOD. /** * Run asynchronous image detection and annotation for a list of generic files, such as PDF files, * which may contain multiple pages and multiple images per page. Progress and results can be * retrieved through the `google.longrunning.Operations` interface. `Operation.metadata` contains * `OperationMetadata` (metadata). `Operation.response` contains `AsyncBatchAnnotateFilesResponse` * (results). * * <p>Sample code: * * <pre>{@code * // This snippet has been automatically generated and should be regarded as a code template only. * // It will require modifications to work: * // - It may require correct/in-range values for request initialization. 
* // - It may require specifying regional endpoints when creating the service client as shown in * // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library * try (ImageAnnotatorClient imageAnnotatorClient = ImageAnnotatorClient.create()) { * List<AsyncAnnotateFileRequest> requests = new ArrayList<>(); * AsyncBatchAnnotateFilesResponse response = * imageAnnotatorClient.asyncBatchAnnotateFilesAsync(requests).get(); * } * }</pre> * * @param requests Required. Individual async file annotation requests for this batch. * @throws com.google.api.gax.rpc.ApiException if the remote call fails */ public final OperationFuture<AsyncBatchAnnotateFilesResponse, OperationMetadata> asyncBatchAnnotateFilesAsync(List<AsyncAnnotateFileRequest> requests) { AsyncBatchAnnotateFilesRequest request = AsyncBatchAnnotateFilesRequest.newBuilder().addAllRequests(requests).build(); return asyncBatchAnnotateFilesAsync(request); } // AUTO-GENERATED DOCUMENTATION AND METHOD. /** * Run asynchronous image detection and annotation for a list of generic files, such as PDF files, * which may contain multiple pages and multiple images per page. Progress and results can be * retrieved through the `google.longrunning.Operations` interface. `Operation.metadata` contains * `OperationMetadata` (metadata). `Operation.response` contains `AsyncBatchAnnotateFilesResponse` * (results). * * <p>Sample code: * * <pre>{@code * // This snippet has been automatically generated and should be regarded as a code template only. * // It will require modifications to work: * // - It may require correct/in-range values for request initialization. 
* // - It may require specifying regional endpoints when creating the service client as shown in * // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library * try (ImageAnnotatorClient imageAnnotatorClient = ImageAnnotatorClient.create()) { * AsyncBatchAnnotateFilesRequest request = * AsyncBatchAnnotateFilesRequest.newBuilder() * .addAllRequests(new ArrayList<AsyncAnnotateFileRequest>()) * .build(); * AsyncBatchAnnotateFilesResponse response = * imageAnnotatorClient.asyncBatchAnnotateFilesAsync(request).get(); * } * }</pre> * * @param request The request object containing all of the parameters for the API call. * @throws com.google.api.gax.rpc.ApiException if the remote call fails */ public final OperationFuture<AsyncBatchAnnotateFilesResponse, OperationMetadata> asyncBatchAnnotateFilesAsync(AsyncBatchAnnotateFilesRequest request) { return asyncBatchAnnotateFilesOperationCallable().futureCall(request); } // AUTO-GENERATED DOCUMENTATION AND METHOD. /** * Run asynchronous image detection and annotation for a list of generic files, such as PDF files, * which may contain multiple pages and multiple images per page. Progress and results can be * retrieved through the `google.longrunning.Operations` interface. `Operation.metadata` contains * `OperationMetadata` (metadata). `Operation.response` contains `AsyncBatchAnnotateFilesResponse` * (results). * * <p>Sample code: * * <pre>{@code * // This snippet has been automatically generated and should be regarded as a code template only. * // It will require modifications to work: * // - It may require correct/in-range values for request initialization. 
* // - It may require specifying regional endpoints when creating the service client as shown in * // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library * try (ImageAnnotatorClient imageAnnotatorClient = ImageAnnotatorClient.create()) { * AsyncBatchAnnotateFilesRequest request = * AsyncBatchAnnotateFilesRequest.newBuilder() * .addAllRequests(new ArrayList<AsyncAnnotateFileRequest>()) * .build(); * OperationFuture<AsyncBatchAnnotateFilesResponse, OperationMetadata> future = * imageAnnotatorClient.asyncBatchAnnotateFilesOperationCallable().futureCall(request); * // Do something. * AsyncBatchAnnotateFilesResponse response = future.get(); * } * }</pre> */ public final OperationCallable< AsyncBatchAnnotateFilesRequest, AsyncBatchAnnotateFilesResponse, OperationMetadata> asyncBatchAnnotateFilesOperationCallable() { return stub.asyncBatchAnnotateFilesOperationCallable(); } // AUTO-GENERATED DOCUMENTATION AND METHOD. /** * Run asynchronous image detection and annotation for a list of generic files, such as PDF files, * which may contain multiple pages and multiple images per page. Progress and results can be * retrieved through the `google.longrunning.Operations` interface. `Operation.metadata` contains * `OperationMetadata` (metadata). `Operation.response` contains `AsyncBatchAnnotateFilesResponse` * (results). * * <p>Sample code: * * <pre>{@code * // This snippet has been automatically generated and should be regarded as a code template only. * // It will require modifications to work: * // - It may require correct/in-range values for request initialization. 
* // - It may require specifying regional endpoints when creating the service client as shown in * // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library * try (ImageAnnotatorClient imageAnnotatorClient = ImageAnnotatorClient.create()) { * AsyncBatchAnnotateFilesRequest request = * AsyncBatchAnnotateFilesRequest.newBuilder() * .addAllRequests(new ArrayList<AsyncAnnotateFileRequest>()) * .build(); * ApiFuture<Operation> future = * imageAnnotatorClient.asyncBatchAnnotateFilesCallable().futureCall(request); * // Do something. * Operation response = future.get(); * } * }</pre> */ public final UnaryCallable<AsyncBatchAnnotateFilesRequest, Operation> asyncBatchAnnotateFilesCallable() { return stub.asyncBatchAnnotateFilesCallable(); } @Override public final void close() { stub.close(); } @Override public void shutdown() { stub.shutdown(); } @Override public boolean isShutdown() { return stub.isShutdown(); } @Override public boolean isTerminated() { return stub.isTerminated(); } @Override public void shutdownNow() { stub.shutdownNow(); } @Override public boolean awaitTermination(long duration, TimeUnit unit) throws InterruptedException { return stub.awaitTermination(duration, unit); } }
google/j2objc
35,497
translator/src/test/java/com/google/devtools/j2objc/translate/InnerClassExtractorTest.java
/* * Copyright 2011 Google Inc. All Rights Reserved. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.google.devtools.j2objc.translate; import com.google.devtools.j2objc.GenerationTest; import com.google.devtools.j2objc.Options.MemoryManagementOption; import com.google.devtools.j2objc.ast.AbstractTypeDeclaration; import com.google.devtools.j2objc.ast.CompilationUnit; import java.io.IOException; import java.util.List; /** * Unit tests for {@link InnerClassExtractor}. * * @author Tom Ball */ public class InnerClassExtractorTest extends GenerationTest { @Override protected void setUp() throws IOException { super.setUp(); // Reference counting by default, change for ARC-specific tests. 
options.setMemoryManagementOption(MemoryManagementOption.REFERENCE_COUNTING); } protected List<AbstractTypeDeclaration> translateClassBody(String testSource) { String source = "public class Test { " + testSource + " }"; CompilationUnit unit = translateType("Test", source); return unit.getTypes(); } public void testSimpleInnerClass() throws IOException { String source = "public class A { class B { int test() { return o.hashCode(); }} Object o; }"; String translation = translateSourceFile(source, "A", "A.h"); assertTranslation(translation, "- (instancetype)initWithA:(A *)outer$;"); translation = getTranslatedFile("A.m"); assertTranslation(translation, "A *this$0_;"); assertTranslation(translation, "[nil_chk(this$0_->o_) hash]"); assertTranslation(translation, "JreStrongAssign(&self->this$0_, outer$);"); } public void testWeakSimpleInnerClass() throws IOException { String source = "import com.google.j2objc.annotations.WeakOuter; " + "public class A { @WeakOuter class B { int test() { return o.hashCode(); }} Object o; }"; String translation = translateSourceFile(source, "A", "A.m"); assertTranslation(translation, "WEAK_ A *this$0_;"); assertTranslation(translation, "this$0_ = outer$;"); } public void testWeakArcSimpleInnerClass() throws IOException { options.setMemoryManagementOption(MemoryManagementOption.ARC); String source = "import com.google.j2objc.annotations.WeakOuter; " + "public class A { Object o;" + " @WeakOuter class B { int test() { return o.hashCode(); } Object o2; }}"; String translation = translateSourceFile(source, "A", "A.h"); assertTranslation(translation, "id o_;"); assertTranslation(translation, "id o2_;"); translation = getTranslatedFile("A.m"); assertTranslation(translation, "WEAK_ A *this$0_;"); } public void testInnerInnerClass() throws IOException { String source = "public class A { class B { " + "class C {int test() { return o.hashCode(); }}} Object o; }"; String translation = translateSourceFile(source, "A", "A.h"); 
assertTranslation(translation, "- (instancetype)initWithA:(A *)outer$;"); assertTranslation(translation, "- (instancetype)initWithA_B:(A_B *)outer$;"); translation = getTranslatedFile("A.m"); assertTranslation(translation, "A *this$0_;"); assertTranslation(translation, "A_B *this$0_;"); assertTranslation(translation, "[nil_chk(this$0_->this$0_->o_) hash]"); } public void testWeakInnerInnerClass() throws IOException { String source = "public class A { class B { " + "@com.google.j2objc.annotations.WeakOuter class C {" + " int test() { return o.hashCode(); }}} Object o; }"; String translation = translateSourceFile(source, "A", "A.m"); assertTranslation(translation, "A *this$0_;"); assertTranslation(translation, "WEAK_ A_B *this$0_;"); assertTranslation(translation, "[nil_chk(this$0_->this$0_->o_) hash]"); } public void testInnerMethodAnonymousClass() throws IOException { String source = "public class A {" + " abstract class C { public abstract void foo(); }" + " class B { " + " public void foo(final int j) {" + " C r = new C() {" + " public void foo() { int hash = j + o.hashCode(); }" + " };" + " }" + " }" + " Object o;" + "}"; String translation = translateSourceFile(source, "A", "A.h"); assertTranslation(translation, "- (instancetype)initWithA:(A *)outer$;"); translation = getTranslatedFile("A.m"); assertTranslatedLines(translation, "- (instancetype)initWithA_B:(A_B *)outer$", "withInt:(int32_t)capture$0;"); assertTranslation(translation, "A *this$0_;"); assertTranslation(translation, "A_B *this$1_;"); assertTranslation(translation, "int32_t val$j_;"); assertTranslatedLines(translation, "void A_B_1_initWithA_B_withInt_(A_B_1 *self, A_B *outer$, int32_t capture$0) {", " JreStrongAssign(&self->this$1_, outer$);", " self->val$j_ = capture$0;", " A_C_initWithA_(self, outer$->this$0_);", "}"); assertTranslation(translation, "[nil_chk(this$1_->this$0_->o_) hash]"); } /** * Verify that a static inner class is extracted. 
*/ public void testStaticInnerClass() throws IOException { String translation = translateSourceFile( "class Test { static class Foo { int i; Foo() { this(0); } Foo(int i) { this.i = i; } } }", "Test", "Test.h"); assertTranslatedLines(translation, "@interface Test_Foo : NSObject {", "@public", "int32_t i_;", "}"); translation = getTranslatedFile("Test.m"); assertTranslatedLines(translation, "void Test_Foo_init(Test_Foo *self) {", " Test_Foo_initWithInt_(self, 0);", "}"); assertTranslatedLines(translation, "void Test_Foo_initWithInt_(Test_Foo *self, int32_t i) {", " NSObject_init(self);", " self->i_ = i;", "}"); } /** * Verify that an inner class is moved to the compilation unit's types list. */ public void testInnerClassExtracted() { List<AbstractTypeDeclaration> types = translateClassBody("class Foo { }"); assertEquals(2, types.size()); assertEquals("Test", types.get(0).getName().getIdentifier()); assertEquals("Foo", types.get(1).getName().getIdentifier()); } /** * Regression test: verify that references to class members of a type with * an inner class aren't disturbed. 
*/ public void testStaticMethodInvokingStaticMethodWithInnerClass() throws IOException { String translation = translateSourceFile( "class Test { public static int test(Object object) { return 0; }" + "public static int test(Object object, Object foo) {" + " if (foo == null) { return Test.test(object); } return 1; } " + "private class Inner {} }", "Test", "Test.m"); assertTranslation(translation, "return Test_testWithId_(object);"); } public void testInnerClassInvokingExplicitOuterMethod() throws IOException { String translation = translateSourceFile( "class Test { public int size() { return 0; } " + "class Inner { int size() { return Test.this.size(); } } }", "Test", "Test.m"); assertTranslation(translation, "JreStrongAssign(&self->this$0_, outer$);"); assertTranslation(translation, "return [this$0_ size];"); } public void testInnerClassInvokingOuterMethod() throws IOException { String translation = translateSourceFile( "class Test { public int size() { return 0; } " + "class Inner { int getCount() { return size(); } } }", "Test", "Test.m"); assertTranslation(translation, "return [this$0_ size];"); } public void testInnerSubclassInvokingOuterMethod() throws IOException { String translation = translateSourceFile( "class Test { public int size() { return 0; } public void add(int n) {} class Inner {} " + "class Innermost { void test() { Test.this.add(size()); } } }", "Test", "Test.m"); assertTranslation(translation, "JreStrongAssign(&self->this$0_, outer$);"); assertTranslation(translation, "[this$0_ addWithInt:[this$0_ size]];"); } public void testInnerClassDefaultInitialization() throws IOException { String translation = translateSourceFile( "class Test { Inner inner = new Inner(true); public int size() { return 0; }" + "class Inner { Inner(boolean b) {} int size() { return Test.this.size(); } } }", "Test", "Test.m"); assertTranslation(translation, "JreStrongAssignAndConsume(&self->inner_, " + "new_Test_Inner_initWithTest_withBoolean_(self, true));"); 
assertTranslation(translation, "JreStrongAssign(&self->this$0_, outer$);"); } public void testOuterClassAccessOuterVars() throws IOException { String translation = translateSourceFile( "class Test { int elementCount;" + "public Test() { elementCount = 0; }" + "private class Iterator { public void remove() { elementCount--; } } }", "Test", "Test.m"); assertTranslatedLines(translation, "elementCount_ = 0;"); assertTranslatedLines(translation, "- (void)remove {", "this$0_->elementCount_--;"); } public void testOuterInterfaceMethodReference() throws IOException { String source = "class Test { " + " interface Foo { void foo(); } " + " abstract class Bar implements Foo { " + " class Inner { Inner() { foo(); } } " + " class Inner2 extends Inner { void bar() { foo(); } } " + " Inner makeInner() { return new Inner(); } }" + " public void test() { " + " Bar bar = new Bar() { public void foo() { } };" + " Bar.Inner inner = bar.new Inner(); } }"; String translation = translateSourceFile(source, "Test", "Test.m"); assertTranslation(translation, "- (void)bar {\n [this$1_ foo]"); assertTranslation(translation, "- (Test_Bar_Inner *)makeInner {\n" + " return create_Test_Bar_Inner_initWithTest_Bar_(self);"); assertTranslation(translation, "create_Test_Bar_Inner_initWithTest_Bar_(bar);"); } public void testMultipleThisReferences() throws IOException { String source = "class A { private int x = 0; " + " interface Foo { void doSomething(); } " + " class Inner { private int x = 1; " + " public void blah() { " + " new Foo() { public void doSomething() { " + " Inner.this.x = 2; A.this.x = 3; }}; }}}"; CompilationUnit unit = translateType("A", source); List<AbstractTypeDeclaration> types = unit.getTypes(); assertEquals(4, types.size()); String translation = translateSourceFile(source, "A", "A.m"); // Anonymous class constructor in Inner.blah() assertTranslation(translation, "create_A_Inner_1_initWithA_Inner_(self)"); // A.Inner.x referred to in anonymous Foo assertTranslation(translation, 
"this$0_->x_ = 2"); // A.x referred to in anonymous Foo assertTranslation(translation, "this$0_->this$0_->x_ = 3"); // A.Inner init in anonymous Foo's constructor assertTranslation(translation, "JreStrongAssign(&self->this$0_, outer$)"); } /** * This test differs from the last one only in the addition of another * 'this' reference before the anonymous class creation. */ public void testMultipleThisReferencesWithPreviousReference() throws IOException { String source = "class A { private int x = 0; " + " interface Foo { void doSomething(); } " + " class Inner { private int x = 1; " + " public void blah() { " + " A.this.x = 2; " + " new Foo() { public void doSomething() { " + " Inner.this.x = 3; A.this.x = 4; }}; }}}"; CompilationUnit unit = translateType("A", source); List<AbstractTypeDeclaration> types = unit.getTypes(); assertEquals(4, types.size()); String translation = translateSourceFile(source, "A", "A.m"); // Anonymous class constructor in Inner.blah() assertTranslation(translation, "create_A_Inner_1_initWithA_Inner_(self)"); // A.x referred to in A.Inner. assertTranslation(translation, "this$0_->x_ = 2"); // A.Inner.x referred to in anonymous Foo. 
assertTranslation(translation, "this$0_->x_ = 3"); // A.x referred to in anonymous Foo assertTranslation(translation, "this$0_->this$0_->x_ = 4"); // A.Inner init in anonymous Foo's constructor assertTranslation(translation, "JreStrongAssign(&self->this$0_, outer$)"); } public void testOuterMethodReference() throws IOException { String source = "class Test { " + " interface Foo { void foo(); } " + " class Inner { " + " void bar() { " + " final int x = 0; final int y = 0; " + " Foo foo = new Foo() { " + " public void foo() { if (x ==0) mumble(y); } }; } }" + " private void mumble(int y) { } }"; String translation = translateSourceFile(source, "Test", "Test.m"); assertTranslation(translation, "Test_mumbleWithInt_(this$0_->this$0_, 0)"); } public void testInnerSubClassOfGenericClassInner() throws IOException { String source = "class Test { " + "class A<E extends A<E>.Inner> { public class Inner { } } " + "class B extends A<B.BInner> { public class BInner extends A<B.BInner>.Inner { } } }"; String translation = translateSourceFile(source, "Test", "Test.h"); assertTranslation(translation, "@interface Test_B_BInner : Test_A_Inner"); } public void testGenericInnerSubClassOfGenericClassGenericInner() throws IOException { String source = "class Test<E> { " + "class A<E> { } class B<E> extends A<E> { B(int i) { } } }"; String translation = translateSourceFile(source, "Test", "Test.m"); assertTranslation(translation, "- (instancetype)initWithTest:(Test *)outer$\n" + " withInt:(int32_t)i"); } public void testInnerSubClassOfOtherInnerWithOuterRefs() throws IOException { String source = "class Test { " + "class A { " + " public void foo() { } " + " public class Inner { void test() { foo(); } } } " + "class B extends A { " + " public class BInner extends A.Inner { void test() { foo(); } } } " + " public static void main(String[] args) { B b = new Test().new B(); }}"; String translation = translateSourceFile(source, "Test", "Test.m"); // Check that outer fields are added to 
A.Inner and B.BInner. assertTranslation(translation, "@interface Test_A_Inner () {\n @public\n Test_A *this$0_;"); assertTranslation(translation, "@interface Test_B_BInner () {\n @public\n Test_B *this$1_;"); // Check that B has a constructor that correctly calls constructor of A // with right outer. assertTranslatedLines(translation, "void Test_B_initWithTest_(Test_B *self, Test *outer$) {", " Test_A_initWithTest_(self, outer$);", "}"); } public void testInnerClassQualifiedAndUnqualfiedOuterReferences() throws IOException { String source = "class Test { " + " public int i = 0; " + " class Inner { " + " void foo(int i) { Test.this.i = i; } " + " void bar() { int j = i; } } }"; String translation = translateSourceFile(source, "Test", "Test.m"); assertTranslation(translation, "- (void)fooWithInt:(int32_t)i {\n this$0_->i_ ="); assertTranslation(translation, "- (void)bar {\n int32_t j = this$0_->i_"); } public void testInnerClassExtendsAnotherInner() throws IOException { String translation = translateSourceFile( "class Test { " + " Integer i = 1; " + " class Inner1 { } " + " class Inner2 extends Inner1 { " + " int j = 1; " + " public int foo() { return i + j; } } }", "Test", "Test.m"); assertTranslation(translation, "Test *this$1"); // Inner2's outer reference. 
assertTranslation(translation, "[((JavaLangInteger *) nil_chk(this$1_->i_)) intValue] + j_"); } public void testInnerClassInstantiatesAnotherInner() throws IOException { String translation = translateSourceFile( "class Test { " + " Integer i = 1; " + " class Inner1 { public int foo() { return i + 1; } } " + " class Inner2 { Inner1 inner1 = new Inner1(); } }", "Test", "Test.m"); assertTranslation(translation, "new_Test_Inner1_initWithTest_(outer$)"); translation = getTranslatedFile("Test.h"); assertTranslation(translation, "@interface Test_Inner2 : NSObject {\n" + " @public\n" + " Test_Inner1 *inner1_;"); } public void testInnerClassWithInnerSuperClass() throws IOException { String translation = translateSourceFile( "class Test { " + " class Inner1 { public Inner1(int n) { } } " + " class Inner2 extends Inner1 { public Inner2(int n, long l) { super(n); } } }", "Test", "Test.m"); assertTranslation(translation, "Test_Inner1_initWithTest_withInt_(self, outer$, n);"); } public void testInnerSubClassOfOtherInnerWithOuterRefsExtraction() throws IOException { String source = "public class Test { " + "int i; " + "class A { " + " private void foo() { i++; } " + " public class Inner { Inner() { foo(); } } } " + "class B extends A { " + " public class BInner extends A.Inner { } } " + "public static void main(String[] args) { B b = new Test().new B(); }}"; String translation = translateSourceFile(source, "Test", "Test.m"); // Verify that B's translation has the Test field declared. assertTranslation(translation, "Test *this$0_;"); // Verify that A has a Test field (this$0). assertTranslatedLines(translation, "@interface Test_A () {", "@public", "Test *this$0_;", "}"); // Verify that B does not have a Test field. assertNotInTranslation(translation, "@interface Test_B ()"); // Verify that main method creates a new instanceof B associated with // a new instance of Test. 
assertTranslatedLines(translation, "void Test_mainWithNSStringArray_(IOSObjectArray *args) {", "Test_initialize();", "Test_B *b = create_Test_B_initWithTest_(create_Test_init());"); // Verify that BInner's constructor takes a B instance and correctly calls // the super constructor. assertTranslatedLines(translation, "void Test_B_BInner_initWithTest_B_(Test_B_BInner *self, Test_B *outer$) {", " Test_A_Inner_initWithTest_A_(self, outer$);", "}"); } // Identical sample code to above test, except the order of B and A is switched. public void testInnerSubClassOfOtherInnerWithOuterRefsExtraction2() throws IOException { String source = "public class Test { " + "int i; " + "class B extends A { " + " public class BInner extends A.Inner { } } " + "class A { " + " private void foo() { i++; } " + " public class Inner { Inner() { foo(); } } } " + "public static void main(String[] args) { B b = new Test().new B(); }}"; // Verify that B's translation has the Test field declared. String translation = translateSourceFile(source, "Test", "Test.m"); assertTranslation(translation, "Test *this$0_;"); // Verify that A has a Test field (this$0). assertTranslatedLines(translation, "@interface Test_A () {", "@public", "Test *this$0_;", "}"); // Verify that B does not have a Test field. assertNotInTranslation(translation, "@interface Test_B () {"); // Verify that main method creates a new instanceof B associated with // a new instance of Test. assertTranslatedLines(translation, "void Test_mainWithNSStringArray_(IOSObjectArray *args) {", "Test_initialize();", "Test_B *b = create_Test_B_initWithTest_(create_Test_init());"); // Verify that BInner's constructor takes a B instance and correctly calls // the super constructor. assertTranslatedLines(translation, "void Test_B_BInner_initWithTest_B_(Test_B_BInner *self, Test_B *outer$) {", " Test_A_Inner_initWithTest_A_(self, outer$);", "}"); } // Identical sample code to above test, except A is a generic class. 
public void testInnerSubClassOfOtherInnerWithOuterRefsWithGenerics() throws IOException { String source = "public class Test { " + "class B extends A<B.BInner> { " + " public class BInner extends A<B.BInner>.Inner { BInner() { super(null); } } } " + "class A<T extends A<T>.Inner> { " + " private void foo() { } " + " public class Inner { Inner(T t) { foo(); } } } " + "public static void main(String[] args) { B b = new Test().new B(); }}"; String translation = translateSourceFile(source, "Test", "Test.m"); // Make sure that the call to super(null) in B.BInner's constructor // is translated with the right keyword for the generic second parameter. assertTranslation(translation, "Test_A_Inner_initWithTest_A_withTest_A_Inner_(self, outer$, nil);"); } public void testStaticImportReferenceInInnerClass() throws IOException { String translation = translateSourceFile( "import static java.lang.Character.isDigit; public class Test { class Inner { " + " public void foo() { boolean b = isDigit('c'); } } }", "Test", "Test.m"); assertTranslation(translation, "JavaLangCharacter_isDigitWithChar_('c')"); } public void testStaticReferenceInInnerClass() throws IOException { String translation = translateSourceFile( "public class Test { public static void foo() { } class Inner { " + " public void bar() { foo(); } } }", "Test", "Test.m"); assertTranslation(translation, "Test_foo()"); } public void testMethodInnerClass() throws IOException { String source = "public class A { void foo() { class MyRunnable implements Runnable {" + "public void run() {} }}}"; String translation = translateSourceFile(source, "A", "A.m"); assertTranslation(translation, "@interface A_1MyRunnable : NSObject < JavaLangRunnable >"); assertNotInTranslation(translation, "A *this"); } public void testInnerClassConstructor() throws IOException { String source = "public class A { class B { Object test() { return new B(); }}}"; String translation = translateSourceFile(source, "A", "A.m"); assertTranslation(translation, 
"return create_A_B_initWithA_(this$0_);"); } public void testMethodInnerClassWithSameName() throws IOException { String source = "public class A { class MyClass {} void foo() { class MyClass {}}}"; String translation = translateSourceFile(source, "A", "A.h"); assertTranslation(translation, "@interface A_MyClass"); translation = getTranslatedFile("A.m"); assertTranslation(translation, "@interface A_1MyClass"); } public void testOuterThisReferenceInInner() throws IOException { String translation = translateSourceFile( "class Test { " + " class Inner { Inner(int i) { } Inner foo() { return new Inner(1); } } " + " public Inner bar() { return new Inner(2); } }", "Test", "Test.m"); assertTranslation(translation, "create_Test_Inner_initWithTest_withInt_(this$0_, 1)"); assertTranslation(translation, "create_Test_Inner_initWithTest_withInt_(self, 2)"); } public void testInnerThisReferenceInInnerAsFieldAccess() throws IOException { String translation = translateSourceFile( "class Test { " + " class Inner { int i = 0; Inner() { Inner.this.i = 1; } } }", "Test", "Test.m"); assertTranslation(translation, "self->i_ = 1"); } public void testInnerThisReferenceInInnerAsThisExpression() throws IOException { String translation = translateSourceFile( "class Test { " + " static void foo(Inner i) { } " + " class Inner { Inner() { foo(Inner.this); } } }", "Test", "Test.m"); assertTranslation(translation, "Test_fooWithTest_Inner_(self)"); } // Verify that an anonymous class in a static initializer does not reference // instance. 
public void testNoOuterInStaticInitializer() throws IOException { String source = "import java.util.*; " + "public class A { static { foo(new Enumeration() { " + " public boolean hasMoreElements() { return false; }" + " public Object nextElement() { return null; }}); }" + " public static void foo(Object o) { } }"; String translation = translateSourceFile(source, "A", "A.h"); assertNotInTranslation(translation, "this$0_"); translation = getTranslatedFile("A.m"); assertNotInTranslation(translation, "this$0_"); assertTranslation(translation, "A_fooWithId_(create_A_1_init())"); } // Verify that an anonymous class assigned to a static field does not // reference instance. public void testNoOuterWhenAssignedToStaticField() throws IOException { String source = "import java.util.*; " + "public class A { static Enumeration test = new Enumeration() { " + " public boolean hasMoreElements() { return false; }" + " public Object nextElement() { return null; }}; }"; String translation = translateSourceFile(source, "A", "A.h"); assertFalse(translation.contains("this$0_")); translation = getTranslatedFile("A.m"); assertFalse(translation.contains("this$0_")); assertTranslation(translation, "JreStrongAssignAndConsume(&A_test, new_A_1_init());"); } // Verify that an anonymous class in a static method does not reference // instance. 
public void testNoOuterWhenInStaticMethod() throws IOException { String source = "import java.util.*; " + "public class A { static Enumeration test(Collection collection) { " + " final Collection c = collection; " + " return new Enumeration() { " + " Iterator it = c.iterator(); " + " public boolean hasMoreElements() { return it.hasNext(); }" + " public Object nextElement() { return it.next(); }}; }}"; String translation = translateSourceFile(source, "A", "A.m"); assertNotInTranslation(translation, "this$0_"); assertTranslation(translation, "- (instancetype)initWithJavaUtilCollection:(id<JavaUtilCollection>)capture$0;"); assertTranslation(translation, "[((id<JavaUtilCollection>) nil_chk(capture$0)) iterator]"); assertTranslation(translation, "return create_A_1_initWithJavaUtilCollection_(c);"); assertTranslation(translation, "- (instancetype)initWithJavaUtilCollection:(id<JavaUtilCollection>)capture$0 {"); } public void testInnerAccessingOuterArrayLength() throws IOException { String source = "public class A<E> { transient E[] elements; " + "private class B implements java.util.Iterator<E> { " + "public boolean hasNext() { return elements.length > 0; } " + "public E next() { return null; }" + "public void remove() {} }}"; String translation = translateSourceFile(source, "A", "A.m"); assertTranslation(translation, "- (instancetype)initWithA:(A *)outer$;"); assertTranslation(translation, "A *this$0_;"); assertTranslation(translation, "((IOSObjectArray *) nil_chk(this$0_->elements_))->size_"); } public void testCreateInnerClassOfSuperclass() throws IOException { String source = "class B {\n" + " class C {}\n" + "}\n" + "class A extends B {\n" + " void foo() { new C(); }\n" + "}\n"; String translation = translateSourceFile(source, "A", "A.m"); assertTranslation(translation, "create_B_C_initWithB_(self)"); } public void testCallInnerConstructorOfParameterizedOuterClass() throws IOException { String outerSource = "abstract class Outer<T> { class Inner { public Inner(T t) 
{} }}"; String callerSource = "class A extends Outer<String> { public void foo() { new Inner(\"test\"); } }"; addSourceFile(outerSource, "Outer.java"); addSourceFile(callerSource, "A.java"); String translation = translateSourceFile("A", "A.m"); assertTranslation(translation, "create_Outer_Inner_initWithOuter_withId_(self, @\"test\");"); } public void testNoOuterFieldAssignmentWhenCallingOtherConstructor() throws IOException { String source = "class Outer { class Inner { Inner(int i) {} Inner() { this(42); } } }"; String translation = translateSourceFile(source, "Outer", "Outer.m"); assertTranslatedLines(translation, "void Outer_Inner_initWithOuter_(Outer_Inner *self, Outer *outer$) {", " Outer_Inner_initWithOuter_withInt_(self, outer$, 42);", "}"); } public void testListArgsInEnumConstantDeclaration() throws IOException { String source = "class Outer { " + " enum Inner { " + " A(new String[] { \"1\", \"2\", \"3\" }), " + " B(new String[] { \"4\", \"5\", \"6\" }); " + " Inner(String[] values) {} " + " } " + "}"; String translation = translateSourceFile(source, "Outer", "Outer.m"); assertTranslation(translation, "[IOSObjectArray arrayWithObjects:(id[]){ " + "@\"1\", @\"2\", @\"3\" } count:3 type:NSString_class_()]"); assertTranslation(translation, "[IOSObjectArray arrayWithObjects:(id[]){ " + "@\"4\", @\"5\", @\"6\" } count:3 type:NSString_class_()]"); } public void testInnerClassVarargsConstructor() throws IOException { String translation = translateSourceFile( "class Test { class Inner { Inner(int... 
i) {} } void test() { new Inner(1, 2, 3); } }", "Test", "Test.m"); assertTranslation(translation, "create_Test_Inner_initWithTest_withIntArray_(self, " + "[IOSIntArray arrayWithInts:(int32_t[]){ 1, 2, 3 } count:3])"); } public void testInnerClassConstructedInSuperConstructorInvocation() throws IOException { String translation = translateSourceFile( "class Outer { " + " class Inner1 { } " + " class Inner2Super { Inner2Super(Inner1 i) { } } " + " class Inner2 extends Inner2Super { " + " Inner2() { " + " super(new Inner1()); " + " } " + " } " + "}", "Outer", "Outer.m"); assertTranslation(translation, "create_Outer_Inner1_initWithOuter_(outer$)"); } public void testOuterReferenceInSuperConstructorInvocation() throws IOException { String translation = translateSourceFile( "class Outer { " + " int foo; " + " class Inner1 { Inner1(int i) { } } " + " class Inner2 extends Inner1 { " + " Inner2() { " + " super(foo); " + " } " + " } " + "}", "Outer", "Outer.m"); assertTranslation(translation, "Outer_Inner1_initWithOuter_withInt_(self, outer$, outer$->foo_);"); } public void testOuterThisReferenceInSuperConstructorInvocation() throws IOException { String translation = translateSourceFile( "class Outer { " + " int foo; " + " class Outer1 { " + " int foo; " + " class Inner1 { Inner1(int i) { } } " + " class Inner2 extends Inner1 { " + " Inner2() { " + " super(Outer.this.foo); " + " } " + " } " + " } " + "}", "Outer", "Outer.m"); assertTranslation(translation, "Outer_Outer1_Inner1_initWithOuter_Outer1_withInt_(self, outer$, outer$->this$0_->foo_);"); } public void testAnonymousClassWithinTypeDeclarationStatement() throws IOException { String translation = translateSourceFile( "class Test { Runnable foo() { class MyRunnable implements Runnable { " + "public void run() { Runnable r = new Runnable() { public void run() {} }; } } " + "return new MyRunnable(); } }", "Test", "Test.m"); assertOccurrences(translation, "@interface Test_1MyRunnable_1", 1); } public void 
testOuterInitializedBeforeSuperInit() throws IOException { String translation = translateSourceFile( "class Test { int i; class Inner { void test() { i++; } } }", "Test", "Test.m"); assertTranslatedLines(translation, "void Test_Inner_initWithTest_(Test_Inner *self, Test *outer$) {", " JreStrongAssign(&self->this$0_, outer$);", " NSObject_init(self);", "}"); } public void testInnerClassOuterStackReference() throws IOException { String translation = translateSourceFile( "public class A { " + " void test() { " + " final Object obj = new Object(); " + " class TestThread extends Thread { " + " public void run() { " + " System.out.println(obj); }}}}", "A", "A.m"); assertTranslation(translation, "printlnWithId:val$obj_"); assertTranslation(translation, "id val$obj_;"); } public void testLocalClassWithCaptureVariables() throws IOException { String translation = translateSourceFile( "class Test { void test(final String s) { " + " class Inner { " + " Inner() { this(0); } " + " Inner(int i) { } " + " void foo() { s.toString(); } " + " } " + " new Inner(); } }", "Test", "Test.m"); assertTranslation(translation, "create_Test_1Inner_initWithNSString_(s)"); assertTranslatedLines(translation, "void Test_1Inner_initWithNSString_(Test_1Inner *self, NSString *capture$0) {", " Test_1Inner_initWithNSString_withInt_(self, capture$0, 0);", "}"); assertTranslatedLines(translation, "void Test_1Inner_initWithNSString_withInt_(" + "Test_1Inner *self, NSString *capture$0, int32_t i) {", " JreStrongAssign(&self->val$s_, capture$0);", " NSObject_init(self);", "}"); } public void testWeakStaticClass() throws IOException { String source = "import com.google.j2objc.annotations.WeakOuter; " + "public class A { @WeakOuter static class B {}}"; String translation = translateSourceFile(source, "A", "A.h"); assertWarning("static class A.B has WeakOuter annotation"); assertNoErrors(); assertNotInTranslation(translation, "__unsafe_unretained"); } public void testInnerClassWithVarargsAndCaptureVariables() 
throws IOException { String translation = translateSourceFile( "class Test { int test(final int i, Object o) { class Inner { Inner(Object... o) {} " + "int foo() { return i; } } return new Inner(o).foo(); } }", "Test", "Test.m"); assertTranslation(translation, "create_Test_1Inner_initWithInt_withNSObjectArray_(i, " + "[IOSObjectArray arrayWithObjects:(id[]){ o } count:1 type:NSObject_class_()])"); assertTranslation(translation, "void Test_1Inner_initWithInt_withNSObjectArray_(" + "Test_1Inner *self, int32_t capture$0, IOSObjectArray *o)"); } }
googleapis/google-cloud-java
36,202
java-dataplex/proto-google-cloud-dataplex-v1/src/main/java/com/google/cloud/dataplex/v1/UpdateEnvironmentRequest.java
/* * Copyright 2025 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ // Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/cloud/dataplex/v1/service.proto // Protobuf Java Version: 3.25.8 package com.google.cloud.dataplex.v1; /** * * * <pre> * Update environment request. * </pre> * * Protobuf type {@code google.cloud.dataplex.v1.UpdateEnvironmentRequest} */ public final class UpdateEnvironmentRequest extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.cloud.dataplex.v1.UpdateEnvironmentRequest) UpdateEnvironmentRequestOrBuilder { private static final long serialVersionUID = 0L; // Use UpdateEnvironmentRequest.newBuilder() to construct. 
private UpdateEnvironmentRequest(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private UpdateEnvironmentRequest() {} @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance(UnusedPrivateParameter unused) { return new UpdateEnvironmentRequest(); } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.dataplex.v1.ServiceProto .internal_static_google_cloud_dataplex_v1_UpdateEnvironmentRequest_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.dataplex.v1.ServiceProto .internal_static_google_cloud_dataplex_v1_UpdateEnvironmentRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.dataplex.v1.UpdateEnvironmentRequest.class, com.google.cloud.dataplex.v1.UpdateEnvironmentRequest.Builder.class); } private int bitField0_; public static final int UPDATE_MASK_FIELD_NUMBER = 1; private com.google.protobuf.FieldMask updateMask_; /** * * * <pre> * Required. Mask of fields to update. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return Whether the updateMask field is set. */ @java.lang.Override public boolean hasUpdateMask() { return ((bitField0_ & 0x00000001) != 0); } /** * * * <pre> * Required. Mask of fields to update. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return The updateMask. */ @java.lang.Override public com.google.protobuf.FieldMask getUpdateMask() { return updateMask_ == null ? com.google.protobuf.FieldMask.getDefaultInstance() : updateMask_; } /** * * * <pre> * Required. Mask of fields to update. 
* </pre> * * <code>.google.protobuf.FieldMask update_mask = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> */ @java.lang.Override public com.google.protobuf.FieldMaskOrBuilder getUpdateMaskOrBuilder() { return updateMask_ == null ? com.google.protobuf.FieldMask.getDefaultInstance() : updateMask_; } public static final int ENVIRONMENT_FIELD_NUMBER = 2; private com.google.cloud.dataplex.v1.Environment environment_; /** * * * <pre> * Required. Update description. * Only fields specified in `update_mask` are updated. * </pre> * * <code> * .google.cloud.dataplex.v1.Environment environment = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return Whether the environment field is set. */ @java.lang.Override public boolean hasEnvironment() { return ((bitField0_ & 0x00000002) != 0); } /** * * * <pre> * Required. Update description. * Only fields specified in `update_mask` are updated. * </pre> * * <code> * .google.cloud.dataplex.v1.Environment environment = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return The environment. */ @java.lang.Override public com.google.cloud.dataplex.v1.Environment getEnvironment() { return environment_ == null ? com.google.cloud.dataplex.v1.Environment.getDefaultInstance() : environment_; } /** * * * <pre> * Required. Update description. * Only fields specified in `update_mask` are updated. * </pre> * * <code> * .google.cloud.dataplex.v1.Environment environment = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ @java.lang.Override public com.google.cloud.dataplex.v1.EnvironmentOrBuilder getEnvironmentOrBuilder() { return environment_ == null ? com.google.cloud.dataplex.v1.Environment.getDefaultInstance() : environment_; } public static final int VALIDATE_ONLY_FIELD_NUMBER = 3; private boolean validateOnly_ = false; /** * * * <pre> * Optional. Only validate the request, but do not perform mutations. * The default is false. 
* </pre> * * <code>bool validate_only = 3 [(.google.api.field_behavior) = OPTIONAL];</code> * * @return The validateOnly. */ @java.lang.Override public boolean getValidateOnly() { return validateOnly_; } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { if (((bitField0_ & 0x00000001) != 0)) { output.writeMessage(1, getUpdateMask()); } if (((bitField0_ & 0x00000002) != 0)) { output.writeMessage(2, getEnvironment()); } if (validateOnly_ != false) { output.writeBool(3, validateOnly_); } getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) != 0)) { size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, getUpdateMask()); } if (((bitField0_ & 0x00000002) != 0)) { size += com.google.protobuf.CodedOutputStream.computeMessageSize(2, getEnvironment()); } if (validateOnly_ != false) { size += com.google.protobuf.CodedOutputStream.computeBoolSize(3, validateOnly_); } size += getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof com.google.cloud.dataplex.v1.UpdateEnvironmentRequest)) { return super.equals(obj); } com.google.cloud.dataplex.v1.UpdateEnvironmentRequest other = (com.google.cloud.dataplex.v1.UpdateEnvironmentRequest) obj; if (hasUpdateMask() != other.hasUpdateMask()) return false; if (hasUpdateMask()) { if (!getUpdateMask().equals(other.getUpdateMask())) return false; } if (hasEnvironment() != other.hasEnvironment()) return false; if (hasEnvironment()) { if 
(!getEnvironment().equals(other.getEnvironment())) return false; } if (getValidateOnly() != other.getValidateOnly()) return false; if (!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); if (hasUpdateMask()) { hash = (37 * hash) + UPDATE_MASK_FIELD_NUMBER; hash = (53 * hash) + getUpdateMask().hashCode(); } if (hasEnvironment()) { hash = (37 * hash) + ENVIRONMENT_FIELD_NUMBER; hash = (53 * hash) + getEnvironment().hashCode(); } hash = (37 * hash) + VALIDATE_ONLY_FIELD_NUMBER; hash = (53 * hash) + com.google.protobuf.Internal.hashBoolean(getValidateOnly()); hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static com.google.cloud.dataplex.v1.UpdateEnvironmentRequest parseFrom( java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.dataplex.v1.UpdateEnvironmentRequest parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.dataplex.v1.UpdateEnvironmentRequest parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.dataplex.v1.UpdateEnvironmentRequest parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.dataplex.v1.UpdateEnvironmentRequest parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return 
PARSER.parseFrom(data); } public static com.google.cloud.dataplex.v1.UpdateEnvironmentRequest parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.dataplex.v1.UpdateEnvironmentRequest parseFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.dataplex.v1.UpdateEnvironmentRequest parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.dataplex.v1.UpdateEnvironmentRequest parseDelimitedFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); } public static com.google.cloud.dataplex.v1.UpdateEnvironmentRequest parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.dataplex.v1.UpdateEnvironmentRequest parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.dataplex.v1.UpdateEnvironmentRequest parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return 
newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder( com.google.cloud.dataplex.v1.UpdateEnvironmentRequest prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * * * <pre> * Update environment request. * </pre> * * Protobuf type {@code google.cloud.dataplex.v1.UpdateEnvironmentRequest} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:google.cloud.dataplex.v1.UpdateEnvironmentRequest) com.google.cloud.dataplex.v1.UpdateEnvironmentRequestOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.dataplex.v1.ServiceProto .internal_static_google_cloud_dataplex_v1_UpdateEnvironmentRequest_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.dataplex.v1.ServiceProto .internal_static_google_cloud_dataplex_v1_UpdateEnvironmentRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.dataplex.v1.UpdateEnvironmentRequest.class, com.google.cloud.dataplex.v1.UpdateEnvironmentRequest.Builder.class); } // Construct using com.google.cloud.dataplex.v1.UpdateEnvironmentRequest.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) { 
getUpdateMaskFieldBuilder(); getEnvironmentFieldBuilder(); } } @java.lang.Override public Builder clear() { super.clear(); bitField0_ = 0; updateMask_ = null; if (updateMaskBuilder_ != null) { updateMaskBuilder_.dispose(); updateMaskBuilder_ = null; } environment_ = null; if (environmentBuilder_ != null) { environmentBuilder_.dispose(); environmentBuilder_ = null; } validateOnly_ = false; return this; } @java.lang.Override public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return com.google.cloud.dataplex.v1.ServiceProto .internal_static_google_cloud_dataplex_v1_UpdateEnvironmentRequest_descriptor; } @java.lang.Override public com.google.cloud.dataplex.v1.UpdateEnvironmentRequest getDefaultInstanceForType() { return com.google.cloud.dataplex.v1.UpdateEnvironmentRequest.getDefaultInstance(); } @java.lang.Override public com.google.cloud.dataplex.v1.UpdateEnvironmentRequest build() { com.google.cloud.dataplex.v1.UpdateEnvironmentRequest result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public com.google.cloud.dataplex.v1.UpdateEnvironmentRequest buildPartial() { com.google.cloud.dataplex.v1.UpdateEnvironmentRequest result = new com.google.cloud.dataplex.v1.UpdateEnvironmentRequest(this); if (bitField0_ != 0) { buildPartial0(result); } onBuilt(); return result; } private void buildPartial0(com.google.cloud.dataplex.v1.UpdateEnvironmentRequest result) { int from_bitField0_ = bitField0_; int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000001) != 0)) { result.updateMask_ = updateMaskBuilder_ == null ? updateMask_ : updateMaskBuilder_.build(); to_bitField0_ |= 0x00000001; } if (((from_bitField0_ & 0x00000002) != 0)) { result.environment_ = environmentBuilder_ == null ? 
environment_ : environmentBuilder_.build(); to_bitField0_ |= 0x00000002; } if (((from_bitField0_ & 0x00000004) != 0)) { result.validateOnly_ = validateOnly_; } result.bitField0_ |= to_bitField0_; } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof com.google.cloud.dataplex.v1.UpdateEnvironmentRequest) { return mergeFrom((com.google.cloud.dataplex.v1.UpdateEnvironmentRequest) other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(com.google.cloud.dataplex.v1.UpdateEnvironmentRequest other) { if (other == com.google.cloud.dataplex.v1.UpdateEnvironmentRequest.getDefaultInstance()) return this; if (other.hasUpdateMask()) { mergeUpdateMask(other.getUpdateMask()); } if (other.hasEnvironment()) { mergeEnvironment(other.getEnvironment()); } if (other.getValidateOnly() != false) { setValidateOnly(other.getValidateOnly()); } this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( 
com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { input.readMessage(getUpdateMaskFieldBuilder().getBuilder(), extensionRegistry); bitField0_ |= 0x00000001; break; } // case 10 case 18: { input.readMessage(getEnvironmentFieldBuilder().getBuilder(), extensionRegistry); bitField0_ |= 0x00000002; break; } // case 18 case 24: { validateOnly_ = input.readBool(); bitField0_ |= 0x00000004; break; } // case 24 default: { if (!super.parseUnknownField(input, extensionRegistry, tag)) { done = true; // was an endgroup tag } break; } // default: } // switch (tag) } // while (!done) } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.unwrapIOException(); } finally { onChanged(); } // finally return this; } private int bitField0_; private com.google.protobuf.FieldMask updateMask_; private com.google.protobuf.SingleFieldBuilderV3< com.google.protobuf.FieldMask, com.google.protobuf.FieldMask.Builder, com.google.protobuf.FieldMaskOrBuilder> updateMaskBuilder_; /** * * * <pre> * Required. Mask of fields to update. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return Whether the updateMask field is set. */ public boolean hasUpdateMask() { return ((bitField0_ & 0x00000001) != 0); } /** * * * <pre> * Required. Mask of fields to update. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return The updateMask. */ public com.google.protobuf.FieldMask getUpdateMask() { if (updateMaskBuilder_ == null) { return updateMask_ == null ? 
com.google.protobuf.FieldMask.getDefaultInstance() : updateMask_; } else { return updateMaskBuilder_.getMessage(); } } /** * * * <pre> * Required. Mask of fields to update. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder setUpdateMask(com.google.protobuf.FieldMask value) { if (updateMaskBuilder_ == null) { if (value == null) { throw new NullPointerException(); } updateMask_ = value; } else { updateMaskBuilder_.setMessage(value); } bitField0_ |= 0x00000001; onChanged(); return this; } /** * * * <pre> * Required. Mask of fields to update. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder setUpdateMask(com.google.protobuf.FieldMask.Builder builderForValue) { if (updateMaskBuilder_ == null) { updateMask_ = builderForValue.build(); } else { updateMaskBuilder_.setMessage(builderForValue.build()); } bitField0_ |= 0x00000001; onChanged(); return this; } /** * * * <pre> * Required. Mask of fields to update. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder mergeUpdateMask(com.google.protobuf.FieldMask value) { if (updateMaskBuilder_ == null) { if (((bitField0_ & 0x00000001) != 0) && updateMask_ != null && updateMask_ != com.google.protobuf.FieldMask.getDefaultInstance()) { getUpdateMaskBuilder().mergeFrom(value); } else { updateMask_ = value; } } else { updateMaskBuilder_.mergeFrom(value); } if (updateMask_ != null) { bitField0_ |= 0x00000001; onChanged(); } return this; } /** * * * <pre> * Required. Mask of fields to update. 
* </pre> * * <code>.google.protobuf.FieldMask update_mask = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder clearUpdateMask() { bitField0_ = (bitField0_ & ~0x00000001); updateMask_ = null; if (updateMaskBuilder_ != null) { updateMaskBuilder_.dispose(); updateMaskBuilder_ = null; } onChanged(); return this; } /** * * * <pre> * Required. Mask of fields to update. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public com.google.protobuf.FieldMask.Builder getUpdateMaskBuilder() { bitField0_ |= 0x00000001; onChanged(); return getUpdateMaskFieldBuilder().getBuilder(); } /** * * * <pre> * Required. Mask of fields to update. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public com.google.protobuf.FieldMaskOrBuilder getUpdateMaskOrBuilder() { if (updateMaskBuilder_ != null) { return updateMaskBuilder_.getMessageOrBuilder(); } else { return updateMask_ == null ? com.google.protobuf.FieldMask.getDefaultInstance() : updateMask_; } } /** * * * <pre> * Required. Mask of fields to update. 
* </pre> * * <code>.google.protobuf.FieldMask update_mask = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> */ private com.google.protobuf.SingleFieldBuilderV3< com.google.protobuf.FieldMask, com.google.protobuf.FieldMask.Builder, com.google.protobuf.FieldMaskOrBuilder> getUpdateMaskFieldBuilder() { if (updateMaskBuilder_ == null) { updateMaskBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< com.google.protobuf.FieldMask, com.google.protobuf.FieldMask.Builder, com.google.protobuf.FieldMaskOrBuilder>( getUpdateMask(), getParentForChildren(), isClean()); updateMask_ = null; } return updateMaskBuilder_; } private com.google.cloud.dataplex.v1.Environment environment_; private com.google.protobuf.SingleFieldBuilderV3< com.google.cloud.dataplex.v1.Environment, com.google.cloud.dataplex.v1.Environment.Builder, com.google.cloud.dataplex.v1.EnvironmentOrBuilder> environmentBuilder_; /** * * * <pre> * Required. Update description. * Only fields specified in `update_mask` are updated. * </pre> * * <code> * .google.cloud.dataplex.v1.Environment environment = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return Whether the environment field is set. */ public boolean hasEnvironment() { return ((bitField0_ & 0x00000002) != 0); } /** * * * <pre> * Required. Update description. * Only fields specified in `update_mask` are updated. * </pre> * * <code> * .google.cloud.dataplex.v1.Environment environment = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return The environment. */ public com.google.cloud.dataplex.v1.Environment getEnvironment() { if (environmentBuilder_ == null) { return environment_ == null ? com.google.cloud.dataplex.v1.Environment.getDefaultInstance() : environment_; } else { return environmentBuilder_.getMessage(); } } /** * * * <pre> * Required. Update description. * Only fields specified in `update_mask` are updated. 
* </pre> * * <code> * .google.cloud.dataplex.v1.Environment environment = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder setEnvironment(com.google.cloud.dataplex.v1.Environment value) { if (environmentBuilder_ == null) { if (value == null) { throw new NullPointerException(); } environment_ = value; } else { environmentBuilder_.setMessage(value); } bitField0_ |= 0x00000002; onChanged(); return this; } /** * * * <pre> * Required. Update description. * Only fields specified in `update_mask` are updated. * </pre> * * <code> * .google.cloud.dataplex.v1.Environment environment = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder setEnvironment( com.google.cloud.dataplex.v1.Environment.Builder builderForValue) { if (environmentBuilder_ == null) { environment_ = builderForValue.build(); } else { environmentBuilder_.setMessage(builderForValue.build()); } bitField0_ |= 0x00000002; onChanged(); return this; } /** * * * <pre> * Required. Update description. * Only fields specified in `update_mask` are updated. * </pre> * * <code> * .google.cloud.dataplex.v1.Environment environment = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder mergeEnvironment(com.google.cloud.dataplex.v1.Environment value) { if (environmentBuilder_ == null) { if (((bitField0_ & 0x00000002) != 0) && environment_ != null && environment_ != com.google.cloud.dataplex.v1.Environment.getDefaultInstance()) { getEnvironmentBuilder().mergeFrom(value); } else { environment_ = value; } } else { environmentBuilder_.mergeFrom(value); } if (environment_ != null) { bitField0_ |= 0x00000002; onChanged(); } return this; } /** * * * <pre> * Required. Update description. * Only fields specified in `update_mask` are updated. 
* </pre> * * <code> * .google.cloud.dataplex.v1.Environment environment = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder clearEnvironment() { bitField0_ = (bitField0_ & ~0x00000002); environment_ = null; if (environmentBuilder_ != null) { environmentBuilder_.dispose(); environmentBuilder_ = null; } onChanged(); return this; } /** * * * <pre> * Required. Update description. * Only fields specified in `update_mask` are updated. * </pre> * * <code> * .google.cloud.dataplex.v1.Environment environment = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public com.google.cloud.dataplex.v1.Environment.Builder getEnvironmentBuilder() { bitField0_ |= 0x00000002; onChanged(); return getEnvironmentFieldBuilder().getBuilder(); } /** * * * <pre> * Required. Update description. * Only fields specified in `update_mask` are updated. * </pre> * * <code> * .google.cloud.dataplex.v1.Environment environment = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public com.google.cloud.dataplex.v1.EnvironmentOrBuilder getEnvironmentOrBuilder() { if (environmentBuilder_ != null) { return environmentBuilder_.getMessageOrBuilder(); } else { return environment_ == null ? com.google.cloud.dataplex.v1.Environment.getDefaultInstance() : environment_; } } /** * * * <pre> * Required. Update description. * Only fields specified in `update_mask` are updated. 
* </pre> * * <code> * .google.cloud.dataplex.v1.Environment environment = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ private com.google.protobuf.SingleFieldBuilderV3< com.google.cloud.dataplex.v1.Environment, com.google.cloud.dataplex.v1.Environment.Builder, com.google.cloud.dataplex.v1.EnvironmentOrBuilder> getEnvironmentFieldBuilder() { if (environmentBuilder_ == null) { environmentBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< com.google.cloud.dataplex.v1.Environment, com.google.cloud.dataplex.v1.Environment.Builder, com.google.cloud.dataplex.v1.EnvironmentOrBuilder>( getEnvironment(), getParentForChildren(), isClean()); environment_ = null; } return environmentBuilder_; } private boolean validateOnly_; /** * * * <pre> * Optional. Only validate the request, but do not perform mutations. * The default is false. * </pre> * * <code>bool validate_only = 3 [(.google.api.field_behavior) = OPTIONAL];</code> * * @return The validateOnly. */ @java.lang.Override public boolean getValidateOnly() { return validateOnly_; } /** * * * <pre> * Optional. Only validate the request, but do not perform mutations. * The default is false. * </pre> * * <code>bool validate_only = 3 [(.google.api.field_behavior) = OPTIONAL];</code> * * @param value The validateOnly to set. * @return This builder for chaining. */ public Builder setValidateOnly(boolean value) { validateOnly_ = value; bitField0_ |= 0x00000004; onChanged(); return this; } /** * * * <pre> * Optional. Only validate the request, but do not perform mutations. * The default is false. * </pre> * * <code>bool validate_only = 3 [(.google.api.field_behavior) = OPTIONAL];</code> * * @return This builder for chaining. 
*/ public Builder clearValidateOnly() { bitField0_ = (bitField0_ & ~0x00000004); validateOnly_ = false; onChanged(); return this; } @java.lang.Override public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:google.cloud.dataplex.v1.UpdateEnvironmentRequest) } // @@protoc_insertion_point(class_scope:google.cloud.dataplex.v1.UpdateEnvironmentRequest) private static final com.google.cloud.dataplex.v1.UpdateEnvironmentRequest DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new com.google.cloud.dataplex.v1.UpdateEnvironmentRequest(); } public static com.google.cloud.dataplex.v1.UpdateEnvironmentRequest getDefaultInstance() { return DEFAULT_INSTANCE; } private static final com.google.protobuf.Parser<UpdateEnvironmentRequest> PARSER = new com.google.protobuf.AbstractParser<UpdateEnvironmentRequest>() { @java.lang.Override public UpdateEnvironmentRequest parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { Builder builder = newBuilder(); try { builder.mergeFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(builder.buildPartial()); } catch (com.google.protobuf.UninitializedMessageException e) { throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException(e) .setUnfinishedMessage(builder.buildPartial()); } return builder.buildPartial(); } }; public static com.google.protobuf.Parser<UpdateEnvironmentRequest> parser() { return PARSER; } @java.lang.Override 
public com.google.protobuf.Parser<UpdateEnvironmentRequest> getParserForType() { return PARSER; } @java.lang.Override public com.google.cloud.dataplex.v1.UpdateEnvironmentRequest getDefaultInstanceForType() { return DEFAULT_INSTANCE; } }
googleapis/google-cloud-java
36,232
java-dlp/proto-google-cloud-dlp-v2/src/main/java/com/google/privacy/dlp/v2/DiscoveryOtherCloudConditions.java
/* * Copyright 2025 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ // Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/privacy/dlp/v2/dlp.proto // Protobuf Java Version: 3.25.8 package com.google.privacy.dlp.v2; /** * * * <pre> * Requirements that must be true before a resource is profiled for the first * time. * </pre> * * Protobuf type {@code google.privacy.dlp.v2.DiscoveryOtherCloudConditions} */ public final class DiscoveryOtherCloudConditions extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.privacy.dlp.v2.DiscoveryOtherCloudConditions) DiscoveryOtherCloudConditionsOrBuilder { private static final long serialVersionUID = 0L; // Use DiscoveryOtherCloudConditions.newBuilder() to construct. 
private DiscoveryOtherCloudConditions(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private DiscoveryOtherCloudConditions() {} @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance(UnusedPrivateParameter unused) { return new DiscoveryOtherCloudConditions(); } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.privacy.dlp.v2.DlpProto .internal_static_google_privacy_dlp_v2_DiscoveryOtherCloudConditions_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.privacy.dlp.v2.DlpProto .internal_static_google_privacy_dlp_v2_DiscoveryOtherCloudConditions_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.privacy.dlp.v2.DiscoveryOtherCloudConditions.class, com.google.privacy.dlp.v2.DiscoveryOtherCloudConditions.Builder.class); } private int bitField0_; private int conditionsCase_ = 0; @SuppressWarnings("serial") private java.lang.Object conditions_; public enum ConditionsCase implements com.google.protobuf.Internal.EnumLite, com.google.protobuf.AbstractMessage.InternalOneOfEnum { AMAZON_S3_BUCKET_CONDITIONS(2), CONDITIONS_NOT_SET(0); private final int value; private ConditionsCase(int value) { this.value = value; } /** * @param value The number of the enum to look for. * @return The enum associated with the given number. * @deprecated Use {@link #forNumber(int)} instead. 
*/ @java.lang.Deprecated public static ConditionsCase valueOf(int value) { return forNumber(value); } public static ConditionsCase forNumber(int value) { switch (value) { case 2: return AMAZON_S3_BUCKET_CONDITIONS; case 0: return CONDITIONS_NOT_SET; default: return null; } } public int getNumber() { return this.value; } }; public ConditionsCase getConditionsCase() { return ConditionsCase.forNumber(conditionsCase_); } public static final int MIN_AGE_FIELD_NUMBER = 1; private com.google.protobuf.Duration minAge_; /** * * * <pre> * Minimum age a resource must be before Cloud DLP can profile it. Value must * be 1 hour or greater. * </pre> * * <code>.google.protobuf.Duration min_age = 1;</code> * * @return Whether the minAge field is set. */ @java.lang.Override public boolean hasMinAge() { return ((bitField0_ & 0x00000001) != 0); } /** * * * <pre> * Minimum age a resource must be before Cloud DLP can profile it. Value must * be 1 hour or greater. * </pre> * * <code>.google.protobuf.Duration min_age = 1;</code> * * @return The minAge. */ @java.lang.Override public com.google.protobuf.Duration getMinAge() { return minAge_ == null ? com.google.protobuf.Duration.getDefaultInstance() : minAge_; } /** * * * <pre> * Minimum age a resource must be before Cloud DLP can profile it. Value must * be 1 hour or greater. * </pre> * * <code>.google.protobuf.Duration min_age = 1;</code> */ @java.lang.Override public com.google.protobuf.DurationOrBuilder getMinAgeOrBuilder() { return minAge_ == null ? com.google.protobuf.Duration.getDefaultInstance() : minAge_; } public static final int AMAZON_S3_BUCKET_CONDITIONS_FIELD_NUMBER = 2; /** * * * <pre> * Amazon S3 bucket conditions. * </pre> * * <code>.google.privacy.dlp.v2.AmazonS3BucketConditions amazon_s3_bucket_conditions = 2;</code> * * @return Whether the amazonS3BucketConditions field is set. 
*/ @java.lang.Override public boolean hasAmazonS3BucketConditions() { return conditionsCase_ == 2; } /** * * * <pre> * Amazon S3 bucket conditions. * </pre> * * <code>.google.privacy.dlp.v2.AmazonS3BucketConditions amazon_s3_bucket_conditions = 2;</code> * * @return The amazonS3BucketConditions. */ @java.lang.Override public com.google.privacy.dlp.v2.AmazonS3BucketConditions getAmazonS3BucketConditions() { if (conditionsCase_ == 2) { return (com.google.privacy.dlp.v2.AmazonS3BucketConditions) conditions_; } return com.google.privacy.dlp.v2.AmazonS3BucketConditions.getDefaultInstance(); } /** * * * <pre> * Amazon S3 bucket conditions. * </pre> * * <code>.google.privacy.dlp.v2.AmazonS3BucketConditions amazon_s3_bucket_conditions = 2;</code> */ @java.lang.Override public com.google.privacy.dlp.v2.AmazonS3BucketConditionsOrBuilder getAmazonS3BucketConditionsOrBuilder() { if (conditionsCase_ == 2) { return (com.google.privacy.dlp.v2.AmazonS3BucketConditions) conditions_; } return com.google.privacy.dlp.v2.AmazonS3BucketConditions.getDefaultInstance(); } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { if (((bitField0_ & 0x00000001) != 0)) { output.writeMessage(1, getMinAge()); } if (conditionsCase_ == 2) { output.writeMessage(2, (com.google.privacy.dlp.v2.AmazonS3BucketConditions) conditions_); } getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) != 0)) { size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, getMinAge()); } if (conditionsCase_ == 2) { size += 
com.google.protobuf.CodedOutputStream.computeMessageSize( 2, (com.google.privacy.dlp.v2.AmazonS3BucketConditions) conditions_); } size += getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof com.google.privacy.dlp.v2.DiscoveryOtherCloudConditions)) { return super.equals(obj); } com.google.privacy.dlp.v2.DiscoveryOtherCloudConditions other = (com.google.privacy.dlp.v2.DiscoveryOtherCloudConditions) obj; if (hasMinAge() != other.hasMinAge()) return false; if (hasMinAge()) { if (!getMinAge().equals(other.getMinAge())) return false; } if (!getConditionsCase().equals(other.getConditionsCase())) return false; switch (conditionsCase_) { case 2: if (!getAmazonS3BucketConditions().equals(other.getAmazonS3BucketConditions())) return false; break; case 0: default: } if (!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); if (hasMinAge()) { hash = (37 * hash) + MIN_AGE_FIELD_NUMBER; hash = (53 * hash) + getMinAge().hashCode(); } switch (conditionsCase_) { case 2: hash = (37 * hash) + AMAZON_S3_BUCKET_CONDITIONS_FIELD_NUMBER; hash = (53 * hash) + getAmazonS3BucketConditions().hashCode(); break; case 0: default: } hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static com.google.privacy.dlp.v2.DiscoveryOtherCloudConditions parseFrom( java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.privacy.dlp.v2.DiscoveryOtherCloudConditions parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return 
PARSER.parseFrom(data, extensionRegistry); } public static com.google.privacy.dlp.v2.DiscoveryOtherCloudConditions parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.privacy.dlp.v2.DiscoveryOtherCloudConditions parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.privacy.dlp.v2.DiscoveryOtherCloudConditions parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.privacy.dlp.v2.DiscoveryOtherCloudConditions parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.privacy.dlp.v2.DiscoveryOtherCloudConditions parseFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.privacy.dlp.v2.DiscoveryOtherCloudConditions parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } public static com.google.privacy.dlp.v2.DiscoveryOtherCloudConditions parseDelimitedFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); } public static com.google.privacy.dlp.v2.DiscoveryOtherCloudConditions parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return 
com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException( PARSER, input, extensionRegistry); } public static com.google.privacy.dlp.v2.DiscoveryOtherCloudConditions parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.privacy.dlp.v2.DiscoveryOtherCloudConditions parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder( com.google.privacy.dlp.v2.DiscoveryOtherCloudConditions prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * * * <pre> * Requirements that must be true before a resource is profiled for the first * time. 
* </pre> * * Protobuf type {@code google.privacy.dlp.v2.DiscoveryOtherCloudConditions} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:google.privacy.dlp.v2.DiscoveryOtherCloudConditions) com.google.privacy.dlp.v2.DiscoveryOtherCloudConditionsOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.privacy.dlp.v2.DlpProto .internal_static_google_privacy_dlp_v2_DiscoveryOtherCloudConditions_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.privacy.dlp.v2.DlpProto .internal_static_google_privacy_dlp_v2_DiscoveryOtherCloudConditions_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.privacy.dlp.v2.DiscoveryOtherCloudConditions.class, com.google.privacy.dlp.v2.DiscoveryOtherCloudConditions.Builder.class); } // Construct using com.google.privacy.dlp.v2.DiscoveryOtherCloudConditions.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) { getMinAgeFieldBuilder(); } } @java.lang.Override public Builder clear() { super.clear(); bitField0_ = 0; minAge_ = null; if (minAgeBuilder_ != null) { minAgeBuilder_.dispose(); minAgeBuilder_ = null; } if (amazonS3BucketConditionsBuilder_ != null) { amazonS3BucketConditionsBuilder_.clear(); } conditionsCase_ = 0; conditions_ = null; return this; } @java.lang.Override public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return com.google.privacy.dlp.v2.DlpProto .internal_static_google_privacy_dlp_v2_DiscoveryOtherCloudConditions_descriptor; } @java.lang.Override public 
com.google.privacy.dlp.v2.DiscoveryOtherCloudConditions getDefaultInstanceForType() { return com.google.privacy.dlp.v2.DiscoveryOtherCloudConditions.getDefaultInstance(); } @java.lang.Override public com.google.privacy.dlp.v2.DiscoveryOtherCloudConditions build() { com.google.privacy.dlp.v2.DiscoveryOtherCloudConditions result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public com.google.privacy.dlp.v2.DiscoveryOtherCloudConditions buildPartial() { com.google.privacy.dlp.v2.DiscoveryOtherCloudConditions result = new com.google.privacy.dlp.v2.DiscoveryOtherCloudConditions(this); if (bitField0_ != 0) { buildPartial0(result); } buildPartialOneofs(result); onBuilt(); return result; } private void buildPartial0(com.google.privacy.dlp.v2.DiscoveryOtherCloudConditions result) { int from_bitField0_ = bitField0_; int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000001) != 0)) { result.minAge_ = minAgeBuilder_ == null ? 
minAge_ : minAgeBuilder_.build(); to_bitField0_ |= 0x00000001; } result.bitField0_ |= to_bitField0_; } private void buildPartialOneofs( com.google.privacy.dlp.v2.DiscoveryOtherCloudConditions result) { result.conditionsCase_ = conditionsCase_; result.conditions_ = this.conditions_; if (conditionsCase_ == 2 && amazonS3BucketConditionsBuilder_ != null) { result.conditions_ = amazonS3BucketConditionsBuilder_.build(); } } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof com.google.privacy.dlp.v2.DiscoveryOtherCloudConditions) { return mergeFrom((com.google.privacy.dlp.v2.DiscoveryOtherCloudConditions) other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(com.google.privacy.dlp.v2.DiscoveryOtherCloudConditions other) { if (other == com.google.privacy.dlp.v2.DiscoveryOtherCloudConditions.getDefaultInstance()) return this; if (other.hasMinAge()) { mergeMinAge(other.getMinAge()); } switch (other.getConditionsCase()) { case AMAZON_S3_BUCKET_CONDITIONS: { mergeAmazonS3BucketConditions(other.getAmazonS3BucketConditions()); 
break; } case CONDITIONS_NOT_SET: { break; } } this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { input.readMessage(getMinAgeFieldBuilder().getBuilder(), extensionRegistry); bitField0_ |= 0x00000001; break; } // case 10 case 18: { input.readMessage( getAmazonS3BucketConditionsFieldBuilder().getBuilder(), extensionRegistry); conditionsCase_ = 2; break; } // case 18 default: { if (!super.parseUnknownField(input, extensionRegistry, tag)) { done = true; // was an endgroup tag } break; } // default: } // switch (tag) } // while (!done) } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.unwrapIOException(); } finally { onChanged(); } // finally return this; } private int conditionsCase_ = 0; private java.lang.Object conditions_; public ConditionsCase getConditionsCase() { return ConditionsCase.forNumber(conditionsCase_); } public Builder clearConditions() { conditionsCase_ = 0; conditions_ = null; onChanged(); return this; } private int bitField0_; private com.google.protobuf.Duration minAge_; private com.google.protobuf.SingleFieldBuilderV3< com.google.protobuf.Duration, com.google.protobuf.Duration.Builder, com.google.protobuf.DurationOrBuilder> minAgeBuilder_; /** * * * <pre> * Minimum age a resource must be before Cloud DLP can profile it. Value must * be 1 hour or greater. * </pre> * * <code>.google.protobuf.Duration min_age = 1;</code> * * @return Whether the minAge field is set. 
*/ public boolean hasMinAge() { return ((bitField0_ & 0x00000001) != 0); } /** * * * <pre> * Minimum age a resource must be before Cloud DLP can profile it. Value must * be 1 hour or greater. * </pre> * * <code>.google.protobuf.Duration min_age = 1;</code> * * @return The minAge. */ public com.google.protobuf.Duration getMinAge() { if (minAgeBuilder_ == null) { return minAge_ == null ? com.google.protobuf.Duration.getDefaultInstance() : minAge_; } else { return minAgeBuilder_.getMessage(); } } /** * * * <pre> * Minimum age a resource must be before Cloud DLP can profile it. Value must * be 1 hour or greater. * </pre> * * <code>.google.protobuf.Duration min_age = 1;</code> */ public Builder setMinAge(com.google.protobuf.Duration value) { if (minAgeBuilder_ == null) { if (value == null) { throw new NullPointerException(); } minAge_ = value; } else { minAgeBuilder_.setMessage(value); } bitField0_ |= 0x00000001; onChanged(); return this; } /** * * * <pre> * Minimum age a resource must be before Cloud DLP can profile it. Value must * be 1 hour or greater. * </pre> * * <code>.google.protobuf.Duration min_age = 1;</code> */ public Builder setMinAge(com.google.protobuf.Duration.Builder builderForValue) { if (minAgeBuilder_ == null) { minAge_ = builderForValue.build(); } else { minAgeBuilder_.setMessage(builderForValue.build()); } bitField0_ |= 0x00000001; onChanged(); return this; } /** * * * <pre> * Minimum age a resource must be before Cloud DLP can profile it. Value must * be 1 hour or greater. 
* </pre> * * <code>.google.protobuf.Duration min_age = 1;</code> */ public Builder mergeMinAge(com.google.protobuf.Duration value) { if (minAgeBuilder_ == null) { if (((bitField0_ & 0x00000001) != 0) && minAge_ != null && minAge_ != com.google.protobuf.Duration.getDefaultInstance()) { getMinAgeBuilder().mergeFrom(value); } else { minAge_ = value; } } else { minAgeBuilder_.mergeFrom(value); } if (minAge_ != null) { bitField0_ |= 0x00000001; onChanged(); } return this; } /** * * * <pre> * Minimum age a resource must be before Cloud DLP can profile it. Value must * be 1 hour or greater. * </pre> * * <code>.google.protobuf.Duration min_age = 1;</code> */ public Builder clearMinAge() { bitField0_ = (bitField0_ & ~0x00000001); minAge_ = null; if (minAgeBuilder_ != null) { minAgeBuilder_.dispose(); minAgeBuilder_ = null; } onChanged(); return this; } /** * * * <pre> * Minimum age a resource must be before Cloud DLP can profile it. Value must * be 1 hour or greater. * </pre> * * <code>.google.protobuf.Duration min_age = 1;</code> */ public com.google.protobuf.Duration.Builder getMinAgeBuilder() { bitField0_ |= 0x00000001; onChanged(); return getMinAgeFieldBuilder().getBuilder(); } /** * * * <pre> * Minimum age a resource must be before Cloud DLP can profile it. Value must * be 1 hour or greater. * </pre> * * <code>.google.protobuf.Duration min_age = 1;</code> */ public com.google.protobuf.DurationOrBuilder getMinAgeOrBuilder() { if (minAgeBuilder_ != null) { return minAgeBuilder_.getMessageOrBuilder(); } else { return minAge_ == null ? com.google.protobuf.Duration.getDefaultInstance() : minAge_; } } /** * * * <pre> * Minimum age a resource must be before Cloud DLP can profile it. Value must * be 1 hour or greater. 
* </pre> * * <code>.google.protobuf.Duration min_age = 1;</code> */ private com.google.protobuf.SingleFieldBuilderV3< com.google.protobuf.Duration, com.google.protobuf.Duration.Builder, com.google.protobuf.DurationOrBuilder> getMinAgeFieldBuilder() { if (minAgeBuilder_ == null) { minAgeBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< com.google.protobuf.Duration, com.google.protobuf.Duration.Builder, com.google.protobuf.DurationOrBuilder>( getMinAge(), getParentForChildren(), isClean()); minAge_ = null; } return minAgeBuilder_; } private com.google.protobuf.SingleFieldBuilderV3< com.google.privacy.dlp.v2.AmazonS3BucketConditions, com.google.privacy.dlp.v2.AmazonS3BucketConditions.Builder, com.google.privacy.dlp.v2.AmazonS3BucketConditionsOrBuilder> amazonS3BucketConditionsBuilder_; /** * * * <pre> * Amazon S3 bucket conditions. * </pre> * * <code>.google.privacy.dlp.v2.AmazonS3BucketConditions amazon_s3_bucket_conditions = 2;</code> * * @return Whether the amazonS3BucketConditions field is set. */ @java.lang.Override public boolean hasAmazonS3BucketConditions() { return conditionsCase_ == 2; } /** * * * <pre> * Amazon S3 bucket conditions. * </pre> * * <code>.google.privacy.dlp.v2.AmazonS3BucketConditions amazon_s3_bucket_conditions = 2;</code> * * @return The amazonS3BucketConditions. */ @java.lang.Override public com.google.privacy.dlp.v2.AmazonS3BucketConditions getAmazonS3BucketConditions() { if (amazonS3BucketConditionsBuilder_ == null) { if (conditionsCase_ == 2) { return (com.google.privacy.dlp.v2.AmazonS3BucketConditions) conditions_; } return com.google.privacy.dlp.v2.AmazonS3BucketConditions.getDefaultInstance(); } else { if (conditionsCase_ == 2) { return amazonS3BucketConditionsBuilder_.getMessage(); } return com.google.privacy.dlp.v2.AmazonS3BucketConditions.getDefaultInstance(); } } /** * * * <pre> * Amazon S3 bucket conditions. 
* </pre> * * <code>.google.privacy.dlp.v2.AmazonS3BucketConditions amazon_s3_bucket_conditions = 2;</code> */ public Builder setAmazonS3BucketConditions( com.google.privacy.dlp.v2.AmazonS3BucketConditions value) { if (amazonS3BucketConditionsBuilder_ == null) { if (value == null) { throw new NullPointerException(); } conditions_ = value; onChanged(); } else { amazonS3BucketConditionsBuilder_.setMessage(value); } conditionsCase_ = 2; return this; } /** * * * <pre> * Amazon S3 bucket conditions. * </pre> * * <code>.google.privacy.dlp.v2.AmazonS3BucketConditions amazon_s3_bucket_conditions = 2;</code> */ public Builder setAmazonS3BucketConditions( com.google.privacy.dlp.v2.AmazonS3BucketConditions.Builder builderForValue) { if (amazonS3BucketConditionsBuilder_ == null) { conditions_ = builderForValue.build(); onChanged(); } else { amazonS3BucketConditionsBuilder_.setMessage(builderForValue.build()); } conditionsCase_ = 2; return this; } /** * * * <pre> * Amazon S3 bucket conditions. * </pre> * * <code>.google.privacy.dlp.v2.AmazonS3BucketConditions amazon_s3_bucket_conditions = 2;</code> */ public Builder mergeAmazonS3BucketConditions( com.google.privacy.dlp.v2.AmazonS3BucketConditions value) { if (amazonS3BucketConditionsBuilder_ == null) { if (conditionsCase_ == 2 && conditions_ != com.google.privacy.dlp.v2.AmazonS3BucketConditions.getDefaultInstance()) { conditions_ = com.google.privacy.dlp.v2.AmazonS3BucketConditions.newBuilder( (com.google.privacy.dlp.v2.AmazonS3BucketConditions) conditions_) .mergeFrom(value) .buildPartial(); } else { conditions_ = value; } onChanged(); } else { if (conditionsCase_ == 2) { amazonS3BucketConditionsBuilder_.mergeFrom(value); } else { amazonS3BucketConditionsBuilder_.setMessage(value); } } conditionsCase_ = 2; return this; } /** * * * <pre> * Amazon S3 bucket conditions. 
* </pre> * * <code>.google.privacy.dlp.v2.AmazonS3BucketConditions amazon_s3_bucket_conditions = 2;</code> */ public Builder clearAmazonS3BucketConditions() { if (amazonS3BucketConditionsBuilder_ == null) { if (conditionsCase_ == 2) { conditionsCase_ = 0; conditions_ = null; onChanged(); } } else { if (conditionsCase_ == 2) { conditionsCase_ = 0; conditions_ = null; } amazonS3BucketConditionsBuilder_.clear(); } return this; } /** * * * <pre> * Amazon S3 bucket conditions. * </pre> * * <code>.google.privacy.dlp.v2.AmazonS3BucketConditions amazon_s3_bucket_conditions = 2;</code> */ public com.google.privacy.dlp.v2.AmazonS3BucketConditions.Builder getAmazonS3BucketConditionsBuilder() { return getAmazonS3BucketConditionsFieldBuilder().getBuilder(); } /** * * * <pre> * Amazon S3 bucket conditions. * </pre> * * <code>.google.privacy.dlp.v2.AmazonS3BucketConditions amazon_s3_bucket_conditions = 2;</code> */ @java.lang.Override public com.google.privacy.dlp.v2.AmazonS3BucketConditionsOrBuilder getAmazonS3BucketConditionsOrBuilder() { if ((conditionsCase_ == 2) && (amazonS3BucketConditionsBuilder_ != null)) { return amazonS3BucketConditionsBuilder_.getMessageOrBuilder(); } else { if (conditionsCase_ == 2) { return (com.google.privacy.dlp.v2.AmazonS3BucketConditions) conditions_; } return com.google.privacy.dlp.v2.AmazonS3BucketConditions.getDefaultInstance(); } } /** * * * <pre> * Amazon S3 bucket conditions. 
* </pre> * * <code>.google.privacy.dlp.v2.AmazonS3BucketConditions amazon_s3_bucket_conditions = 2;</code> */ private com.google.protobuf.SingleFieldBuilderV3< com.google.privacy.dlp.v2.AmazonS3BucketConditions, com.google.privacy.dlp.v2.AmazonS3BucketConditions.Builder, com.google.privacy.dlp.v2.AmazonS3BucketConditionsOrBuilder> getAmazonS3BucketConditionsFieldBuilder() { if (amazonS3BucketConditionsBuilder_ == null) { if (!(conditionsCase_ == 2)) { conditions_ = com.google.privacy.dlp.v2.AmazonS3BucketConditions.getDefaultInstance(); } amazonS3BucketConditionsBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< com.google.privacy.dlp.v2.AmazonS3BucketConditions, com.google.privacy.dlp.v2.AmazonS3BucketConditions.Builder, com.google.privacy.dlp.v2.AmazonS3BucketConditionsOrBuilder>( (com.google.privacy.dlp.v2.AmazonS3BucketConditions) conditions_, getParentForChildren(), isClean()); conditions_ = null; } conditionsCase_ = 2; onChanged(); return amazonS3BucketConditionsBuilder_; } @java.lang.Override public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:google.privacy.dlp.v2.DiscoveryOtherCloudConditions) } // @@protoc_insertion_point(class_scope:google.privacy.dlp.v2.DiscoveryOtherCloudConditions) private static final com.google.privacy.dlp.v2.DiscoveryOtherCloudConditions DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new com.google.privacy.dlp.v2.DiscoveryOtherCloudConditions(); } public static com.google.privacy.dlp.v2.DiscoveryOtherCloudConditions getDefaultInstance() { return DEFAULT_INSTANCE; } private static final com.google.protobuf.Parser<DiscoveryOtherCloudConditions> PARSER = new com.google.protobuf.AbstractParser<DiscoveryOtherCloudConditions>() { 
@java.lang.Override public DiscoveryOtherCloudConditions parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { Builder builder = newBuilder(); try { builder.mergeFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(builder.buildPartial()); } catch (com.google.protobuf.UninitializedMessageException e) { throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException(e) .setUnfinishedMessage(builder.buildPartial()); } return builder.buildPartial(); } }; public static com.google.protobuf.Parser<DiscoveryOtherCloudConditions> parser() { return PARSER; } @java.lang.Override public com.google.protobuf.Parser<DiscoveryOtherCloudConditions> getParserForType() { return PARSER; } @java.lang.Override public com.google.privacy.dlp.v2.DiscoveryOtherCloudConditions getDefaultInstanceForType() { return DEFAULT_INSTANCE; } }
apache/iceberg
35,902
snowflake/src/test/java/org/apache/iceberg/snowflake/TestJdbcSnowflakeClient.java
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied.  See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
package org.apache.iceberg.snowflake;

import static org.apache.iceberg.snowflake.JdbcSnowflakeClient.DATABASE_NOT_FOUND_ERROR_CODES;
import static org.apache.iceberg.snowflake.JdbcSnowflakeClient.SCHEMA_NOT_FOUND_ERROR_CODES;
import static org.apache.iceberg.snowflake.JdbcSnowflakeClient.TABLE_NOT_FOUND_ERROR_CODES;
import static org.assertj.core.api.Assertions.assertThat;
import static org.assertj.core.api.Assertions.assertThatExceptionOfType;
import static org.mockito.ArgumentMatchers.any;
import static org.mockito.ArgumentMatchers.eq;
import static org.mockito.Mockito.doAnswer;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.when;

import java.sql.Connection;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.util.List;
import org.apache.iceberg.ClientPool;
import org.apache.iceberg.exceptions.NoSuchNamespaceException;
import org.apache.iceberg.exceptions.NoSuchTableException;
import org.apache.iceberg.jdbc.JdbcClientPool;
import org.apache.iceberg.jdbc.UncheckedInterruptedException;
import org.apache.iceberg.jdbc.UncheckedSQLException;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.extension.ExtendWith;
import org.mockito.ArgumentMatchers;
import org.mockito.Mock;
import org.mockito.junit.jupiter.MockitoExtension;
import org.mockito.junit.jupiter.MockitoSettings;
import org.mockito.quality.Strictness;

/**
 * Unit tests for {@link JdbcSnowflakeClient}, exercising its existence checks, listing methods,
 * and table-metadata loading against fully mocked JDBC plumbing.
 *
 * <p>No real database connection is made: the {@link JdbcClientPool} and the client's
 * {@code QueryHarness} are Mockito mocks, and every query is answered from a mocked
 * {@link ResultSet}. Tests verify both the exact SQL text / bind parameters issued and how SQL
 * error codes are translated into Iceberg exceptions ({@link NoSuchNamespaceException},
 * {@link NoSuchTableException}, {@link UncheckedSQLException},
 * {@link UncheckedInterruptedException}).
 *
 * <p>Lenient strictness is used because many tests share the common stubbing done in
 * {@link #before()} without invoking all of it.
 */
@ExtendWith(MockitoExtension.class)
@MockitoSettings(strictness = Strictness.LENIENT)
public class TestJdbcSnowflakeClient {
  // Mocked JDBC layers: the pooled connection, the pool itself, the client's query harness,
  // and the result set each stubbed query "returns".
  @Mock private Connection mockConnection;
  @Mock private JdbcClientPool mockClientPool;
  @Mock private JdbcSnowflakeClient.QueryHarness mockQueryHarness;
  @Mock private ResultSet mockResultSet;

  private JdbcSnowflakeClient snowflakeClient;

  /**
   * Wires the client under test to the mocks:
   *
   * <ul>
   *   <li>{@code mockClientPool.run(action)} immediately executes the action against
   *       {@code mockConnection} instead of a pooled connection;
   *   <li>{@code mockQueryHarness.query(...)} skips SQL execution entirely and feeds
   *       {@code mockResultSet} straight into the supplied {@code ResultSetParser}.
   * </ul>
   *
   * Individual tests then script {@code mockResultSet} (row sequences or thrown
   * {@link SQLException}s) to drive each scenario.
   */
  @SuppressWarnings("unchecked")
  @BeforeEach
  public void before() throws SQLException, InterruptedException {
    snowflakeClient = new JdbcSnowflakeClient(mockClientPool);
    snowflakeClient.setQueryHarness(mockQueryHarness);

    // Pool delegation: run the given action directly on the mock connection.
    doAnswer(invocation -> ((ClientPool.Action) invocation.getArguments()[0]).run(mockConnection))
        .when(mockClientPool)
        .run(any(ClientPool.Action.class));
    // Query delegation: hand the mock result set to the parser (argument index 2).
    doAnswer(
            invocation ->
                ((JdbcSnowflakeClient.ResultSetParser) invocation.getArguments()[2])
                    .parse(mockResultSet))
        .when(mockQueryHarness)
        .query(
            any(Connection.class),
            any(String.class),
            any(JdbcSnowflakeClient.ResultSetParser.class),
            ArgumentMatchers.<String>any());
  }

  /** The constructor rejects a null client pool with an IllegalArgumentException. */
  @Test
  public void testNullClientPoolInConstructor() {
    assertThatExceptionOfType(IllegalArgumentException.class)
        .isThrownBy(() -> new JdbcSnowflakeClient(null))
        .withMessageContaining("JdbcClientPool must be non-null");
  }

  /**
   * databaseExists returns true when the underlying "SHOW SCHEMAS IN DATABASE ... LIMIT 1" query
   * yields a row for the database.
   */
  @SuppressWarnings("unchecked")
  @Test
  public void testDatabaseExists() throws SQLException {
    when(mockResultSet.next()).thenReturn(true).thenReturn(false);
    when(mockResultSet.getString("database_name")).thenReturn("DB_1");
    when(mockResultSet.getString("name")).thenReturn("SCHEMA_1");

    assertThat(snowflakeClient.databaseExists(SnowflakeIdentifier.ofDatabase("DB_1"))).isTrue();

    verify(mockQueryHarness)
        .query(
            eq(mockConnection),
            eq("SHOW SCHEMAS IN DATABASE IDENTIFIER(?) LIMIT 1"),
            any(JdbcSnowflakeClient.ResultSetParser.class),
            eq("DB_1"));
  }

  /**
   * databaseExists returns false (rather than throwing) for each Snowflake "database not found"
   * error code: 2003, 2043 and 2001.
   */
  @Test
  public void testDatabaseDoesntExist() throws SQLException {
    when(mockResultSet.next())
        .thenThrow(new SQLException("Database does not exist", "2000", 2003, null))
        .thenThrow(
            new SQLException(
                "Database does not exist, or operation cannot be performed", "2000", 2043, null))
        .thenThrow(
            new SQLException("Database does not exist or not authorized", "2000", 2001, null));

    // Error code 2003
    assertThat(snowflakeClient.databaseExists(SnowflakeIdentifier.ofDatabase("DB_1"))).isFalse();
    // Error code 2043
    assertThat(snowflakeClient.databaseExists(SnowflakeIdentifier.ofDatabase("DB_1"))).isFalse();
    // Error code 2001
    assertThat(snowflakeClient.databaseExists(SnowflakeIdentifier.ofDatabase("DB_1"))).isFalse();
  }

  /** A SQLException with an unrecognized error code surfaces as an UncheckedSQLException. */
  @Test
  public void testDatabaseFailureWithOtherException() throws SQLException {
    Exception injectedException = new SQLException("Some other exception", "2000", 2, null);
    when(mockResultSet.next()).thenThrow(injectedException);

    assertThatExceptionOfType(UncheckedSQLException.class)
        .isThrownBy(() -> snowflakeClient.databaseExists(SnowflakeIdentifier.ofDatabase("DB_1")))
        .withMessageContaining("Failed to check if database 'DATABASE: 'DB_1'' exists")
        .withCause(injectedException);
  }

  /** An InterruptedException from the pool surfaces as an UncheckedInterruptedException. */
  @SuppressWarnings("unchecked")
  @Test
  public void testDatabaseFailureWithInterruptedException()
      throws SQLException, InterruptedException {
    Exception injectedException = new InterruptedException("Fake interrupted exception");
    when(mockClientPool.run(any(ClientPool.Action.class))).thenThrow(injectedException);

    assertThatExceptionOfType(UncheckedInterruptedException.class)
        .isThrownBy(() -> snowflakeClient.databaseExists(SnowflakeIdentifier.ofDatabase("DB_1")))
        .withMessageContaining("Interrupted while checking if database 'DATABASE: 'DB_1'' exists")
        .withCause(injectedException);
  }

  /**
   * schemaExists issues two queries — a database-level probe followed by a schema-level probe —
   * and returns true when both yield rows. The stubbed next() sequence (true,false,true,false)
   * covers one row from each probe.
   */
  @SuppressWarnings("unchecked")
  @Test
  public void testSchemaExists() throws SQLException {
    when(mockResultSet.next())
        .thenReturn(true)
        .thenReturn(false)
        .thenReturn(true)
        .thenReturn(false);
    when(mockResultSet.getString("name")).thenReturn("DB1").thenReturn("SCHEMA1");
    when(mockResultSet.getString("database_name")).thenReturn("DB1");
    when(mockResultSet.getString("schema_name")).thenReturn("SCHEMA1");

    assertThat(snowflakeClient.schemaExists(SnowflakeIdentifier.ofSchema("DB1", "SCHEMA1")))
        .isTrue();

    verify(mockQueryHarness)
        .query(
            eq(mockConnection),
            eq("SHOW SCHEMAS IN DATABASE IDENTIFIER(?) LIMIT 1"),
            any(JdbcSnowflakeClient.ResultSetParser.class),
            eq("DB1"));
    verify(mockQueryHarness)
        .query(
            eq(mockConnection),
            eq("SHOW TABLES IN SCHEMA IDENTIFIER(?) LIMIT 1"),
            any(JdbcSnowflakeClient.ResultSetParser.class),
            eq("DB1.SCHEMA1"));
  }

  /**
   * schemaExists returns false for each "schema not found" error code (2003, 2043, 2001) raised
   * by the schema-level probe, after the database-level probe succeeded.
   */
  @Test
  public void testSchemaDoesntExistNoSchemaFoundException() throws SQLException {
    when(mockResultSet.next())
        // The Database exists check should pass, followed by Error code 2003 for Schema exists
        .thenReturn(true)
        .thenReturn(false)
        .thenThrow(new SQLException("Schema does not exist", "2000", 2003, null))
        // The Database exists check should pass, followed by Error code 2043 for Schema exists
        .thenReturn(true)
        .thenReturn(false)
        .thenThrow(
            new SQLException(
                "Schema does not exist, or operation cannot be performed", "2000", 2043, null))
        // The Database exists check should pass, followed by Error code 2001 for Schema exists
        .thenReturn(true)
        .thenReturn(false)
        .thenThrow(new SQLException("Schema does not exist or not authorized", "2000", 2001, null));
    when(mockResultSet.getString("name")).thenReturn("DB1").thenReturn("SCHEMA1");
    when(mockResultSet.getString("database_name")).thenReturn("DB1");

    // Error code 2003
    assertThat(snowflakeClient.schemaExists(SnowflakeIdentifier.ofSchema("DB_1", "SCHEMA_2")))
        .isFalse();
    // Error code 2043
    assertThat(snowflakeClient.schemaExists(SnowflakeIdentifier.ofSchema("DB_1", "SCHEMA_2")))
        .isFalse();
    // Error code 2001
    assertThat(snowflakeClient.schemaExists(SnowflakeIdentifier.ofSchema("DB_1", "SCHEMA_2")))
        .isFalse();
  }

  /**
   * A SQLException with an unrecognized error code during the schema-level probe surfaces as an
   * UncheckedSQLException.
   */
  @Test
  public void testSchemaFailureWithOtherException() throws SQLException {
    Exception injectedException = new SQLException("Some other exception", "2000", 2, null);
    when(mockResultSet.next())
        // The Database exists check should pass, followed by Error code 2 for Schema exists
        .thenReturn(true)
        .thenReturn(false)
        .thenThrow(injectedException);
    when(mockResultSet.getString("name")).thenReturn("DB1").thenReturn("SCHEMA1");
    when(mockResultSet.getString("database_name")).thenReturn("DB1");

    assertThatExceptionOfType(UncheckedSQLException.class)
        .isThrownBy(
            () -> snowflakeClient.schemaExists(SnowflakeIdentifier.ofSchema("DB_1", "SCHEMA_2")))
        .withMessageContaining("Failed to check if schema 'SCHEMA: 'DB_1.SCHEMA_2'' exists")
        .withCause(injectedException);
  }

  /**
   * An InterruptedException during schemaExists surfaces as an UncheckedInterruptedException.
   * The message references the database check because the interruption hits the preliminary
   * database-level probe.
   */
  @SuppressWarnings("unchecked")
  @Test
  public void testSchemaFailureWithInterruptedException()
      throws SQLException, InterruptedException {
    Exception injectedException = new InterruptedException("Fake Interrupted exception");
    when(mockClientPool.run(any(ClientPool.Action.class))).thenThrow(injectedException);

    assertThatExceptionOfType(UncheckedInterruptedException.class)
        .isThrownBy(
            () -> snowflakeClient.schemaExists(SnowflakeIdentifier.ofSchema("DB_1", "SCHEMA_2")))
        .withMessageContaining("Interrupted while checking if database 'DATABASE: 'DB_1'' exists")
        .withCause(injectedException);
  }

  /** listDatabases issues "SHOW DATABASES IN ACCOUNT" and maps each row to an identifier. */
  @SuppressWarnings("unchecked")
  @Test
  public void testListDatabasesInAccount() throws SQLException {
    when(mockResultSet.next()).thenReturn(true).thenReturn(true).thenReturn(true).thenReturn(false);
    when(mockResultSet.getString("name")).thenReturn("DB_1").thenReturn("DB_2").thenReturn("DB_3");

    List<SnowflakeIdentifier> actualList = snowflakeClient.listDatabases();

    verify(mockQueryHarness)
        .query(
            eq(mockConnection),
            eq("SHOW DATABASES IN ACCOUNT"),
            any(JdbcSnowflakeClient.ResultSetParser.class));

    assertThat(actualList)
        .containsExactly(
            SnowflakeIdentifier.ofDatabase("DB_1"),
            SnowflakeIdentifier.ofDatabase("DB_2"),
            SnowflakeIdentifier.ofDatabase("DB_3"));
  }

  /**
   * Any unexpected SQLException from the underlying connection will propagate out as a
   * UncheckedSQLException when listing databases at Root level.
   */
  @SuppressWarnings("unchecked")
  @Test
  public void testListDatabasesSQLExceptionAtRootLevel() throws SQLException, InterruptedException {
    Exception injectedException =
        new SQLException(String.format("SQL exception with Error Code %d", 0), "2000", 0, null);
    when(mockClientPool.run(any(ClientPool.Action.class))).thenThrow(injectedException);

    assertThatExceptionOfType(UncheckedSQLException.class)
        .isThrownBy(() -> snowflakeClient.listDatabases())
        .withMessageContaining("Failed to list databases")
        .withCause(injectedException);
  }

  /**
   * Any unexpected SQLException from the underlying connection will propagate out as an
   * UncheckedSQLException when listing databases if there is no error code.
   */
  @SuppressWarnings("unchecked")
  @Test
  public void testListDatabasesSQLExceptionWithoutErrorCode()
      throws SQLException, InterruptedException {
    Exception injectedException = new SQLException("Fake SQL exception");
    when(mockClientPool.run(any(ClientPool.Action.class))).thenThrow(injectedException);

    assertThatExceptionOfType(UncheckedSQLException.class)
        .isThrownBy(() -> snowflakeClient.listDatabases())
        .withMessageContaining("Failed to list databases")
        .withCause(injectedException);
  }

  /**
   * Any unexpected InterruptedException from the underlying connection will propagate out as an
   * UncheckedInterruptedException when listing databases.
   */
  @SuppressWarnings("unchecked")
  @Test
  public void testListDatabasesInterruptedException() throws SQLException, InterruptedException {
    Exception injectedException = new InterruptedException("Fake interrupted exception");
    when(mockClientPool.run(any(ClientPool.Action.class))).thenThrow(injectedException);

    assertThatExceptionOfType(UncheckedInterruptedException.class)
        .isThrownBy(() -> snowflakeClient.listDatabases())
        .withMessageContaining("Interrupted while listing databases")
        .withCause(injectedException);
  }

  /**
   * For the root scope, expect an underlying query to list schemas at the ACCOUNT level with no
   * query parameters.
   */
  @SuppressWarnings("unchecked")
  @Test
  public void testListSchemasInAccount() throws SQLException {
    when(mockResultSet.next()).thenReturn(true).thenReturn(true).thenReturn(true).thenReturn(false);
    when(mockResultSet.getString("database_name"))
        .thenReturn("DB_1")
        .thenReturn("DB_1")
        .thenReturn("DB_2");
    when(mockResultSet.getString("name"))
        .thenReturn("SCHEMA_1")
        .thenReturn("SCHEMA_2")
        .thenReturn("SCHEMA_3");

    List<SnowflakeIdentifier> actualList =
        snowflakeClient.listSchemas(SnowflakeIdentifier.ofRoot());

    verify(mockQueryHarness)
        .query(
            eq(mockConnection),
            eq("SHOW SCHEMAS IN ACCOUNT"),
            any(JdbcSnowflakeClient.ResultSetParser.class),
            eq(null));

    assertThat(actualList)
        .containsExactly(
            SnowflakeIdentifier.ofSchema("DB_1", "SCHEMA_1"),
            SnowflakeIdentifier.ofSchema("DB_1", "SCHEMA_2"),
            SnowflakeIdentifier.ofSchema("DB_2", "SCHEMA_3"));
  }

  /**
   * For a DATABASE scope, expect an underlying query to list schemas at the DATABASE level and
   * supply the database as a query param in an IDENTIFIER.
   */
  @SuppressWarnings("unchecked")
  @Test
  public void testListSchemasInDatabase() throws SQLException {
    when(mockResultSet.next()).thenReturn(true).thenReturn(true).thenReturn(false);
    when(mockResultSet.getString("database_name")).thenReturn("DB_1").thenReturn("DB_1");
    when(mockResultSet.getString("name")).thenReturn("SCHEMA_1").thenReturn("SCHEMA_2");

    List<SnowflakeIdentifier> actualList =
        snowflakeClient.listSchemas(SnowflakeIdentifier.ofDatabase("DB_1"));

    verify(mockQueryHarness)
        .query(
            eq(mockConnection),
            eq("SHOW SCHEMAS IN DATABASE IDENTIFIER(?)"),
            any(JdbcSnowflakeClient.ResultSetParser.class),
            eq("DB_1"));

    assertThat(actualList)
        .containsExactly(
            SnowflakeIdentifier.ofSchema("DB_1", "SCHEMA_1"),
            SnowflakeIdentifier.ofSchema("DB_1", "SCHEMA_2"));
  }

  /**
   * Any unexpected SQLException from the underlying connection will propagate out as an
   * UncheckedSQLException when listing schemas at Root level.
   */
  @SuppressWarnings("unchecked")
  @Test
  public void testListSchemasSQLExceptionAtRootLevel() throws SQLException, InterruptedException {
    Exception injectedException =
        new SQLException(String.format("SQL exception with Error Code %d", 0), "2000", 0, null);
    when(mockClientPool.run(any(ClientPool.Action.class))).thenThrow(injectedException);

    assertThatExceptionOfType(UncheckedSQLException.class)
        .isThrownBy(() -> snowflakeClient.listSchemas(SnowflakeIdentifier.ofRoot()))
        .withMessageContaining("Failed to list schemas for scope 'ROOT: '''")
        .withCause(injectedException);
  }

  /**
   * Any unexpected SQLException with specific error codes from the underlying connection will
   * propagate out as a NoSuchNamespaceException when listing schemas at Database level.
   */
  @SuppressWarnings("unchecked")
  @Test
  public void testListSchemasSQLExceptionAtDatabaseLevel()
      throws SQLException, InterruptedException {
    // Each "database not found" code must map to NoSuchNamespaceException.
    for (Integer errorCode : DATABASE_NOT_FOUND_ERROR_CODES) {
      Exception injectedException =
          new SQLException(
              String.format("SQL exception with Error Code %d", errorCode),
              "2000",
              errorCode,
              null);
      when(mockClientPool.run(any(ClientPool.Action.class))).thenThrow(injectedException);

      assertThatExceptionOfType(NoSuchNamespaceException.class)
          .isThrownBy(() -> snowflakeClient.listSchemas(SnowflakeIdentifier.ofDatabase("DB_1")))
          .withMessageContaining(
              String.format(
                  "Identifier not found: 'DATABASE: 'DB_1''. Underlying exception: 'SQL exception with Error Code %d'",
                  errorCode))
          .withCause(injectedException);
    }
  }

  /** List schemas is not supported at Schema level */
  @Test
  public void testListSchemasAtSchemaLevel() {
    assertThatExceptionOfType(IllegalArgumentException.class)
        .isThrownBy(
            () -> snowflakeClient.listSchemas(SnowflakeIdentifier.ofSchema("DB_1", "SCHEMA_2")))
        .withMessageContaining("Unsupported scope type for listSchemas: SCHEMA: 'DB_1.SCHEMA_2'");
  }

  /**
   * Any unexpected SQLException from the underlying connection will propagate out as an
   * UncheckedSQLException when listing schemas if there is no error code.
   */
  @SuppressWarnings("unchecked")
  @Test
  public void testListSchemasSQLExceptionWithoutErrorCode()
      throws SQLException, InterruptedException {
    Exception injectedException = new SQLException("Fake SQL exception");
    when(mockClientPool.run(any(ClientPool.Action.class))).thenThrow(injectedException);

    assertThatExceptionOfType(UncheckedSQLException.class)
        .isThrownBy(() -> snowflakeClient.listSchemas(SnowflakeIdentifier.ofDatabase("DB_1")))
        .withMessageContaining("Failed to list schemas for scope 'DATABASE: 'DB_1''")
        .withCause(injectedException);
  }

  /**
   * Any unexpected InterruptedException from the underlying connection will propagate out as an
   * UncheckedInterruptedException when listing schemas.
   */
  @SuppressWarnings("unchecked")
  @Test
  public void testListSchemasInterruptedException() throws SQLException, InterruptedException {
    Exception injectedException = new InterruptedException("Fake interrupted exception");
    when(mockClientPool.run(any(ClientPool.Action.class))).thenThrow(injectedException);

    assertThatExceptionOfType(UncheckedInterruptedException.class)
        .isThrownBy(() -> snowflakeClient.listSchemas(SnowflakeIdentifier.ofDatabase("DB_1")))
        .withMessageContaining("Interrupted while listing schemas for scope 'DATABASE: 'DB_1''")
        .withCause(injectedException);
  }

  /**
   * For the root/empty scope, expect an underlying query to list tables at the ACCOUNT level with
   * no query parameters.
   */
  @SuppressWarnings("unchecked")
  @Test
  public void testListIcebergTablesInAccount() throws SQLException {
    when(mockResultSet.next())
        .thenReturn(true)
        .thenReturn(true)
        .thenReturn(true)
        .thenReturn(true)
        .thenReturn(false);
    when(mockResultSet.getString("database_name"))
        .thenReturn("DB_1")
        .thenReturn("DB_1")
        .thenReturn("DB_1")
        .thenReturn("DB_2");
    when(mockResultSet.getString("schema_name"))
        .thenReturn("SCHEMA_1")
        .thenReturn("SCHEMA_1")
        .thenReturn("SCHEMA_2")
        .thenReturn("SCHEMA_3");
    when(mockResultSet.getString("name"))
        .thenReturn("TABLE_1")
        .thenReturn("TABLE_2")
        .thenReturn("TABLE_3")
        .thenReturn("TABLE_4");

    List<SnowflakeIdentifier> actualList =
        snowflakeClient.listIcebergTables(SnowflakeIdentifier.ofRoot());

    verify(mockQueryHarness)
        .query(
            eq(mockConnection),
            eq("SHOW ICEBERG TABLES IN ACCOUNT"),
            any(JdbcSnowflakeClient.ResultSetParser.class),
            eq(null));

    assertThat(actualList)
        .containsExactly(
            SnowflakeIdentifier.ofTable("DB_1", "SCHEMA_1", "TABLE_1"),
            SnowflakeIdentifier.ofTable("DB_1", "SCHEMA_1", "TABLE_2"),
            SnowflakeIdentifier.ofTable("DB_1", "SCHEMA_2", "TABLE_3"),
            SnowflakeIdentifier.ofTable("DB_2", "SCHEMA_3", "TABLE_4"));
  }

  /**
   * For a DATABASE scope, expect an underlying query to list tables at the DATABASE level and
   * supply the database as a query param in an IDENTIFIER.
   */
  @SuppressWarnings("unchecked")
  @Test
  public void testListIcebergTablesInDatabase() throws SQLException {
    when(mockResultSet.next()).thenReturn(true).thenReturn(true).thenReturn(true).thenReturn(false);
    when(mockResultSet.getString("database_name"))
        .thenReturn("DB_1")
        .thenReturn("DB_1")
        .thenReturn("DB_1");
    when(mockResultSet.getString("schema_name"))
        .thenReturn("SCHEMA_1")
        .thenReturn("SCHEMA_1")
        .thenReturn("SCHEMA_2");
    when(mockResultSet.getString("name"))
        .thenReturn("TABLE_1")
        .thenReturn("TABLE_2")
        .thenReturn("TABLE_3");

    List<SnowflakeIdentifier> actualList =
        snowflakeClient.listIcebergTables(SnowflakeIdentifier.ofDatabase("DB_1"));

    verify(mockQueryHarness)
        .query(
            eq(mockConnection),
            eq("SHOW ICEBERG TABLES IN DATABASE IDENTIFIER(?)"),
            any(JdbcSnowflakeClient.ResultSetParser.class),
            eq("DB_1"));

    assertThat(actualList)
        .containsExactly(
            SnowflakeIdentifier.ofTable("DB_1", "SCHEMA_1", "TABLE_1"),
            SnowflakeIdentifier.ofTable("DB_1", "SCHEMA_1", "TABLE_2"),
            SnowflakeIdentifier.ofTable("DB_1", "SCHEMA_2", "TABLE_3"));
  }

  /**
   * For a SCHEMA scope, expect an underlying query to list tables at the SCHEMA level and supply
   * the schema as a query param in an IDENTIFIER.
   */
  @SuppressWarnings("unchecked")
  @Test
  public void testListIcebergTablesInSchema() throws SQLException {
    when(mockResultSet.next()).thenReturn(true).thenReturn(true).thenReturn(false);
    when(mockResultSet.getString("database_name")).thenReturn("DB_1").thenReturn("DB_1");
    when(mockResultSet.getString("schema_name")).thenReturn("SCHEMA_1").thenReturn("SCHEMA_1");
    when(mockResultSet.getString("name")).thenReturn("TABLE_1").thenReturn("TABLE_2");

    List<SnowflakeIdentifier> actualList =
        snowflakeClient.listIcebergTables(SnowflakeIdentifier.ofSchema("DB_1", "SCHEMA_1"));

    verify(mockQueryHarness)
        .query(
            eq(mockConnection),
            eq("SHOW ICEBERG TABLES IN SCHEMA IDENTIFIER(?)"),
            any(JdbcSnowflakeClient.ResultSetParser.class),
            eq("DB_1.SCHEMA_1"));

    assertThat(actualList)
        .containsExactly(
            SnowflakeIdentifier.ofTable("DB_1", "SCHEMA_1", "TABLE_1"),
            SnowflakeIdentifier.ofTable("DB_1", "SCHEMA_1", "TABLE_2"));
  }

  /**
   * Any unexpected SQLException from the underlying connection will propagate out as an
   * UncheckedSQLException when listing tables at Root level
   */
  @SuppressWarnings("unchecked")
  @Test
  public void testListIcebergTablesSQLExceptionAtRootLevel()
      throws SQLException, InterruptedException {
    Exception injectedException =
        new SQLException(String.format("SQL exception with Error Code %d", 0), "2000", 0, null);
    when(mockClientPool.run(any(ClientPool.Action.class))).thenThrow(injectedException);

    assertThatExceptionOfType(UncheckedSQLException.class)
        .isThrownBy(() -> snowflakeClient.listIcebergTables(SnowflakeIdentifier.ofRoot()))
        .withMessageContaining("Failed to list tables for scope 'ROOT: '''")
        .withCause(injectedException);
  }

  /**
   * Any unexpected SQLException with specific error codes from the underlying connection will
   * propagate out as a NoSuchNamespaceException when listing tables at Database level
   */
  @SuppressWarnings("unchecked")
  @Test
  public void testListIcebergTablesSQLExceptionAtDatabaseLevel()
      throws SQLException, InterruptedException {
    for (Integer errorCode : DATABASE_NOT_FOUND_ERROR_CODES) {
      Exception injectedException =
          new SQLException(
              String.format("SQL exception with Error Code %d", errorCode),
              "2000",
              errorCode,
              null);
      when(mockClientPool.run(any(ClientPool.Action.class))).thenThrow(injectedException);

      assertThatExceptionOfType(NoSuchNamespaceException.class)
          .isThrownBy(
              () -> snowflakeClient.listIcebergTables(SnowflakeIdentifier.ofDatabase("DB_1")))
          .withMessageContaining(
              String.format(
                  "Identifier not found: 'DATABASE: 'DB_1''. Underlying exception: 'SQL exception with Error Code %d'",
                  errorCode))
          .withCause(injectedException);
    }
  }

  /**
   * Any unexpected SQLException with specific error codes from the underlying connection will
   * propagate out as a NoSuchNamespaceException when listing tables at Schema level
   */
  @SuppressWarnings("unchecked")
  @Test
  public void testListIcebergTablesSQLExceptionAtSchemaLevel()
      throws SQLException, InterruptedException {
    for (Integer errorCode : SCHEMA_NOT_FOUND_ERROR_CODES) {
      Exception injectedException =
          new SQLException(
              String.format("SQL exception with Error Code %d", errorCode),
              "2000",
              errorCode,
              null);
      when(mockClientPool.run(any(ClientPool.Action.class))).thenThrow(injectedException);

      assertThatExceptionOfType(NoSuchNamespaceException.class)
          .isThrownBy(
              () ->
                  snowflakeClient.listIcebergTables(
                      SnowflakeIdentifier.ofSchema("DB_1", "SCHEMA_1")))
          .withMessageContaining(
              String.format(
                  "Identifier not found: 'SCHEMA: 'DB_1.SCHEMA_1''. Underlying exception: 'SQL exception with Error Code %d'",
                  errorCode))
          .withCause(injectedException);
    }
  }

  /**
   * Any unexpected SQLException without error code from the underlying connection will propagate
   * out as an UncheckedSQLException when listing tables.
   */
  @SuppressWarnings("unchecked")
  @Test
  public void testListIcebergTablesSQLExceptionWithoutErrorCode()
      throws SQLException, InterruptedException {
    Exception injectedException = new SQLException("Fake SQL exception");
    when(mockClientPool.run(any(ClientPool.Action.class))).thenThrow(injectedException);

    assertThatExceptionOfType(UncheckedSQLException.class)
        .isThrownBy(() -> snowflakeClient.listIcebergTables(SnowflakeIdentifier.ofDatabase("DB_1")))
        .withMessageContaining("Failed to list tables for scope 'DATABASE: 'DB_1''")
        .withCause(injectedException);
  }

  /**
   * Any unexpected InterruptedException from the underlying connection will propagate out as an
   * UncheckedInterruptedException when listing tables.
   */
  @SuppressWarnings("unchecked")
  @Test
  public void testListIcebergTablesInterruptedException()
      throws SQLException, InterruptedException {
    Exception injectedException = new InterruptedException("Fake interrupted exception");
    when(mockClientPool.run(any(ClientPool.Action.class))).thenThrow(injectedException);

    assertThatExceptionOfType(UncheckedInterruptedException.class)
        .isThrownBy(() -> snowflakeClient.listIcebergTables(SnowflakeIdentifier.ofDatabase("DB_1")))
        .withMessageContaining("Interrupted while listing tables for scope 'DATABASE: 'DB_1''")
        .withCause(injectedException);
  }

  /**
   * Test parsing of table metadata JSON from a GET_ICEBERG_TABLE_INFORMATION call, with the S3 path
   * unaltered between snowflake/iceberg path representations.
   */
  @SuppressWarnings("unchecked")
  @Test
  public void testGetS3TableMetadata() throws SQLException {
    when(mockResultSet.next()).thenReturn(true);
    when(mockResultSet.getString("METADATA"))
        .thenReturn(
            "{\"metadataLocation\":\"s3://tab1/metadata/v3.metadata.json\",\"status\":\"success\"}");

    SnowflakeTableMetadata actualMetadata =
        snowflakeClient.loadTableMetadata(
            SnowflakeIdentifier.ofTable("DB_1", "SCHEMA_1", "TABLE_1"));

    verify(mockQueryHarness)
        .query(
            eq(mockConnection),
            eq("SELECT SYSTEM$GET_ICEBERG_TABLE_INFORMATION(?) AS METADATA"),
            any(JdbcSnowflakeClient.ResultSetParser.class),
            eq("DB_1.SCHEMA_1.TABLE_1"));

    SnowflakeTableMetadata expectedMetadata =
        new SnowflakeTableMetadata(
            "s3://tab1/metadata/v3.metadata.json",
            "s3://tab1/metadata/v3.metadata.json",
            "success",
            null);
    assertThat(actualMetadata).isEqualTo(expectedMetadata);
  }

  /**
   * Test parsing of table metadata JSON from a GET_ICEBERG_TABLE_INFORMATION call, with the Azure
   * path translated from an azure:// format to a wasbs:// format.
   */
  @SuppressWarnings("unchecked")
  @Test
  public void testGetAzureTableMetadata() throws SQLException {
    when(mockResultSet.next()).thenReturn(true);
    when(mockResultSet.getString("METADATA"))
        .thenReturn(
            "{\"metadataLocation\":\"azure://myaccount.blob.core.windows.net/mycontainer/tab3/metadata/v334.metadata.json\",\"status\":\"success\"}");

    SnowflakeTableMetadata actualMetadata =
        snowflakeClient.loadTableMetadata(
            SnowflakeIdentifier.ofTable("DB_1", "SCHEMA_1", "TABLE_1"));

    verify(mockQueryHarness)
        .query(
            eq(mockConnection),
            eq("SELECT SYSTEM$GET_ICEBERG_TABLE_INFORMATION(?) AS METADATA"),
            any(JdbcSnowflakeClient.ResultSetParser.class),
            eq("DB_1.SCHEMA_1.TABLE_1"));

    SnowflakeTableMetadata expectedMetadata =
        new SnowflakeTableMetadata(
            "azure://myaccount.blob.core.windows.net/mycontainer/tab3/metadata/v334.metadata.json",
            "wasbs://mycontainer@myaccount.blob.core.windows.net/tab3/metadata/v334.metadata.json",
            "success",
            null);
    assertThat(actualMetadata).isEqualTo(expectedMetadata);
  }

  /**
   * Test parsing of table metadata JSON from a GET_ICEBERG_TABLE_INFORMATION call, with the GCS
   * path translated from a gcs:// format to a gs:// format.
   */
  @SuppressWarnings("unchecked")
  @Test
  public void testGetGcsTableMetadata() throws SQLException {
    when(mockResultSet.next()).thenReturn(true);
    when(mockResultSet.getString("METADATA"))
        .thenReturn(
            "{\"metadataLocation\":\"gcs://tab5/metadata/v793.metadata.json\",\"status\":\"success\"}");

    SnowflakeTableMetadata actualMetadata =
        snowflakeClient.loadTableMetadata(
            SnowflakeIdentifier.ofTable("DB_1", "SCHEMA_1", "TABLE_1"));

    verify(mockQueryHarness)
        .query(
            eq(mockConnection),
            eq("SELECT SYSTEM$GET_ICEBERG_TABLE_INFORMATION(?) AS METADATA"),
            any(JdbcSnowflakeClient.ResultSetParser.class),
            eq("DB_1.SCHEMA_1.TABLE_1"));

    SnowflakeTableMetadata expectedMetadata =
        new SnowflakeTableMetadata(
            "gcs://tab5/metadata/v793.metadata.json",
            "gs://tab5/metadata/v793.metadata.json",
            "success",
            null);
    assertThat(actualMetadata).isEqualTo(expectedMetadata);
  }

  /** Malformed JSON from a ResultSet should propagate as an IllegalArgumentException. */
  @Test
  public void testGetTableMetadataMalformedJson() throws SQLException {
    when(mockResultSet.next()).thenReturn(true);
    when(mockResultSet.getString("METADATA")).thenReturn("{\"malformed_no_closing_bracket");
    assertThatExceptionOfType(IllegalArgumentException.class)
        .isThrownBy(
            () ->
                snowflakeClient.loadTableMetadata(
                    SnowflakeIdentifier.ofTable("DB_1", "SCHEMA_1", "TABLE_1")))
        .withMessageContaining("{\"malformed_no_closing_bracket");
  }

  /**
   * Any unexpected SQLException with specific error codes from the underlying connection will
   * propagate out as a NoSuchTableException when getting table metadata.
   */
  @SuppressWarnings("unchecked")
  @Test
  public void testGetTableMetadataSQLException() throws SQLException, InterruptedException {
    for (Integer errorCode : TABLE_NOT_FOUND_ERROR_CODES) {
      Exception injectedException =
          new SQLException(
              String.format("SQL exception with Error Code %d", errorCode),
              "2000",
              errorCode,
              null);
      when(mockClientPool.run(any(ClientPool.Action.class))).thenThrow(injectedException);

      assertThatExceptionOfType(NoSuchTableException.class)
          .isThrownBy(
              () ->
                  snowflakeClient.loadTableMetadata(
                      SnowflakeIdentifier.ofTable("DB_1", "SCHEMA_1", "TABLE_1")))
          .withMessageContaining(
              String.format(
                  "Identifier not found: 'TABLE: 'DB_1.SCHEMA_1.TABLE_1''. Underlying exception: 'SQL exception with Error Code %d'",
                  errorCode))
          .withCause(injectedException);
    }
  }

  /**
   * Any unexpected SQLException from the underlying connection will propagate out as an
   * UncheckedSQLException when getting table metadata.
   */
  @SuppressWarnings("unchecked")
  @Test
  public void testGetTableMetadataSQLExceptionWithoutErrorCode()
      throws SQLException, InterruptedException {
    Exception injectedException = new SQLException("Fake SQL exception");
    when(mockClientPool.run(any(ClientPool.Action.class))).thenThrow(injectedException);

    assertThatExceptionOfType(UncheckedSQLException.class)
        .isThrownBy(
            () ->
                snowflakeClient.loadTableMetadata(
                    SnowflakeIdentifier.ofTable("DB_1", "SCHEMA_1", "TABLE_1")))
        .withMessageContaining("Failed to get table metadata for 'TABLE: 'DB_1.SCHEMA_1.TABLE_1''")
        .withCause(injectedException);
  }

  /**
   * Any unexpected InterruptedException from the underlying connection will propagate out as an
   * UncheckedInterruptedException when getting table metadata.
   */
  @SuppressWarnings("unchecked")
  @Test
  public void testGetTableMetadataInterruptedException() throws SQLException, InterruptedException {
    Exception injectedException = new InterruptedException("Fake interrupted exception");
    when(mockClientPool.run(any(ClientPool.Action.class))).thenThrow(injectedException);

    assertThatExceptionOfType(UncheckedInterruptedException.class)
        .isThrownBy(
            () ->
                snowflakeClient.loadTableMetadata(
                    SnowflakeIdentifier.ofTable("DB_1", "SCHEMA_1", "TABLE_1")))
        .withMessageContaining(
            "Interrupted while getting table metadata for 'TABLE: 'DB_1.SCHEMA_1.TABLE_1''")
        .withCause(injectedException);
  }

  /** Calling close() propagates to closing underlying client pool. */
  @Test
  public void testClose() {
    snowflakeClient.close();
    verify(mockClientPool).close();
  }
}
apache/lucene
36,440
lucene/facet/src/java/org/apache/lucene/facet/taxonomy/directory/DirectoryTaxonomyWriter.java
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.lucene.facet.taxonomy.directory; import java.io.BufferedInputStream; import java.io.BufferedOutputStream; import java.io.DataInputStream; import java.io.DataOutputStream; import java.io.IOException; import java.nio.file.Files; import java.nio.file.Path; import java.util.HashMap; import java.util.Map; import java.util.Objects; import java.util.concurrent.atomic.AtomicInteger; import java.util.function.BiConsumer; import org.apache.lucene.document.BinaryDocValuesField; import org.apache.lucene.document.Document; import org.apache.lucene.document.Field; import org.apache.lucene.document.NumericDocValuesField; import org.apache.lucene.document.StringField; import org.apache.lucene.facet.FacetsConfig; import org.apache.lucene.facet.taxonomy.FacetLabel; import org.apache.lucene.facet.taxonomy.ParallelTaxonomyArrays; import org.apache.lucene.facet.taxonomy.TaxonomyReader; import org.apache.lucene.facet.taxonomy.TaxonomyWriter; import org.apache.lucene.facet.taxonomy.writercache.LruTaxonomyWriterCache; import org.apache.lucene.facet.taxonomy.writercache.TaxonomyWriterCache; import org.apache.lucene.index.CorruptIndexException; // javadocs import org.apache.lucene.index.DirectoryReader; 
import org.apache.lucene.index.IndexReader; import org.apache.lucene.index.IndexWriter; import org.apache.lucene.index.IndexWriterConfig; import org.apache.lucene.index.IndexWriterConfig.OpenMode; import org.apache.lucene.index.LeafReader; import org.apache.lucene.index.LeafReaderContext; import org.apache.lucene.index.LogByteSizeMergePolicy; import org.apache.lucene.index.PostingsEnum; import org.apache.lucene.index.ReaderManager; import org.apache.lucene.index.SegmentInfos; import org.apache.lucene.index.Terms; import org.apache.lucene.index.TermsEnum; import org.apache.lucene.index.TieredMergePolicy; import org.apache.lucene.store.AlreadyClosedException; import org.apache.lucene.store.Directory; import org.apache.lucene.store.LockObtainFailedException; import org.apache.lucene.util.BytesRef; /** * {@link TaxonomyWriter} which uses a {@link Directory} to store the taxonomy information on disk, * and keeps an additional in-memory cache of some or all categories. * * <p>In addition to the permanently-stored information in the {@link Directory}, efficiency * dictates that we also keep an in-memory cache of <B>recently seen</B> or <B>all</B> categories, * so that we do not need to go back to disk for every category addition to see which ordinal this * category already has, if any. A {@link TaxonomyWriterCache} object determines the specific * caching algorithm used. * * <p>This class offers some hooks for extending classes to control the {@link IndexWriter} instance * that is used. See {@link #openIndexWriter}. * * @lucene.experimental */ public class DirectoryTaxonomyWriter implements TaxonomyWriter { /** * Property name of user commit data that contains the index epoch. The epoch changes whenever the * taxonomy is recreated (i.e. opened with {@link OpenMode#CREATE}. * * <p>Applications should not use this property in their commit data because it will be overridden * by this taxonomy writer. 
*/ public static final String INDEX_EPOCH = "index.epoch"; private static final int DEFAULT_CACHE_SIZE = 4000; private final Directory dir; private final IndexWriter indexWriter; private final TaxonomyWriterCache cache; private final AtomicInteger cacheMisses = new AtomicInteger(0); private final AtomicInteger nextID = new AtomicInteger(0); private final Field fullPathField; // Records the taxonomy index epoch, updated on replaceTaxonomy as well. private long indexEpoch; private int cacheMissesUntilFill = 11; private boolean shouldFillCache = true; // even though lazily initialized, not volatile so that access to it is // faster. we keep a volatile boolean init instead. private ReaderManager readerManager; private volatile boolean initializedReaderManager = false; private volatile boolean shouldRefreshReaderManager; /** * We call the cache "complete" if we know that every category in our taxonomy is in the cache. * When the cache is <B>not</B> complete, and we can't find a category in the cache, we still need * to look for it in the on-disk index; Therefore when the cache is not complete, we need to open * a "reader" to the taxonomy index. The cache becomes incomplete if it was never filled with the * existing categories, or if a put() to the cache ever returned true (meaning that some cached * data was cleared). */ private volatile boolean cacheIsComplete; private volatile boolean isClosed = false; private volatile TaxonomyIndexArrays taxoArrays; /** * Construct a Taxonomy writer. * * @param directory The {@link Directory} in which to store the taxonomy. Note that the taxonomy * is written directly to that directory (not to a subdirectory of it). * @param openMode Specifies how to open a taxonomy for writing: <code>APPEND</code> means open an * existing index for append (failing if the index does not yet exist). <code>CREATE</code> * means create a new index (first deleting the old one if it already existed). 
<code> * APPEND_OR_CREATE</code> appends to an existing index if there is one, otherwise it creates * a new index. * @param cache A {@link TaxonomyWriterCache} implementation which determines the in-memory * caching policy. See for example {@link LruTaxonomyWriterCache}. If null or missing, {@link * #defaultTaxonomyWriterCache()} is used. * @throws CorruptIndexException if the taxonomy is corrupted. * @throws LockObtainFailedException if the taxonomy is locked by another writer. * @throws IOException if another error occurred. */ public DirectoryTaxonomyWriter(Directory directory, OpenMode openMode, TaxonomyWriterCache cache) throws IOException { dir = directory; IndexWriterConfig config = createIndexWriterConfig(openMode); indexWriter = openIndexWriter(dir, config); // verify (to some extent) that merge policy in effect would preserve category docids assert !(indexWriter.getConfig().getMergePolicy() instanceof TieredMergePolicy) : "for preserving category docids, merging none-adjacent segments is not allowed"; // after we opened the writer, and the index is locked, it's safe to check // the commit data and read the index epoch openMode = config.getOpenMode(); if (DirectoryReader.indexExists(directory) == false) { indexEpoch = 1; } else { String epochStr = null; SegmentInfos infos = SegmentInfos.readLatestCommit(dir); Map<String, String> commitData = infos.getUserData(); if (commitData != null) { epochStr = commitData.get(INDEX_EPOCH); } // no commit data, or no epoch in it means an old taxonomy, so set its epoch to 1, for lack // of a better value. indexEpoch = epochStr == null ? 
1 : Long.parseLong(epochStr, 16); } if (openMode == OpenMode.CREATE) { ++indexEpoch; } fullPathField = new StringField(Consts.FULL, "", Field.Store.NO); nextID.set(indexWriter.getDocStats().maxDoc); if (cache == null) { cache = defaultTaxonomyWriterCache(); } this.cache = cache; if (nextID.get() == 0) { cacheIsComplete = true; // Make sure that the taxonomy always contain the root category // with category id 0. addCategory(new FacetLabel()); } else { // There are some categories on the disk, which we have not yet // read into the cache, and therefore the cache is incomplete. // We choose not to read all the categories into the cache now, // to avoid terrible performance when a taxonomy index is opened // to add just a single category. We will do it later, after we // notice a few cache misses. cacheIsComplete = false; } } /** Returns the {@link TaxonomyWriterCache} in use by this writer. */ public TaxonomyWriterCache getCache() { return cache; } /** * Open internal index writer, which contains the taxonomy data. * * <p>Extensions may provide their own {@link IndexWriter} implementation or instance. <br> * <b>NOTE:</b> the instance this method returns will be closed upon calling to {@link #close()}. * <br> * <b>NOTE:</b> the merge policy in effect must not merge none adjacent segments. See comment in * {@link #createIndexWriterConfig(IndexWriterConfig.OpenMode)} for the logic behind this. * * @see #createIndexWriterConfig(IndexWriterConfig.OpenMode) * @param directory the {@link Directory} on top of which an {@link IndexWriter} should be opened. * @param config configuration for the internal index writer. */ protected IndexWriter openIndexWriter(Directory directory, IndexWriterConfig config) throws IOException { return new IndexWriter(directory, config); } /** * Create the {@link IndexWriterConfig} that would be used for opening the internal index writer. 
* <br> * Extensions can configure the {@link IndexWriter} as they see fit, including setting a {@link * org.apache.lucene.index.MergeScheduler merge-scheduler}, or {@link * org.apache.lucene.index.IndexDeletionPolicy deletion-policy}, different RAM size etc.<br> * <br> * <b>NOTE:</b> internal docids of the configured index must not be altered. For that, categories * are never deleted from the taxonomy index. In addition, merge policy in effect must not merge * none adjacent segments. * * @see #openIndexWriter(Directory, IndexWriterConfig) * @param openMode see {@link OpenMode} */ protected IndexWriterConfig createIndexWriterConfig(OpenMode openMode) { // TODO: should we use a more optimized Codec? // The taxonomy has a unique structure, where each term is associated with one document // Make sure we use a MergePolicy which always merges adjacent segments and thus // keeps the doc IDs ordered as well (this is crucial for the taxonomy index). return new IndexWriterConfig(null) .setOpenMode(openMode) .setMergePolicy(new LogByteSizeMergePolicy()); } /** Opens a {@link ReaderManager} from the internal {@link IndexWriter}. */ private void initReaderManager() throws IOException { if (!initializedReaderManager) { synchronized (this) { // verify that the taxo-writer hasn't been closed on us. ensureOpen(); if (!initializedReaderManager) { readerManager = new ReaderManager(indexWriter, false, false); shouldRefreshReaderManager = false; initializedReaderManager = true; } } } } /** * Creates a new instance with a default cache as defined by {@link * #defaultTaxonomyWriterCache()}. */ public DirectoryTaxonomyWriter(Directory directory, OpenMode openMode) throws IOException { this(directory, openMode, defaultTaxonomyWriterCache()); } /** * Defines the default {@link TaxonomyWriterCache} to use in constructors which do not specify * one. 
* * <p>The current default is {@link LruTaxonomyWriterCache} */ public static TaxonomyWriterCache defaultTaxonomyWriterCache() { return new LruTaxonomyWriterCache(DEFAULT_CACHE_SIZE); } /** Create this with {@code OpenMode.CREATE_OR_APPEND}. */ public DirectoryTaxonomyWriter(Directory d) throws IOException { this(d, OpenMode.CREATE_OR_APPEND); } /** * Frees used resources as well as closes the underlying {@link IndexWriter}, which commits * whatever changes made to it to the underlying {@link Directory}. */ @Override public synchronized void close() throws IOException { if (!isClosed) { commit(); indexWriter.close(); doClose(); } } private void doClose() throws IOException { isClosed = true; closeResources(); } /** * A hook for extending classes to close additional resources that were used. The default * implementation closes the {@link IndexReader} as well as the {@link TaxonomyWriterCache} * instances that were used. <br> * <b>NOTE:</b> if you override this method, you should include a <code>super.closeResources() * </code> call in your implementation. */ protected synchronized void closeResources() throws IOException { if (initializedReaderManager) { readerManager.close(); readerManager = null; initializedReaderManager = false; } if (cache != null) { cache.close(); } } /** * Look up the given category in the cache and/or the on-disk storage, returning the category's * ordinal, or a negative number in case the category does not yet exist in the taxonomy. */ protected synchronized int findCategory(FacetLabel categoryPath) throws IOException { // If we can find the category in the cache, or we know the cache is // complete, we can return the response directly from it int res = cache.get(categoryPath); if (res >= 0 || cacheIsComplete) { return res; } cacheMisses.incrementAndGet(); // After a few cache misses, it makes sense to read all the categories // from disk and into the cache. 
The reason not to do this on the first // cache miss (or even when opening the writer) is that it will // significantly slow down the case when a taxonomy is opened just to // add one category. The idea only spending a long time on reading // after enough time was spent on cache misses is known as an "online // algorithm". perhapsFillCache(); res = cache.get(categoryPath); if (res >= 0 || cacheIsComplete) { // if after filling the cache from the info on disk, the category is in it // or the cache is complete, return whatever cache.get returned. return res; } // if we get here, it means the category is not in the cache, and it is not // complete, and therefore we must look for the category on disk. // We need to get an answer from the on-disk index. initReaderManager(); int doc = -1; DirectoryReader reader = readerManager.acquire(); try { final BytesRef catTerm = new BytesRef(FacetsConfig.pathToString(categoryPath.components, categoryPath.length)); PostingsEnum docs = null; // reuse for (LeafReaderContext ctx : reader.leaves()) { Terms terms = Terms.getTerms(ctx.reader(), Consts.FULL); // TODO: share per-segment TermsEnum here! TermsEnum termsEnum = terms.iterator(); if (termsEnum.seekExact(catTerm)) { // liveDocs=null because the taxonomy has no deletes docs = termsEnum.postings(docs, 0 /* freqs not required */); // if the term was found, we know it has exactly one document. doc = docs.nextDoc() + ctx.docBase; break; } } } finally { readerManager.release(reader); } if (doc > 0) { addToCache(categoryPath, doc); } return doc; } @Override public int addCategory(FacetLabel categoryPath) throws IOException { ensureOpen(); // check the cache outside the synchronized block. this results in better // concurrency when categories are there. int res = cache.get(categoryPath); if (res < 0) { // the category is not in the cache - following code cannot be executed in parallel. 
synchronized (this) { res = findCategory(categoryPath); if (res < 0) { // This is a new category, and we need to insert it into the index // (and the cache). Actually, we might also need to add some of // the category's ancestors before we can add the category itself // (while keeping the invariant that a parent is always added to // the taxonomy before its child). internalAddCategory() does all // this recursively res = internalAddCategory(categoryPath); } } } return res; } /** * Add a new category into the index (and the cache), and return its new ordinal. * * <p>Actually, we might also need to add some of the category's ancestors before we can add the * category itself (while keeping the invariant that a parent is always added to the taxonomy * before its child). We do this by recursion. */ private int internalAddCategory(FacetLabel cp) throws IOException { // Find our parent's ordinal (recursively adding the parent category // to the taxonomy if it's not already there). Then add the parent // ordinal as payloads (rather than a stored field; payloads can be // more efficiently read into memory in bulk by LuceneTaxonomyReader) int parent; if (cp.length > 1) { FacetLabel parentPath = cp.subpath(cp.length - 1); parent = findCategory(parentPath); if (parent < 0) { parent = internalAddCategory(parentPath); } } else if (cp.length == 1) { parent = TaxonomyReader.ROOT_ORDINAL; } else { parent = TaxonomyReader.INVALID_ORDINAL; } return addCategoryDocument(cp, parent); } /** * Verifies that this instance wasn't closed, or throws {@link AlreadyClosedException} if it is. */ protected final void ensureOpen() { if (isClosed) { throw new AlreadyClosedException("The taxonomy writer has already been closed"); } } /** * Child classes can implement this method to modify the document corresponding to a category path * before indexing it. 
* * @lucene.experimental */ protected void enrichOrdinalDocument(Document d, FacetLabel categoryPath) {} /** * Note that the methods calling addCategoryDocument() are synchronized, so this method is * effectively synchronized as well. */ private int addCategoryDocument(FacetLabel categoryPath, int parent) throws IOException { Document d = new Document(); /* Lucene 9 switches to NumericDocValuesField for storing parent ordinals */ d.add(new NumericDocValuesField(Consts.FIELD_PARENT_ORDINAL_NDV, parent)); String fieldPath = FacetsConfig.pathToString(categoryPath.components, categoryPath.length); fullPathField.setStringValue(fieldPath); /* Lucene 9 switches to BinaryDocValuesField for storing taxonomy categories */ d.add(new BinaryDocValuesField(Consts.FULL, new BytesRef(fieldPath))); d.add(fullPathField); // add arbitrary ordinal data to the doc enrichOrdinalDocument(d, categoryPath); indexWriter.addDocument(d); int id = nextID.getAndIncrement(); // added a category document, mark that ReaderManager is not up-to-date shouldRefreshReaderManager = true; // also add to the parent array taxoArrays = getTaxoArrays().add(id, parent); // NOTE: this line must be executed last, or else the cache gets updated // before the parents array (LUCENE-4596) addToCache(categoryPath, id); return id; } private void addToCache(FacetLabel categoryPath, int id) throws IOException { if (cache.put(categoryPath, id)) { // If cache.put() returned true, it means the cache was limited in // size, became full, and parts of it had to be evicted. It is // possible that a relatively-new category that isn't yet visible // to our 'reader' was evicted, and therefore we must now refresh // the reader. refreshReaderManager(); cacheIsComplete = false; } } private synchronized void refreshReaderManager() throws IOException { // this method is synchronized since it cannot happen concurrently with // addCategoryDocument -- when this method returns, we must know that the // reader manager's state is current. 
also, it sets shouldRefresh to false, // and this cannot overlap with addCatDoc too. // NOTE: since this method is synced, it can call maybeRefresh, instead of // maybeRefreshBlocking. If ever this is changed, make sure to change the // call too. if (shouldRefreshReaderManager && initializedReaderManager) { readerManager.maybeRefresh(); shouldRefreshReaderManager = false; } } @Override public synchronized long commit() throws IOException { ensureOpen(); // LUCENE-4972: if we always call setCommitData, we create empty commits Map<String, String> data = new HashMap<>(); Iterable<Map.Entry<String, String>> iter = indexWriter.getLiveCommitData(); if (iter != null) { for (Map.Entry<String, String> ent : iter) { data.put(ent.getKey(), ent.getValue()); } } String epochStr = data.get(INDEX_EPOCH); if (epochStr == null || Long.parseLong(epochStr, 16) != indexEpoch) { indexWriter.setLiveCommitData(combinedCommitData(indexWriter.getLiveCommitData())); } return indexWriter.commit(); } /** Combine original user data with the taxonomy epoch. */ private Iterable<Map.Entry<String, String>> combinedCommitData( Iterable<Map.Entry<String, String>> commitData) { Map<String, String> m = new HashMap<>(); if (commitData != null) { for (Map.Entry<String, String> ent : commitData) { m.put(ent.getKey(), ent.getValue()); } } m.put(INDEX_EPOCH, Long.toString(indexEpoch, 16)); return m.entrySet(); } @Override public void setLiveCommitData(Iterable<Map.Entry<String, String>> commitUserData) { indexWriter.setLiveCommitData(combinedCommitData(commitUserData)); } @Override public Iterable<Map.Entry<String, String>> getLiveCommitData() { return combinedCommitData(indexWriter.getLiveCommitData()); } /** * prepare most of the work needed for a two-phase commit. See {@link IndexWriter#prepareCommit}. 
*/ @Override public synchronized long prepareCommit() throws IOException { ensureOpen(); // LUCENE-4972: if we always call setCommitData, we create empty commits Map<String, String> data = new HashMap<>(); Iterable<Map.Entry<String, String>> iter = indexWriter.getLiveCommitData(); if (iter != null) { for (Map.Entry<String, String> ent : iter) { data.put(ent.getKey(), ent.getValue()); } } String epochStr = data.get(INDEX_EPOCH); if (epochStr == null || Long.parseLong(epochStr, 16) != indexEpoch) { indexWriter.setLiveCommitData(combinedCommitData(indexWriter.getLiveCommitData())); } return indexWriter.prepareCommit(); } @Override public int getSize() { ensureOpen(); return nextID.get(); } /** * Set the number of cache misses before an attempt is made to read the entire taxonomy into the * in-memory cache. * * <p>This taxonomy writer holds an in-memory cache of recently seen categories to speed up * operation. On each cache-miss, the on-disk index needs to be consulted. When an existing * taxonomy is opened, a lot of slow disk reads like that are needed until the cache is filled, so * it is more efficient to read the entire taxonomy into memory at once. We do this complete read * after a certain number (defined by this method) of cache misses. * * <p>If the number is set to {@code 0}, the entire taxonomy is read into the cache on first use, * without fetching individual categories first. * * <p>NOTE: it is assumed that this method is called immediately after the taxonomy writer has * been created. */ public void setCacheMissesUntilFill(int i) { ensureOpen(); cacheMissesUntilFill = i; } // we need to guarantee that if several threads call this concurrently, only // one executes it, and after it returns, the cache is updated and is either // complete or not. 
private synchronized void perhapsFillCache() throws IOException { if (cacheMisses.get() < cacheMissesUntilFill) { return; } if (!shouldFillCache) { // we already filled the cache once, there's no need to re-fill it return; } shouldFillCache = false; initReaderManager(); boolean aborted = false; DirectoryReader reader = readerManager.acquire(); try { PostingsEnum postingsEnum = null; for (LeafReaderContext ctx : reader.leaves()) { Terms terms = Terms.getTerms(ctx.reader(), Consts.FULL); // TODO: share per-segment TermsEnum here! TermsEnum termsEnum = terms.iterator(); while (termsEnum.next() != null) { if (!cache.isFull()) { BytesRef t = termsEnum.term(); // Since we guarantee uniqueness of categories, each term has exactly // one document. Also, since we do not allow removing categories (and // hence documents), there are no deletions in the index. Therefore, it // is sufficient to call next(), and then doc(), exactly once with no // 'validation' checks. FacetLabel cp = new FacetLabel(FacetsConfig.stringToPath(t.utf8ToString())); postingsEnum = termsEnum.postings(postingsEnum, PostingsEnum.NONE); boolean res = cache.put(cp, postingsEnum.nextDoc() + ctx.docBase); assert !res : "entries should not have been evicted from the cache"; } else { // the cache is full and the next put() will evict entries from it, therefore abort // the iteration. aborted = true; break; } } if (aborted) { break; } } } finally { readerManager.release(reader); } cacheIsComplete = !aborted; if (cacheIsComplete) { synchronized (this) { // everything is in the cache, so no need to keep readerManager open. // this block is executed in a sync block so that it works well with // initReaderManager called in parallel. 
readerManager.close(); readerManager = null; initializedReaderManager = false; } } } private TaxonomyIndexArrays getTaxoArrays() throws IOException { // By copying to a local variable we only perform a volatile read once (if it's not null) TaxonomyIndexArrays arrays = taxoArrays; if (arrays == null) { synchronized (this) { arrays = taxoArrays; if (arrays == null) { initReaderManager(); DirectoryReader reader = readerManager.acquire(); try { arrays = new TaxonomyIndexArrays(reader); } finally { readerManager.release(reader); } taxoArrays = arrays; } } } return arrays; } @Override public int getParent(int ordinal) throws IOException { ensureOpen(); // Note: the following if() just enforces that a user can never ask // for the parent of a nonexistent category - even if the parent array // was allocated bigger than it really needs to be. Objects.checkIndex(ordinal, nextID.get()); ParallelTaxonomyArrays.IntArray parents = getTaxoArrays().parents(); assert ordinal < parents.length() : "requested ordinal (" + ordinal + "); parents.length (" + parents.length() + ") !"; return parents.get(ordinal); } /** * Takes the categories from the given taxonomy directory, and adds the missing ones to this * taxonomy. Additionally, it fills the given {@link OrdinalMap} with a mapping from the original * ordinal to the new ordinal. */ public void addTaxonomy(Directory taxoDir, OrdinalMap map) throws IOException { ensureOpen(); try (DirectoryReader r = DirectoryReader.open(taxoDir)) { final int size = r.numDocs(); map.setSize(size); int base = 0; PostingsEnum docs = null; for (final LeafReaderContext ctx : r.leaves()) { final LeafReader ar = ctx.reader(); final Terms terms = ar.terms(Consts.FULL); // TODO: share per-segment TermsEnum here! 
TermsEnum te = terms.iterator(); while (te.next() != null) { FacetLabel cp = new FacetLabel(FacetsConfig.stringToPath(te.term().utf8ToString())); final int ordinal = addCategory(cp); docs = te.postings(docs, PostingsEnum.NONE); map.addMapping(docs.nextDoc() + base, ordinal); } base += ar.maxDoc(); // no deletions, so we're ok } map.addDone(); } } /** * Mapping from old ordinal to new ordinals, used when merging indexes with separate taxonomies. * * <p>addToTaxonomies() merges one or more taxonomies into the given taxonomy (this). An * OrdinalMap is filled for each of the added taxonomies, containing the new ordinal (in the * merged taxonomy) of each of the categories in the old taxonomy. * * <p>There exist two implementations of OrdinalMap: MemoryOrdinalMap and DiskOrdinalMap. As their * names suggest, the former keeps the map in memory and the latter in a temporary disk file. * Because these maps will later be needed one by one (to remap the counting lists), not all at * the same time, it is recommended to put the first taxonomy's map in memory, and all the rest on * disk (later to be automatically read into memory one by one, when needed). */ public interface OrdinalMap { /** * Set the size of the map. This MUST be called before addMapping(). It is assumed (but not * verified) that addMapping() will then be called exactly 'size' times, with different * origOrdinals between 0 and size-1. */ void setSize(int size) throws IOException; /** Record a mapping. */ void addMapping(int origOrdinal, int newOrdinal) throws IOException; /** * Call addDone() to say that all addMapping() have been done. In some implementations this * might free some resources. */ void addDone() throws IOException; /** * Return the map from the taxonomy's original (consecutive) ordinals to the new taxonomy's * ordinals. If the map has to be read from disk and ordered appropriately, it is done when * getMap() is called. 
getMap() should only be called once, and only when the map is actually * needed. Calling it will also free all resources that the map might be holding (such as * temporary disk space), other than the returned int[]. */ int[] getMap() throws IOException; } /** {@link OrdinalMap} maintained in memory */ public static final class MemoryOrdinalMap implements OrdinalMap { int[] map; /** Sole constructor. */ public MemoryOrdinalMap() {} @Override public void setSize(int taxonomySize) { map = new int[taxonomySize]; } @Override public void addMapping(int origOrdinal, int newOrdinal) { map[origOrdinal] = newOrdinal; } @Override public void addDone() { /* nothing to do */ } @Override public int[] getMap() { return map; } } /** {@link OrdinalMap} maintained on file system */ public static final class DiskOrdinalMap implements OrdinalMap { private final Path mapFile; private int[] map = null; private DataOutputStream out; /** Sole constructor. */ public DiskOrdinalMap(Path mapFile) throws IOException { this.mapFile = mapFile; out = new DataOutputStream(new BufferedOutputStream(Files.newOutputStream(mapFile))); } @Override public void addMapping(int origOrdinal, int newOrdinal) throws IOException { out.writeInt(origOrdinal); out.writeInt(newOrdinal); } @Override public void setSize(int taxonomySize) throws IOException { out.writeInt(taxonomySize); } @Override public void addDone() throws IOException { if (out != null) { out.close(); out = null; } } @Override public int[] getMap() throws IOException { if (map != null) { return map; } addDone(); // in case this wasn't previously called try (DataInputStream in = new DataInputStream(new BufferedInputStream(Files.newInputStream(mapFile)))) { map = new int[in.readInt()]; // NOTE: The current code assumes that the map is complete, // i.e. that every ordinal gets exactly one value. Otherwise, // we may run into an EOF here, or not read everything. 
for (int i = 0; i < map.length; i++) { int origOrdinal = in.readInt(); int newOrdinal = in.readInt(); map[origOrdinal] = newOrdinal; } } // Delete the temporary file, which is no longer needed. Files.delete(mapFile); return map; } } /** * Rollback changes to the taxonomy writer and closes the instance. Following this method the * instance becomes unusable (calling any of its API methods will yield an {@link * AlreadyClosedException}). */ @Override public synchronized void rollback() throws IOException { ensureOpen(); indexWriter.rollback(); doClose(); } /** * Replaces the current taxonomy with the given one. This method should generally be called in * conjunction with {@link IndexWriter#addIndexes(Directory...)} to replace both the taxonomy and * the search index content. */ public synchronized void replaceTaxonomy(Directory taxoDir) throws IOException { // replace the taxonomy by doing IW optimized operations indexWriter.deleteAll(); indexWriter.addIndexes(taxoDir); shouldRefreshReaderManager = true; initReaderManager(); // ensure that it's initialized refreshReaderManager(); nextID.set(indexWriter.getDocStats().maxDoc); taxoArrays = null; // must nullify so that it's re-computed next time it's needed // need to clear the cache, so that addCategory won't accidentally return // old categories that are in the cache. cache.clear(); cacheIsComplete = false; shouldFillCache = true; cacheMisses.set(0); // update indexEpoch as a taxonomy replace is just like it has be recreated ++indexEpoch; } /** * Delete the taxonomy and reset all state for this writer. * * <p>To keep using the same main index, you would have to regenerate the taxonomy, taking care * that ordinals are indexed in the same order as before. An example of this can be found in * {@link ReindexingEnrichedDirectoryTaxonomyWriter#reindexWithNewOrdinalData(BiConsumer)}. 
* * @lucene.experimental */ synchronized void deleteAll() throws IOException { indexWriter.deleteAll(); shouldRefreshReaderManager = true; initReaderManager(); // ensure that it's initialized refreshReaderManager(); nextID.set(0); taxoArrays = null; // must nullify so that it's re-computed next time it's needed // need to clear the cache, so that addCategory won't accidentally return // old categories that are in the cache. cache.clear(); cacheIsComplete = false; shouldFillCache = true; cacheMisses.set(0); // update indexEpoch as a taxonomy replace is just like it has be recreated ++indexEpoch; } /** Returns the {@link Directory} of this taxonomy writer. */ public Directory getDirectory() { return dir; } /** * Used by {@link DirectoryTaxonomyReader} to support NRT. * * <p><b>NOTE:</b> you should not use the obtained {@link IndexWriter} in any way, other than * opening an IndexReader on it, or otherwise, the taxonomy index may become corrupt! */ final IndexWriter getInternalIndexWriter() { return indexWriter; } /** * Expert: returns current index epoch, if this is a near-real-time reader. Used by {@link * DirectoryTaxonomyReader} to support NRT. * * @lucene.internal */ public final long getTaxonomyEpoch() { return indexEpoch; } }
googleapis/google-cloud-java
36,314
java-translate/proto-google-cloud-translate-v3/src/main/java/com/google/cloud/translate/v3/BatchDocumentInputConfig.java
/* * Copyright 2025 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ // Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/cloud/translate/v3/translation_service.proto // Protobuf Java Version: 3.25.8 package com.google.cloud.translate.v3; /** * * * <pre> * Input configuration for BatchTranslateDocument request. * </pre> * * Protobuf type {@code google.cloud.translation.v3.BatchDocumentInputConfig} */ public final class BatchDocumentInputConfig extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.cloud.translation.v3.BatchDocumentInputConfig) BatchDocumentInputConfigOrBuilder { private static final long serialVersionUID = 0L; // Use BatchDocumentInputConfig.newBuilder() to construct. 
private BatchDocumentInputConfig(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private BatchDocumentInputConfig() {} @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance(UnusedPrivateParameter unused) { return new BatchDocumentInputConfig(); } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.translate.v3.TranslationServiceProto .internal_static_google_cloud_translation_v3_BatchDocumentInputConfig_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.translate.v3.TranslationServiceProto .internal_static_google_cloud_translation_v3_BatchDocumentInputConfig_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.translate.v3.BatchDocumentInputConfig.class, com.google.cloud.translate.v3.BatchDocumentInputConfig.Builder.class); } private int sourceCase_ = 0; @SuppressWarnings("serial") private java.lang.Object source_; public enum SourceCase implements com.google.protobuf.Internal.EnumLite, com.google.protobuf.AbstractMessage.InternalOneOfEnum { GCS_SOURCE(1), SOURCE_NOT_SET(0); private final int value; private SourceCase(int value) { this.value = value; } /** * @param value The number of the enum to look for. * @return The enum associated with the given number. * @deprecated Use {@link #forNumber(int)} instead. */ @java.lang.Deprecated public static SourceCase valueOf(int value) { return forNumber(value); } public static SourceCase forNumber(int value) { switch (value) { case 1: return GCS_SOURCE; case 0: return SOURCE_NOT_SET; default: return null; } } public int getNumber() { return this.value; } }; public SourceCase getSourceCase() { return SourceCase.forNumber(sourceCase_); } public static final int GCS_SOURCE_FIELD_NUMBER = 1; /** * * * <pre> * Google Cloud Storage location for the source input. 
* This can be a single file (for example, * `gs://translation-test/input.docx`) or a wildcard (for example, * `gs://translation-test/&#42;`). * * File mime type is determined based on extension. Supported mime type * includes: * - `pdf`, application/pdf * - `docx`, * application/vnd.openxmlformats-officedocument.wordprocessingml.document * - `pptx`, * application/vnd.openxmlformats-officedocument.presentationml.presentation * - `xlsx`, * application/vnd.openxmlformats-officedocument.spreadsheetml.sheet * * The max file size to support for `.docx`, `.pptx` and `.xlsx` is 100MB. * The max file size to support for `.pdf` is 1GB and the max page limit is * 1000 pages. * The max file size to support for all input documents is 1GB. * </pre> * * <code>.google.cloud.translation.v3.GcsSource gcs_source = 1;</code> * * @return Whether the gcsSource field is set. */ @java.lang.Override public boolean hasGcsSource() { return sourceCase_ == 1; } /** * * * <pre> * Google Cloud Storage location for the source input. * This can be a single file (for example, * `gs://translation-test/input.docx`) or a wildcard (for example, * `gs://translation-test/&#42;`). * * File mime type is determined based on extension. Supported mime type * includes: * - `pdf`, application/pdf * - `docx`, * application/vnd.openxmlformats-officedocument.wordprocessingml.document * - `pptx`, * application/vnd.openxmlformats-officedocument.presentationml.presentation * - `xlsx`, * application/vnd.openxmlformats-officedocument.spreadsheetml.sheet * * The max file size to support for `.docx`, `.pptx` and `.xlsx` is 100MB. * The max file size to support for `.pdf` is 1GB and the max page limit is * 1000 pages. * The max file size to support for all input documents is 1GB. * </pre> * * <code>.google.cloud.translation.v3.GcsSource gcs_source = 1;</code> * * @return The gcsSource. 
*/ @java.lang.Override public com.google.cloud.translate.v3.GcsSource getGcsSource() { if (sourceCase_ == 1) { return (com.google.cloud.translate.v3.GcsSource) source_; } return com.google.cloud.translate.v3.GcsSource.getDefaultInstance(); } /** * * * <pre> * Google Cloud Storage location for the source input. * This can be a single file (for example, * `gs://translation-test/input.docx`) or a wildcard (for example, * `gs://translation-test/&#42;`). * * File mime type is determined based on extension. Supported mime type * includes: * - `pdf`, application/pdf * - `docx`, * application/vnd.openxmlformats-officedocument.wordprocessingml.document * - `pptx`, * application/vnd.openxmlformats-officedocument.presentationml.presentation * - `xlsx`, * application/vnd.openxmlformats-officedocument.spreadsheetml.sheet * * The max file size to support for `.docx`, `.pptx` and `.xlsx` is 100MB. * The max file size to support for `.pdf` is 1GB and the max page limit is * 1000 pages. * The max file size to support for all input documents is 1GB. 
* </pre> * * <code>.google.cloud.translation.v3.GcsSource gcs_source = 1;</code> */ @java.lang.Override public com.google.cloud.translate.v3.GcsSourceOrBuilder getGcsSourceOrBuilder() { if (sourceCase_ == 1) { return (com.google.cloud.translate.v3.GcsSource) source_; } return com.google.cloud.translate.v3.GcsSource.getDefaultInstance(); } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { if (sourceCase_ == 1) { output.writeMessage(1, (com.google.cloud.translate.v3.GcsSource) source_); } getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (sourceCase_ == 1) { size += com.google.protobuf.CodedOutputStream.computeMessageSize( 1, (com.google.cloud.translate.v3.GcsSource) source_); } size += getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof com.google.cloud.translate.v3.BatchDocumentInputConfig)) { return super.equals(obj); } com.google.cloud.translate.v3.BatchDocumentInputConfig other = (com.google.cloud.translate.v3.BatchDocumentInputConfig) obj; if (!getSourceCase().equals(other.getSourceCase())) return false; switch (sourceCase_) { case 1: if (!getGcsSource().equals(other.getGcsSource())) return false; break; case 0: default: } if (!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); switch 
(sourceCase_) { case 1: hash = (37 * hash) + GCS_SOURCE_FIELD_NUMBER; hash = (53 * hash) + getGcsSource().hashCode(); break; case 0: default: } hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static com.google.cloud.translate.v3.BatchDocumentInputConfig parseFrom( java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.translate.v3.BatchDocumentInputConfig parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.translate.v3.BatchDocumentInputConfig parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.translate.v3.BatchDocumentInputConfig parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.translate.v3.BatchDocumentInputConfig parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.translate.v3.BatchDocumentInputConfig parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.translate.v3.BatchDocumentInputConfig parseFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.translate.v3.BatchDocumentInputConfig parseFrom( java.io.InputStream input, 
com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.translate.v3.BatchDocumentInputConfig parseDelimitedFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); } public static com.google.cloud.translate.v3.BatchDocumentInputConfig parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.translate.v3.BatchDocumentInputConfig parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.translate.v3.BatchDocumentInputConfig parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder( com.google.cloud.translate.v3.BatchDocumentInputConfig prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? 
new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * * * <pre> * Input configuration for BatchTranslateDocument request. * </pre> * * Protobuf type {@code google.cloud.translation.v3.BatchDocumentInputConfig} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:google.cloud.translation.v3.BatchDocumentInputConfig) com.google.cloud.translate.v3.BatchDocumentInputConfigOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.translate.v3.TranslationServiceProto .internal_static_google_cloud_translation_v3_BatchDocumentInputConfig_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.translate.v3.TranslationServiceProto .internal_static_google_cloud_translation_v3_BatchDocumentInputConfig_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.translate.v3.BatchDocumentInputConfig.class, com.google.cloud.translate.v3.BatchDocumentInputConfig.Builder.class); } // Construct using com.google.cloud.translate.v3.BatchDocumentInputConfig.newBuilder() private Builder() {} private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); } @java.lang.Override public Builder clear() { super.clear(); bitField0_ = 0; if (gcsSourceBuilder_ != null) { gcsSourceBuilder_.clear(); } sourceCase_ = 0; source_ = null; return this; } @java.lang.Override public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return com.google.cloud.translate.v3.TranslationServiceProto .internal_static_google_cloud_translation_v3_BatchDocumentInputConfig_descriptor; } @java.lang.Override public 
com.google.cloud.translate.v3.BatchDocumentInputConfig getDefaultInstanceForType() { return com.google.cloud.translate.v3.BatchDocumentInputConfig.getDefaultInstance(); } @java.lang.Override public com.google.cloud.translate.v3.BatchDocumentInputConfig build() { com.google.cloud.translate.v3.BatchDocumentInputConfig result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public com.google.cloud.translate.v3.BatchDocumentInputConfig buildPartial() { com.google.cloud.translate.v3.BatchDocumentInputConfig result = new com.google.cloud.translate.v3.BatchDocumentInputConfig(this); if (bitField0_ != 0) { buildPartial0(result); } buildPartialOneofs(result); onBuilt(); return result; } private void buildPartial0(com.google.cloud.translate.v3.BatchDocumentInputConfig result) { int from_bitField0_ = bitField0_; } private void buildPartialOneofs(com.google.cloud.translate.v3.BatchDocumentInputConfig result) { result.sourceCase_ = sourceCase_; result.source_ = this.source_; if (sourceCase_ == 1 && gcsSourceBuilder_ != null) { result.source_ = gcsSourceBuilder_.build(); } } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object 
value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof com.google.cloud.translate.v3.BatchDocumentInputConfig) { return mergeFrom((com.google.cloud.translate.v3.BatchDocumentInputConfig) other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(com.google.cloud.translate.v3.BatchDocumentInputConfig other) { if (other == com.google.cloud.translate.v3.BatchDocumentInputConfig.getDefaultInstance()) return this; switch (other.getSourceCase()) { case GCS_SOURCE: { mergeGcsSource(other.getGcsSource()); break; } case SOURCE_NOT_SET: { break; } } this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { input.readMessage(getGcsSourceFieldBuilder().getBuilder(), extensionRegistry); sourceCase_ = 1; break; } // case 10 default: { if (!super.parseUnknownField(input, extensionRegistry, tag)) { done = true; // was an endgroup tag } break; } // default: } // switch (tag) } // while (!done) } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.unwrapIOException(); } finally { onChanged(); } // finally return this; } private int sourceCase_ = 0; private java.lang.Object source_; public SourceCase getSourceCase() { return SourceCase.forNumber(sourceCase_); } public Builder clearSource() { sourceCase_ = 0; source_ = null; onChanged(); return this; } private int bitField0_; private com.google.protobuf.SingleFieldBuilderV3< com.google.cloud.translate.v3.GcsSource, 
com.google.cloud.translate.v3.GcsSource.Builder, com.google.cloud.translate.v3.GcsSourceOrBuilder> gcsSourceBuilder_; /** * * * <pre> * Google Cloud Storage location for the source input. * This can be a single file (for example, * `gs://translation-test/input.docx`) or a wildcard (for example, * `gs://translation-test/&#42;`). * * File mime type is determined based on extension. Supported mime type * includes: * - `pdf`, application/pdf * - `docx`, * application/vnd.openxmlformats-officedocument.wordprocessingml.document * - `pptx`, * application/vnd.openxmlformats-officedocument.presentationml.presentation * - `xlsx`, * application/vnd.openxmlformats-officedocument.spreadsheetml.sheet * * The max file size to support for `.docx`, `.pptx` and `.xlsx` is 100MB. * The max file size to support for `.pdf` is 1GB and the max page limit is * 1000 pages. * The max file size to support for all input documents is 1GB. * </pre> * * <code>.google.cloud.translation.v3.GcsSource gcs_source = 1;</code> * * @return Whether the gcsSource field is set. */ @java.lang.Override public boolean hasGcsSource() { return sourceCase_ == 1; } /** * * * <pre> * Google Cloud Storage location for the source input. * This can be a single file (for example, * `gs://translation-test/input.docx`) or a wildcard (for example, * `gs://translation-test/&#42;`). * * File mime type is determined based on extension. Supported mime type * includes: * - `pdf`, application/pdf * - `docx`, * application/vnd.openxmlformats-officedocument.wordprocessingml.document * - `pptx`, * application/vnd.openxmlformats-officedocument.presentationml.presentation * - `xlsx`, * application/vnd.openxmlformats-officedocument.spreadsheetml.sheet * * The max file size to support for `.docx`, `.pptx` and `.xlsx` is 100MB. * The max file size to support for `.pdf` is 1GB and the max page limit is * 1000 pages. * The max file size to support for all input documents is 1GB. 
* </pre> * * <code>.google.cloud.translation.v3.GcsSource gcs_source = 1;</code> * * @return The gcsSource. */ @java.lang.Override public com.google.cloud.translate.v3.GcsSource getGcsSource() { if (gcsSourceBuilder_ == null) { if (sourceCase_ == 1) { return (com.google.cloud.translate.v3.GcsSource) source_; } return com.google.cloud.translate.v3.GcsSource.getDefaultInstance(); } else { if (sourceCase_ == 1) { return gcsSourceBuilder_.getMessage(); } return com.google.cloud.translate.v3.GcsSource.getDefaultInstance(); } } /** * * * <pre> * Google Cloud Storage location for the source input. * This can be a single file (for example, * `gs://translation-test/input.docx`) or a wildcard (for example, * `gs://translation-test/&#42;`). * * File mime type is determined based on extension. Supported mime type * includes: * - `pdf`, application/pdf * - `docx`, * application/vnd.openxmlformats-officedocument.wordprocessingml.document * - `pptx`, * application/vnd.openxmlformats-officedocument.presentationml.presentation * - `xlsx`, * application/vnd.openxmlformats-officedocument.spreadsheetml.sheet * * The max file size to support for `.docx`, `.pptx` and `.xlsx` is 100MB. * The max file size to support for `.pdf` is 1GB and the max page limit is * 1000 pages. * The max file size to support for all input documents is 1GB. * </pre> * * <code>.google.cloud.translation.v3.GcsSource gcs_source = 1;</code> */ public Builder setGcsSource(com.google.cloud.translate.v3.GcsSource value) { if (gcsSourceBuilder_ == null) { if (value == null) { throw new NullPointerException(); } source_ = value; onChanged(); } else { gcsSourceBuilder_.setMessage(value); } sourceCase_ = 1; return this; } /** * * * <pre> * Google Cloud Storage location for the source input. * This can be a single file (for example, * `gs://translation-test/input.docx`) or a wildcard (for example, * `gs://translation-test/&#42;`). * * File mime type is determined based on extension. 
Supported mime type * includes: * - `pdf`, application/pdf * - `docx`, * application/vnd.openxmlformats-officedocument.wordprocessingml.document * - `pptx`, * application/vnd.openxmlformats-officedocument.presentationml.presentation * - `xlsx`, * application/vnd.openxmlformats-officedocument.spreadsheetml.sheet * * The max file size to support for `.docx`, `.pptx` and `.xlsx` is 100MB. * The max file size to support for `.pdf` is 1GB and the max page limit is * 1000 pages. * The max file size to support for all input documents is 1GB. * </pre> * * <code>.google.cloud.translation.v3.GcsSource gcs_source = 1;</code> */ public Builder setGcsSource(com.google.cloud.translate.v3.GcsSource.Builder builderForValue) { if (gcsSourceBuilder_ == null) { source_ = builderForValue.build(); onChanged(); } else { gcsSourceBuilder_.setMessage(builderForValue.build()); } sourceCase_ = 1; return this; } /** * * * <pre> * Google Cloud Storage location for the source input. * This can be a single file (for example, * `gs://translation-test/input.docx`) or a wildcard (for example, * `gs://translation-test/&#42;`). * * File mime type is determined based on extension. Supported mime type * includes: * - `pdf`, application/pdf * - `docx`, * application/vnd.openxmlformats-officedocument.wordprocessingml.document * - `pptx`, * application/vnd.openxmlformats-officedocument.presentationml.presentation * - `xlsx`, * application/vnd.openxmlformats-officedocument.spreadsheetml.sheet * * The max file size to support for `.docx`, `.pptx` and `.xlsx` is 100MB. * The max file size to support for `.pdf` is 1GB and the max page limit is * 1000 pages. * The max file size to support for all input documents is 1GB. 
* </pre> * * <code>.google.cloud.translation.v3.GcsSource gcs_source = 1;</code> */ public Builder mergeGcsSource(com.google.cloud.translate.v3.GcsSource value) { if (gcsSourceBuilder_ == null) { if (sourceCase_ == 1 && source_ != com.google.cloud.translate.v3.GcsSource.getDefaultInstance()) { source_ = com.google.cloud.translate.v3.GcsSource.newBuilder( (com.google.cloud.translate.v3.GcsSource) source_) .mergeFrom(value) .buildPartial(); } else { source_ = value; } onChanged(); } else { if (sourceCase_ == 1) { gcsSourceBuilder_.mergeFrom(value); } else { gcsSourceBuilder_.setMessage(value); } } sourceCase_ = 1; return this; } /** * * * <pre> * Google Cloud Storage location for the source input. * This can be a single file (for example, * `gs://translation-test/input.docx`) or a wildcard (for example, * `gs://translation-test/&#42;`). * * File mime type is determined based on extension. Supported mime type * includes: * - `pdf`, application/pdf * - `docx`, * application/vnd.openxmlformats-officedocument.wordprocessingml.document * - `pptx`, * application/vnd.openxmlformats-officedocument.presentationml.presentation * - `xlsx`, * application/vnd.openxmlformats-officedocument.spreadsheetml.sheet * * The max file size to support for `.docx`, `.pptx` and `.xlsx` is 100MB. * The max file size to support for `.pdf` is 1GB and the max page limit is * 1000 pages. * The max file size to support for all input documents is 1GB. * </pre> * * <code>.google.cloud.translation.v3.GcsSource gcs_source = 1;</code> */ public Builder clearGcsSource() { if (gcsSourceBuilder_ == null) { if (sourceCase_ == 1) { sourceCase_ = 0; source_ = null; onChanged(); } } else { if (sourceCase_ == 1) { sourceCase_ = 0; source_ = null; } gcsSourceBuilder_.clear(); } return this; } /** * * * <pre> * Google Cloud Storage location for the source input. * This can be a single file (for example, * `gs://translation-test/input.docx`) or a wildcard (for example, * `gs://translation-test/&#42;`). 
* * File mime type is determined based on extension. Supported mime type * includes: * - `pdf`, application/pdf * - `docx`, * application/vnd.openxmlformats-officedocument.wordprocessingml.document * - `pptx`, * application/vnd.openxmlformats-officedocument.presentationml.presentation * - `xlsx`, * application/vnd.openxmlformats-officedocument.spreadsheetml.sheet * * The max file size to support for `.docx`, `.pptx` and `.xlsx` is 100MB. * The max file size to support for `.pdf` is 1GB and the max page limit is * 1000 pages. * The max file size to support for all input documents is 1GB. * </pre> * * <code>.google.cloud.translation.v3.GcsSource gcs_source = 1;</code> */ public com.google.cloud.translate.v3.GcsSource.Builder getGcsSourceBuilder() { return getGcsSourceFieldBuilder().getBuilder(); } /** * * * <pre> * Google Cloud Storage location for the source input. * This can be a single file (for example, * `gs://translation-test/input.docx`) or a wildcard (for example, * `gs://translation-test/&#42;`). * * File mime type is determined based on extension. Supported mime type * includes: * - `pdf`, application/pdf * - `docx`, * application/vnd.openxmlformats-officedocument.wordprocessingml.document * - `pptx`, * application/vnd.openxmlformats-officedocument.presentationml.presentation * - `xlsx`, * application/vnd.openxmlformats-officedocument.spreadsheetml.sheet * * The max file size to support for `.docx`, `.pptx` and `.xlsx` is 100MB. * The max file size to support for `.pdf` is 1GB and the max page limit is * 1000 pages. * The max file size to support for all input documents is 1GB. 
* </pre> * * <code>.google.cloud.translation.v3.GcsSource gcs_source = 1;</code> */ @java.lang.Override public com.google.cloud.translate.v3.GcsSourceOrBuilder getGcsSourceOrBuilder() { if ((sourceCase_ == 1) && (gcsSourceBuilder_ != null)) { return gcsSourceBuilder_.getMessageOrBuilder(); } else { if (sourceCase_ == 1) { return (com.google.cloud.translate.v3.GcsSource) source_; } return com.google.cloud.translate.v3.GcsSource.getDefaultInstance(); } } /** * * * <pre> * Google Cloud Storage location for the source input. * This can be a single file (for example, * `gs://translation-test/input.docx`) or a wildcard (for example, * `gs://translation-test/&#42;`). * * File mime type is determined based on extension. Supported mime type * includes: * - `pdf`, application/pdf * - `docx`, * application/vnd.openxmlformats-officedocument.wordprocessingml.document * - `pptx`, * application/vnd.openxmlformats-officedocument.presentationml.presentation * - `xlsx`, * application/vnd.openxmlformats-officedocument.spreadsheetml.sheet * * The max file size to support for `.docx`, `.pptx` and `.xlsx` is 100MB. * The max file size to support for `.pdf` is 1GB and the max page limit is * 1000 pages. * The max file size to support for all input documents is 1GB. 
* </pre> * * <code>.google.cloud.translation.v3.GcsSource gcs_source = 1;</code> */ private com.google.protobuf.SingleFieldBuilderV3< com.google.cloud.translate.v3.GcsSource, com.google.cloud.translate.v3.GcsSource.Builder, com.google.cloud.translate.v3.GcsSourceOrBuilder> getGcsSourceFieldBuilder() { if (gcsSourceBuilder_ == null) { if (!(sourceCase_ == 1)) { source_ = com.google.cloud.translate.v3.GcsSource.getDefaultInstance(); } gcsSourceBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< com.google.cloud.translate.v3.GcsSource, com.google.cloud.translate.v3.GcsSource.Builder, com.google.cloud.translate.v3.GcsSourceOrBuilder>( (com.google.cloud.translate.v3.GcsSource) source_, getParentForChildren(), isClean()); source_ = null; } sourceCase_ = 1; onChanged(); return gcsSourceBuilder_; } @java.lang.Override public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:google.cloud.translation.v3.BatchDocumentInputConfig) } // @@protoc_insertion_point(class_scope:google.cloud.translation.v3.BatchDocumentInputConfig) private static final com.google.cloud.translate.v3.BatchDocumentInputConfig DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new com.google.cloud.translate.v3.BatchDocumentInputConfig(); } public static com.google.cloud.translate.v3.BatchDocumentInputConfig getDefaultInstance() { return DEFAULT_INSTANCE; } private static final com.google.protobuf.Parser<BatchDocumentInputConfig> PARSER = new com.google.protobuf.AbstractParser<BatchDocumentInputConfig>() { @java.lang.Override public BatchDocumentInputConfig parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws 
com.google.protobuf.InvalidProtocolBufferException { Builder builder = newBuilder(); try { builder.mergeFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(builder.buildPartial()); } catch (com.google.protobuf.UninitializedMessageException e) { throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException(e) .setUnfinishedMessage(builder.buildPartial()); } return builder.buildPartial(); } }; public static com.google.protobuf.Parser<BatchDocumentInputConfig> parser() { return PARSER; } @java.lang.Override public com.google.protobuf.Parser<BatchDocumentInputConfig> getParserForType() { return PARSER; } @java.lang.Override public com.google.cloud.translate.v3.BatchDocumentInputConfig getDefaultInstanceForType() { return DEFAULT_INSTANCE; } }
google/santa-tracker-android
36,391
presenttoss/src/main/java/com/google/android/apps/santatracker/doodles/presenttoss/PresentTossModel.java
/*
 * Copyright 2019 Google LLC
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.google.android.apps.santatracker.doodles.presenttoss;

import android.content.Context;
import android.content.res.Resources;
import android.graphics.Paint;
import android.os.Vibrator;
import com.google.android.apps.santatracker.doodles.presenttoss.ElfActor.WaterPoloActorPart;
import com.google.android.apps.santatracker.doodles.shared.ColoredRectangleActor;
import com.google.android.apps.santatracker.doodles.shared.EventBus;
import com.google.android.apps.santatracker.doodles.shared.Vector2D;
import com.google.android.apps.santatracker.doodles.shared.actor.Actor;
import com.google.android.apps.santatracker.doodles.shared.actor.ActorHelper;
import com.google.android.apps.santatracker.doodles.shared.actor.CameraShake;
import com.google.android.apps.santatracker.doodles.shared.actor.RectangularInstructionActor;
import com.google.android.apps.santatracker.doodles.shared.actor.SpriteActor;
import com.google.android.apps.santatracker.doodles.shared.actor.TextActor;
import com.google.android.apps.santatracker.doodles.shared.animation.ActorTween;
import com.google.android.apps.santatracker.doodles.shared.animation.ActorTween.Callback;
import com.google.android.apps.santatracker.doodles.shared.animation.AnimatedSprite;
import com.google.android.apps.santatracker.doodles.shared.animation.AnimatedSprite.AnimatedSpriteListener;
import com.google.android.apps.santatracker.doodles.shared.animation.EmptyTween;
import com.google.android.apps.santatracker.doodles.shared.animation.Interpolator;
import com.google.android.apps.santatracker.doodles.shared.animation.Tween;
import com.google.android.apps.santatracker.doodles.shared.animation.TweenManager;
import com.google.android.apps.santatracker.doodles.shared.views.GameFragment;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.Locale;

/**
 * Model for the Present Toss game (a re-skin of the original Water Polo doodle, which is why
 * several constants and types still carry "water polo" names).
 *
 * <p>Owns all game state: the actor list that the view renders, the score/timer, the state
 * machine ({@link State}), and the tween chains that animate opponents, targets, and thrown
 * presents. The view layer drives it via {@link #update(long)} and {@link #onFling(float)}.
 */
public class PresentTossModel {

    // Logical playfield size in game units; the view maps this onto the screen.
    public static final int WATER_POLO_HEIGHT = 960;
    public static final int WATER_POLO_WIDTH = 540;

    // Score thresholds for the 1/2/3-star end-of-game rating.
    public static final int ONE_STAR_THRESHOLD = 1;
    public static final int TWO_STAR_THRESHOLD = 10;
    public static final int THREE_STAR_THRESHOLD = 20;

    private static final String TAG = PresentTossModel.class.getSimpleName();

    // Per-frame decay factor passed to CameraShake when the screen shakes.
    private static final float SHAKE_FALLOFF = 0.9f;
    // Haptic feedback duration (ms) for blocks and goals.
    private static final int VIBRATION_SMALL = 40;

    // Layout and styling of the LED-style countdown clock at the top of the screen.
    private static final float TIME_LEFT_TEXT_X = WATER_POLO_WIDTH * 0.5f;
    private static final float TIME_LEFT_TEXT_Y = 12;
    private static final float TIME_LEFT_TEXT_SCALE = 3.2f;
    // "All segments lit" text drawn underneath the clock to fake an unlit LED background.
    private static final String TIME_LEFT_UNDER_TEXT = "88:88";
    private static final int TIME_LEFT_TEXT_RGB = 0xFFFFBD2E;
    private static final int TIME_LEFT_TEXT_GLOW_RGB = 0x88FF9A2E;
    private static final int TIME_LEFT_UNDER_RGB = 0xFF3B200D;

    // Floating "+1"/"-1" score popups: lifetime and colors.
    private static final float POINT_TEXT_ANIMATION_TIME = 1.6f;
    private static final int POINT_MINUS_TEXT_RGB = 0xffd61e1e;
    private static final int POINT_PLUS_TEXT_RGB = 0xff4dab1f;

    // Currently throw delay is not supported. The constant and related variables are left in to
    // facilitate prototyping in case throw delay would be supported in the future.
    private static final float THROW_DELAY_SECONDS = 0.4f;

    // Round length in seconds.
    private static final float TOTAL_TIME = 30f;
    // Speed (game units/sec) of a thrown present.
    private static final int BALL_SPEED = 1300;

    // Scores at which each blocking opponent first appears, and how fast each one
    // sweeps side to side once in play.
    private static final int OPPONENT_ONE_ENTRANCE_THRESHOLD = 1;
    private static final int OPPONENT_TWO_ENTRANCE_THRESHOLD = 4;
    private static final int OPPONENT_THREE_ENTRANCE_THRESHOLD = 10;
    private static final int OPPONENT_ONE_SPEED = 60;
    private static final int OPPONENT_TWO_SPEED = 120;
    private static final int OPPONENT_THREE_SPEED = 170;

    // Interval between target-pulse hints shown until the player scores once.
    private static final int MS_BETWEEN_TARGET_FLASHES = 1000;

    // Axis-aligned scoring region in front of the gift bag (game units).
    private static final float GOAL_BOX_X = 108;
    private static final float GOAL_BOX_Y = 75;
    private static final float GOAL_BOX_WIDTH = 333;
    private static final float GOAL_BOX_HEIGHT = 98;

    private final Vibrator vibrator;
    public CameraShake cameraShake;
    // All renderable actors. Synchronized list: update() iterates it while other
    // callbacks (tween onFinish, fling handling) add and remove entries.
    public List<Actor> actors;
    public State state;
    public int score;
    public int ballsThrown;
    public long titleDurationMs = GameFragment.TITLE_DURATION_MS;
    RectangularInstructionActor instructions;
    // Short-lived visual actors (splats, score popups) removed wholesale on reset.
    private List<Actor> effects;
    private Resources resources;
    private EventBus eventBus;
    private TweenManager tweenManager;
    private ElfActor player;
    private ElfActor opponentOne;
    private ElfActor opponentTwo;
    private ElfActor opponentThree;
    private ColoredRectangleActor timeLeftFrameBorder;
    private ColoredRectangleActor timeLeftFrame;
    private TextActor timeLeftText;
    private TextActor timeLeftTextGlow;
    private TextActor timeLeftUnder;
    // Presents currently in flight (at most one in practice; see onFling's guard).
    private List<PresentActor> balls;
    private SpriteActor slideBack;
    private SpriteActor targetLeft;
    private SpriteActor targetMiddle;
    private SpriteActor targetRight;
    private SpriteActor goal;
    // Seconds remaining in the round.
    private float time;
    private float msTillNextTargetPulse;
    private boolean scoredAtLeastOneShot;
    // See THROW_DELAY_SECONDS: written but not currently consulted by onFling.
    private boolean canThrow;
    // Set by reset(); lets update() abort mid-frame if a callback reset the game.
    private boolean didReset;

    /**
     * Builds the full actor graph and puts the game in its initial (title-screen) state.
     *
     * @param resources used to load sprite frames and fonts
     * @param context used to obtain the system {@link Vibrator} for haptic feedback
     */
    public PresentTossModel(Resources resources, Context context) {
        this.resources = resources;
        actors = Collections.synchronizedList(new ArrayList<Actor>());
        effects = new ArrayList<>();
        balls = new ArrayList<>();
        cameraShake = new CameraShake();
        actors.add(cameraShake);
        tweenManager = new TweenManager();
        eventBus = EventBus.getInstance();
        vibrator = (Vibrator) context.getSystemService(Context.VIBRATOR_SERVICE);
        createActors();
        reset(true);
    }

    /** Creates every persistent actor (background, goal, HUD, targets, opponents, player). */
    private void createActors() {
        Actor pool = actorWithIds(PresentTossSprites.present_throw_floor);
        pool.zIndex = -3;
        // The pool image is 780x960, but the center 540x960 of that image is what corresponds
        // to the game area, so offset the image to the left. The edges only show on screens with
        // aspect ratios wider than 9:16.
        pool.position.x = -120;
        actors.add(pool);

        // Santa's bag: its "shake" animation plays once per goal, then rewinds and pauses.
        goal = actorWithIds(PresentTossSprites.present_throw_santabag);
        goal.position.set(-14, 72);
        goal.zIndex = -4;
        goal.sprite.setLoop(false);
        goal.sprite.setPaused(true);
        goal.sprite.addListener(
                new AnimatedSpriteListener() {
                    @Override
                    public void onFinished() {
                        // Rewind to frame 0 so the next goal can replay the shake.
                        goal.sprite.setFrameIndex(0);
                        goal.sprite.setPaused(true);
                    }
                });
        actors.add(goal);

        slideBack = actorWithIds(PresentTossSprites.present_throw_elfbag);
        slideBack.zIndex = 4;
        actors.add(slideBack);
        moveSlide(0);

        // Countdown clock: opaque backing, border, dim "88:88" under-layer, glow layer,
        // and the bright foreground digits, stacked by increasing zIndex.
        timeLeftFrame =
                new ColoredRectangleActor(
                        Vector2D.get(WATER_POLO_WIDTH * 0.35f, TIME_LEFT_TEXT_Y - 6),
                        Vector2D.get(WATER_POLO_WIDTH * 0.3f, 50));
        timeLeftFrame.setColor(0xff000000);
        timeLeftFrame.zIndex = 4;
        actors.add(timeLeftFrame);

        timeLeftFrameBorder =
                new ColoredRectangleActor(
                        Vector2D.get(WATER_POLO_WIDTH * 0.35f, TIME_LEFT_TEXT_Y - 6),
                        Vector2D.get(WATER_POLO_WIDTH * 0.3f, 50));
        timeLeftFrameBorder.setStyle(Paint.Style.STROKE);
        timeLeftFrameBorder.setStrokeWidth(5);
        timeLeftFrameBorder.setColor(0xff555555);
        timeLeftFrameBorder.zIndex = 4;
        actors.add(timeLeftFrameBorder);

        timeLeftUnder = new TextActor(TIME_LEFT_UNDER_TEXT);
        timeLeftUnder.position.set(TIME_LEFT_TEXT_X, TIME_LEFT_TEXT_Y);
        timeLeftUnder.scale = TIME_LEFT_TEXT_SCALE;
        timeLeftUnder.setBold(true);
        timeLeftUnder.setFont(resources.getAssets(), "dseg7.ttf");
        timeLeftUnder.setColor(TIME_LEFT_UNDER_RGB);
        timeLeftUnder.alignCenter();
        timeLeftUnder.zIndex = 4;
        actors.add(timeLeftUnder);

        timeLeftTextGlow = new TextActor("00:30");
        timeLeftTextGlow.position.set(TIME_LEFT_TEXT_X, TIME_LEFT_TEXT_Y);
        timeLeftTextGlow.scale = TIME_LEFT_TEXT_SCALE;
        timeLeftTextGlow.setBold(true);
        timeLeftTextGlow.enableBlur(0.6f);
        timeLeftTextGlow.setFont(resources.getAssets(), "dseg7.ttf");
        timeLeftTextGlow.setColor(TIME_LEFT_TEXT_GLOW_RGB);
        timeLeftTextGlow.alignCenter();
        timeLeftTextGlow.zIndex = 5;
        actors.add(timeLeftTextGlow);

        timeLeftText = new TextActor("00:30");
        timeLeftText.position.set(TIME_LEFT_TEXT_X, TIME_LEFT_TEXT_Y);
        timeLeftText.scale = TIME_LEFT_TEXT_SCALE;
        timeLeftText.setBold(true);
        timeLeftText.setFont(resources.getAssets(), "dseg7.ttf");
        timeLeftText.setColor(TIME_LEFT_TEXT_RGB);
        timeLeftText.alignCenter();
        timeLeftText.zIndex = 6;
        actors.add(timeLeftText);

        // Three aiming hints across the goal mouth, hidden until pulseTargets() shows them.
        targetLeft = createTarget(166, 128);
        actors.add(targetLeft);
        targetMiddle = createTarget(273, 128);
        actors.add(targetMiddle);
        targetRight = createTarget(380, 128);
        actors.add(targetRight);

        // TODO: Change block sprites
        // Opponents are slightly scaled (0.8 / 0.9 / 1.0) to sell the depth perspective.
        opponentOne =
                createOpponent(
                        0.8f,
                        PresentTossSprites.present_throw_def_orange_left,
                        PresentTossSprites.present_throw_def_orange_right,
                        PresentTossSprites.present_throw_def_orange_emerge,
                        PresentTossSprites.present_throw_def_orange_blocking);
        actors.add(opponentOne);
        opponentTwo =
                createOpponent(
                        0.9f,
                        PresentTossSprites.present_throw_def_green_left,
                        PresentTossSprites.present_throw_def_green_right,
                        PresentTossSprites.present_throw_def_green_emerge,
                        PresentTossSprites.present_throw_def_green_blocking);
        actors.add(opponentTwo);
        opponentThree =
                createOpponent(
                        1.0f,
                        PresentTossSprites.present_throw_def_red_left,
                        PresentTossSprites.present_throw_def_red_right,
                        PresentTossSprites.present_throw_def_red_emerge,
                        PresentTossSprites.present_throw_def_red_blocking);
        actors.add(opponentThree);

        // The throwing elf, with one body sprite per animation state.
        player = new ElfActor();
        player.addSprite(
                WaterPoloActorPart.BodyIdle,
                -84,
                180,
                spriteWithIds(PresentTossSprites.present_throw_idle));
        player.addSprite(
                WaterPoloActorPart.BodyThrow,
                -84,
                180,
                spriteWithIds(PresentTossSprites.present_throw_throwing));
        player.addSprite(
                WaterPoloActorPart.BodyPickUpBall,
                -84,
                180,
                spriteWithIds(PresentTossSprites.present_throw_reloading));
        player.addSprite(
                WaterPoloActorPart.BodyIdleNoBall,
                -84,
                180,
                spriteWithIds(PresentTossSprites.present_throw_celebrate, 2));
        actors.add(player);

        // Swipe-to-throw tutorial card, shown shortly after the first reset.
        AnimatedSprite diagram = spriteWithIds(PresentTossSprites.present_throw_tutorials);
        diagram.setFPS(7);
        instructions = new RectangularInstructionActor(resources, diagram);
        instructions.hidden = true;
        instructions.scale = 0.6f;
        instructions.position.set(
                WATER_POLO_WIDTH * 0.5f - instructions.getScaledWidth() / 2,
                WATER_POLO_HEIGHT * 0.46f - instructions.getScaledHeight() / 2f);
        actors.add(instructions);
    }

    /**
     * Creates one hidden, center-anchored target marker at the given position.
     *
     * @param x target center x in game units
     * @param y target center y in game units
     * @return the new (hidden) target actor; caller is responsible for adding it to {@code actors}
     */
    SpriteActor createTarget(float x, float y) {
        SpriteActor target = actorWithIds(PresentTossSprites.present_toss_target);
        target.sprite.setAnchor(target.sprite.frameWidth / 2, target.sprite.frameHeight / 2);
        target.position.set(x, y);
        target.hidden = true;
        return target;
    }

    /**
     * Builds a blocking opponent with the four body animations it needs
     * (emerge, swim left, swim right, block).
     *
     * @param scale render scale, used to fake perspective depth
     * @param leftSprite frame ids for the swim-left animation
     * @param rightSprite frame ids for the swim-right animation
     * @param emergeSprite frame ids for the entrance animation
     * @param blockSprite frame ids for the block animation
     * @return the configured opponent; caller adds it to {@code actors}
     */
    ElfActor createOpponent(
            float scale, int[] leftSprite, int[] rightSprite, int[] emergeSprite, int[] blockSprite) {
        ElfActor opponent = new ElfActor();
        opponent.addSprite(
                WaterPoloActorPart.BodyEntrance, -45, 142, spriteWithIds(emergeSprite, 12));
        opponent.addSprite(WaterPoloActorPart.BodyLeft, -45, 142, spriteWithIds(leftSprite, 6));
        opponent.addSprite(WaterPoloActorPart.BodyRight, -45, 142, spriteWithIds(rightSprite, 6));
        opponent.addSprite(WaterPoloActorPart.BodyBlock, -45, 142, spriteWithIds(blockSprite));
        opponent.scale = scale;
        opponent.setCollisionBox(100, 90);
        return opponent;
    }

    /**
     * Moves the slide left / right. Used to attach the slide to the side of the screen on different
     * aspect ratio screens.
     */
    public void moveSlide(float slideOffsetX) {
        slideBack.position.set(300 + slideOffsetX, 760);
    }

    /**
     * Puts everything back to the beginning state. Used at start of game and also if the user
     * clicks replay.
     *
     * @param firstPlay true on the very first launch: shows the title screen and, after a short
     *     delay, the swipe tutorial; false for replays, which go straight to WAITING
     */
    public void reset(boolean firstPlay) {
        tweenManager.removeAll();
        if (firstPlay) {
            setState(State.TITLE);
        } else {
            setState(State.WAITING);
        }
        // Remove all temporary effects
        for (int i = effects.size() - 1; i >= 0; i--) {
            Actor effect = effects.get(i);
            actors.remove(effect);
            effects.remove(effect);
        }
        // Remove all balls
        for (int i = balls.size() - 1; i >= 0; i--) {
            ThrownActor ball = balls.get(i);
            actors.remove(ball);
            balls.remove(ball);
        }
        score = 0;
        scoredAtLeastOneShot = false;
        ballsThrown = 0;
        eventBus.sendEvent(EventBus.SCORE_CHANGED, score);

        // No opponents at start of game (give them 1 easy point to reinforce that they are
        // supposed to get goals, not attack the goalie).
        opponentOne.hidden = true;
        opponentTwo.hidden = true;
        opponentThree.hidden = true;
        msTillNextTargetPulse = MS_BETWEEN_TARGET_FLASHES;

        // Y positions picked so opponents look evenly spaced when in perspective.
        // X positions picked to make opponents come up on the left side (because they swim right
        // first, so coming up on the left side gives room to swim) but not all at the same position
        // (staggered looks better).
        opponentOne.position.set(171, 300);
        opponentTwo.position.set(271, 440);
        opponentThree.position.set(171, 600);

        player.position.set(198, 845);
        player.idle();
        canThrow = true;
        maybePickUpAnotherBall();
        if (firstPlay) {
            // TODO: If you swipe before the instructions show, they still show.
            // Fix this by adding a state machine like the android games have.
            tweenManager.add(
                    new EmptyTween(1.3f) {
                        @Override
                        protected void onFinish() {
                            // Only show the tutorial if the player hasn't already figured it out.
                            if (ballsThrown == 0) {
                                instructions.show();
                            }
                        }
                    });
        }
        didReset = true;
        time = TOTAL_TIME;
    }

    /**
     * Advances the whole game by one frame: ticks the clock, tweens, every actor, and every
     * in-flight present, then re-sorts actors by z-order for rendering.
     *
     * <p>If any callback invoked during the frame calls {@link #reset}, {@code didReset} becomes
     * true and the remainder of the frame is skipped so stale actors aren't updated.
     *
     * @param deltaMs elapsed time since the previous frame, in milliseconds
     */
    public void update(long deltaMs) {
        // Track whether reset was called at any point.
        // If so, this short-circuits the rest of the update.
        didReset = false;
        if (state == State.PLAYING) {
            time = time - ((int) deltaMs / 1000f);
            if (time <= 0) {
                time = 0;
                player.idleNoBall();
                setState(State.GAME_OVER);
            }
        }
        updateTimeLeftText();
        tweenManager.update(deltaMs);
        if (didReset) {
            return;
        }
        synchronized (actors) {
            for (int i = actors.size() - 1; i >= 0; i--) {
                Actor actor = actors.get(i);
                actor.update(deltaMs);
                if (didReset) {
                    return;
                }
            }
        }
        for (int i = balls.size() - 1; i >= 0; i--) {
            updateBall(balls.get(i));
        }
        // Show the targets until the player gets at least 1 shot in the goal.
        if (!scoredAtLeastOneShot) {
            float msTillNextTargetPulseBefore = msTillNextTargetPulse;
            msTillNextTargetPulse -= deltaMs;
            // Fire exactly once when the countdown crosses zero this frame.
            if (msTillNextTargetPulseBefore > 0 && msTillNextTargetPulse <= 0) {
                pulseTargets();
            }
        }
        synchronized (actors) {
            Collections.sort(actors);
        }
    }

    /** Renders the remaining seconds as "00:SS" into both clock text layers. */
    void updateTimeLeftText() {
        String timeLeftString = "00:" + String.format(Locale.ENGLISH, "%02d", (int) time);
        timeLeftText.setText(timeLeftString);
        timeLeftTextGlow.setText(timeLeftString);
    }

    /**
     * Per-frame physics and scoring for one in-flight present: perspective scaling, opponent
     * blocks, goal detection, wall bounces, and off-screen cleanup.
     */
    private void updateBall(final PresentActor ball) {
        // Make the ball shrink as it travels into the distance.
        float ballStartY = 753;
        float ballEndY = 157;
        if (!ball.shotBlocked) {
            ball.scale = 0.5f + (0.5f * (ball.position.y - ballEndY) / (ballStartY - ballEndY));
        }
        // Only allow blocking if ball is traveling towards goal (otherwise ball can get stuck
        // bouncing between opponents).
        if (ball.velocity.y < 0) {
            final ElfActor blockingOpponent = getBlockingOpponentIfAny(ball);
            if (blockingOpponent != null) {
                // Draw a -1 at the ball
                addPointText(ball.position.x, ball.position.y - 150, "-1", POINT_MINUS_TEXT_RGB);
                newScore(score - 1);
                blockShot(ball, blockingOpponent);
            }
        }
        // TODO: at small scales, ball isn't centered over its position.
        // TODO: randomize vertical position of splats
        if (!ball.shotBlocked && goalBoxContains(ball.position.x, ball.position.y)) {
            // Draw a +1 at the ball. Bank shots (two wall bounces) are worth double.
            if (ball.bounces == 2) {
                addPointText(ball.position.x, ball.position.y, "+2", POINT_PLUS_TEXT_RGB);
                newScore(score + 2);
            } else {
                addPointText(ball.position.x, ball.position.y, "+1", POINT_PLUS_TEXT_RGB);
                newScore(score + 1);
            }
            scoreShot(ball);
        }
        // Check if ball has left screen. If so, count it as a miss and get ready for another throw.
        if ((ball.position.y < 0 && !ball.shotBlocked)) {
            missShot(ball);
        }
        // Check if ball touches the left and right wall and has not bounced off the wall twice.
        // If so, bounce it off the wall.
        // If not, remove the ball from play.
        if (ball.position.x <= 0 || ball.position.x >= WATER_POLO_WIDTH) {
            if (ball.bounces > 1 || ball.shotBlocked) {
                missShot(ball);
            } else {
                ball.velocity.x = -ball.velocity.x;
                // Re-point the sprite along the new velocity vector.
                ball.rotation =
                        (float) (Math.atan(ball.velocity.y / ball.velocity.x) - (Math.PI / 2));
                ball.update(20);
                ball.bounces++;
                EventBus.getInstance().sendEvent(EventBus.PLAY_SOUND, R.raw.present_throw_block);
            }
        }
        // Ditto for the bottom wall.
        if (ball.position.y > WATER_POLO_HEIGHT) {
            if (ball.bounces > 1 || ball.shotBlocked) {
                missShot(ball);
            } else {
                ball.velocity.y = -ball.velocity.y;
                ball.rotation =
                        (float) (Math.atan(ball.velocity.y / ball.velocity.x) - (Math.PI / 2));
                ball.update(20);
                ball.bounces++;
                EventBus.getInstance().sendEvent(EventBus.PLAY_SOUND, R.raw.present_throw_block);
            }
        }
    }

    /**
     * Spawns a present at the player's hand, flying at {@code BALL_SPEED} along the given angle.
     *
     * @param radians throw direction in radians (standard math convention: velocity is
     *     (cos, sin) * BALL_SPEED)
     */
    private void addBall(float radians) {
        AnimatedSprite ballSprite = spriteWithIds(PresentTossSprites.present_throw_thrownpresent);
        PresentActor ball = new PresentActor(ballSprite);
        ball.shouldScaleWithHeight = false;
        ball.zIndex = 20;
        // Offset from the player's position to roughly the elf's throwing hand.
        ball.position.set(player.position.x + 78, player.position.y - 89);
        ball.clearStreak();
        ball.rotation = radians + (float) Math.PI / 2;
        ball.velocity.set(
                BALL_SPEED * (float) Math.cos(radians), BALL_SPEED * (float) Math.sin(radians));
        ball.hidden = false;
        actors.add(ball);
        balls.add(ball);
    }

    /**
     * Knocks a blocked present away: kills its velocity and tween-animates a deflection arc
     * (up then down, drifting sideways). When the arc finishes, the present is removed via
     * {@link #missShot} if it hasn't already been cleaned up by a reset.
     */
    private void blockBall(final PresentActor ball, ElfActor blockingOpponent) {
        // Deflection spin and sideways drift derived from the incoming x velocity.
        float rotation = ball.velocity.x / 200;
        float directionX = ball.velocity.x * 0.5f;
        if (ball.velocity.x > 0) {
            directionX += 25;
        } else {
            directionX -= 25;
        }
        float initialX = ball.position.x;
        float finalX = initialX + directionX;
        float initialY = ball.position.y;
        float midY = initialY - 140;
        float finalY = initialY + 220;
        ball.velocity.set(0, 0);
        ball.shotBlocked = true;
        // Downward half of the arc; ends the ball's life.
        final ActorTween secondYTween =
                new ActorTween(ball) {
                    @Override
                    protected void onFinish() {
                        // Guard: a reset may have already removed the ball.
                        if (actors.contains(ball)) {
                            missShot(ball);
                        }
                    }
                }.fromY(midY).toY(finalY).withInterpolator(Interpolator.EASE_IN).withDuration(0.4f);
        // Upward half of the arc; chains into the downward half.
        final ActorTween firstYTween =
                new ActorTween(ball) {
                    @Override
                    protected void onFinish() {
                        tweenManager.add(secondYTween);
                    }
                }.fromY(initialY)
                        .toY(midY)
                        .withInterpolator(Interpolator.EASE_OUT)
                        .withDuration(0.4f);
        // Sideways drift + spin runs across the whole 0.8s arc.
        final ActorTween xTween =
                new ActorTween(ball)
                        .fromX(initialX)
                        .toX(finalX)
                        .withRotation(0, rotation)
                        .withDuration(0.8f);
        tweenManager.add(firstYTween);
        tweenManager.add(xTween);
    }

    /** Returns whether (x, y) lies inside the axis-aligned goal scoring box. */
    private boolean goalBoxContains(float x, float y) {
        if (x < GOAL_BOX_X || x > GOAL_BOX_X + GOAL_BOX_WIDTH) {
            return false;
        }
        if (y < GOAL_BOX_Y || y > GOAL_BOX_Y + GOAL_BOX_HEIGHT) {
            return false;
        }
        return true;
    }

    /**
     * Plays the aiming-hint animation on the three targets: shrink in, bounce up to three times
     * (fewer once the player has scored), then fade out and re-arm the pulse timer.
     */
    private void pulseTargets() {
        targetLeft.hidden = false;
        targetMiddle.hidden = false;
        targetRight.hidden = false;
        targetLeft.alpha = 1;
        targetMiddle.alpha = 1;
        targetRight.alpha = 1;
        final float startScale = 1;
        final float endScale = 0.6f;
        final float bounceScale = 0.8f;
        // The tweens below are declared last-to-first because each stage references the next.
        final Tween fadeout =
                new Tween(0.2f) {
                    @Override
                    protected void updateValues(float percentDone) {
                        float alpha = Interpolator.FAST_IN.getValue(percentDone, 1, 0);
                        targetLeft.alpha = alpha;
                        targetMiddle.alpha = alpha;
                        targetRight.alpha = alpha;
                    }

                    @Override
                    protected void onFinish() {
                        targetLeft.hidden = true;
                        targetMiddle.hidden = true;
                        targetRight.hidden = true;
                        msTillNextTargetPulse = MS_BETWEEN_TARGET_FLASHES;
                    }
                };
        final Tween bounceThree =
                new Tween(0.15f) {
                    @Override
                    protected void updateValues(float percentDone) {
                        float scale =
                                Interpolator.FAST_IN.getValue(percentDone, bounceScale, endScale);
                        targetLeft.scale = scale;
                        targetMiddle.scale = scale;
                        targetRight.scale = scale;
                    }

                    @Override
                    protected void onFinish() {
                        tweenManager.add(fadeout);
                    }
                };
        final Tween bounceTwo =
                new Tween(0.15f) {
                    @Override
                    protected void updateValues(float percentDone) {
                        float scale =
                                Interpolator.FAST_IN.getValue(percentDone, bounceScale, endScale);
                        targetLeft.scale = scale;
                        targetMiddle.scale = scale;
                        targetRight.scale = scale;
                    }

                    @Override
                    protected void onFinish() {
                        // Skip remaining bounces as soon as the player has any points.
                        tweenManager.add(score == 0 ? bounceThree : fadeout);
                    }
                };
        final Tween bounceOne =
                new Tween(0.15f) {
                    @Override
                    protected void updateValues(float percentDone) {
                        float scale =
                                Interpolator.FAST_IN.getValue(percentDone, bounceScale, endScale);
                        targetLeft.scale = scale;
                        targetMiddle.scale = scale;
                        targetRight.scale = scale;
                    }

                    @Override
                    protected void onFinish() {
                        tweenManager.add(score == 0 ? bounceTwo : fadeout);
                    }
                };
        Tween shrinkIn =
                new Tween(0.3f) {
                    @Override
                    protected void updateValues(float percentDone) {
                        float scale =
                                Interpolator.FAST_IN.getValue(percentDone, startScale, endScale);
                        targetLeft.scale = scale;
                        targetMiddle.scale = scale;
                        targetRight.scale = scale;
                    }

                    @Override
                    protected void onFinish() {
                        tweenManager.add(score == 0 ? bounceOne : fadeout);
                    }
                };
        tweenManager.add(shrinkIn);
    }

    /**
     * Returns the first opponent able to block the ball at its current position, or null if none
     * can (or the ball was already blocked once — a ball is only ever blocked one time).
     */
    ElfActor getBlockingOpponentIfAny(PresentActor ball) {
        if (ball.shotBlocked) {
            return null; // Already blocked once, let it go.
        }
        if (opponentOne.canBlock(ball.position.x, ball.position.y)) {
            return opponentOne;
        } else if (opponentTwo.canBlock(ball.position.x, ball.position.y)) {
            return opponentTwo;
        } else if (opponentThree.canBlock(ball.position.x, ball.position.y)) {
            return opponentThree;
        }
        return null;
    }

    /**
     * Handles a block: marks the ball blocked, plays the block sound, and once the opponent's
     * block animation reaches its contact point, deflects the ball and shakes the screen.
     */
    private void blockShot(final PresentActor ball, final ElfActor blockingOpponent) {
        // Blocked!
        ball.shotBlocked = true;
        EventBus.getInstance().sendEvent(EventBus.PLAY_SOUND, R.raw.tennis_eliminate);
        blockingOpponent.blockShot(
                new Callback() {
                    @Override
                    public void call() {
                        blockBall(ball, blockingOpponent);
                        shake(1, VIBRATION_SMALL);
                    }
                });
    }

    /** Removes a ball from play (off-screen, bounced out, or finished its block arc). */
    private void missShot(PresentActor ball) {
        actors.remove(ball);
        balls.remove(ball);
    }

    /**
     * Handles a goal: swaps the ball for a one-shot splat effect, shakes the bag and screen,
     * plays the goal sound, and brings in any newly-unlocked opponents.
     */
    private void scoreShot(PresentActor ball) {
        actors.remove(ball);
        balls.remove(ball);

        // Swap ball for splat.
        final SpriteActor splat = actorWithIds(PresentTossSprites.orange_present_falling);
        splat.position.set(
                ball.position.x - splat.sprite.frameWidth / 2,
                ball.position.y - splat.sprite.frameHeight);
        splat.zIndex = -1;
        splat.sprite.setLoop(false);
        splat.sprite.addListener(
                new AnimatedSpriteListener() {
                    @Override
                    public void onFinished() {
                        // Splat is a one-shot effect; remove it when its animation ends.
                        actors.remove(splat);
                        effects.remove(splat);
                    }
                });
        actors.add(splat);
        effects.add(splat);

        // Shake the goal.
        goal.sprite.setPaused(false);

        // Play goal score sound.
        EventBus.getInstance().sendEvent(EventBus.PLAY_SOUND, R.raw.present_throw_goal);

        // Shake the screen a little bit.
        shake(1, VIBRATION_SMALL);

        // Reset ball.
        ball.velocity.set(0, 0);
        ball.position.set(player.position.x, player.position.y);

        // Time for more opponents?
        if (score >= OPPONENT_ONE_ENTRANCE_THRESHOLD && opponentOne.hidden) {
            bringInOpponent(opponentOne, OPPONENT_ONE_SPEED);
        }
        if (score >= OPPONENT_TWO_ENTRANCE_THRESHOLD && opponentTwo.hidden) {
            bringInOpponent(opponentTwo, OPPONENT_TWO_SPEED);
        }
        if (score >= OPPONENT_THREE_ENTRANCE_THRESHOLD && opponentThree.hidden) {
            bringInOpponent(opponentThree, OPPONENT_THREE_SPEED);
        }
        scoredAtLeastOneShot = true;
    }

    /** Sets the score and notifies listeners (the score HUD) via the event bus. */
    private void newScore(int newScore) {
        score = newScore;
        eventBus.sendEvent(EventBus.SCORE_CHANGED, score);
    }

    /**
     * Spawns a floating score popup ("+1", "+2", "-1") that drifts upward while fading out,
     * then removes itself.
     *
     * @param x anchor x (shifted left 30 units to roughly center the text)
     * @param y anchor y (shifted up 70 units to sit above the ball)
     * @param pointText the text to display
     * @param color ARGB text color
     */
    private void addPointText(float x, float y, String pointText, int color) {
        final TextActor pointTextActor = new TextActor(pointText);
        x = x - 30;
        y = y - 70;
        pointTextActor.setColor(color);
        pointTextActor.setBold(true);
        pointTextActor.position.set(x, y);
        pointTextActor.scale = 5;
        pointTextActor.zIndex = 1000;
        actors.add(pointTextActor);
        effects.add(pointTextActor);
        tweenManager.add(
                new ActorTween(pointTextActor) {
                    @Override
                    protected void onFinish() {
                        actors.remove(pointTextActor);
                        effects.remove(pointTextActor);
                    }
                }.withDuration(POINT_TEXT_ANIMATION_TIME)
                        .withAlpha(1, 0)
                        .toY(y - 80)
                        .withInterpolator(Interpolator.FAST_IN));
    }

    /**
     * Unhides an opponent, plays its entrance animation, then starts its endless
     * right/left patrol at the given speed.
     */
    private void bringInOpponent(final ElfActor opponent, final float speed) {
        EventBus.getInstance().sendEvent(EventBus.PLAY_SOUND, R.raw.present_throw_character_appear);
        opponent.hidden = false;
        opponent.enter(
                new Callback() {
                    @Override
                    public void call() {
                        tweenOpponentRight(opponent, speed);
                    }
                });
    }

    // Move opponent to the right. Chain a tween to the left at the end for endless motion.
    private void tweenOpponentRight(final ElfActor opponent, final float speed) {
        if (state != State.PLAYING) {
            // Stop moving opponents side-to-side at end of game.
            tweenOpponentLeft(opponent, speed);
            return;
        }
        tweenOpponent(
                opponent,
                400,
                speed,
                new Callback() {
                    @Override
                    public void call() {
                        tweenOpponentLeft(opponent, speed);
                    }
                });
        opponent.swimRight();
    }

    // Move opponent to the left. Chain a tween to the right at the end for endless motion.
    private void tweenOpponentLeft(final ElfActor opponent, final float speed) {
        tweenOpponent(
                opponent,
                0,
                speed,
                new Callback() {
                    @Override
                    public void call() {
                        tweenOpponentRight(opponent, speed);
                    }
                });
        opponent.swimLeft();
    }

    // Helper function for moving opponents left & right. Duration is distance / speed so
    // all patrols move at constant velocity regardless of distance.
    private void tweenOpponent(ElfActor opponent, float x, float speed, Callback next) {
        ActorTween tween =
                new ActorTween(opponent)
                        .toX(x)
                        .withDuration(
                                ActorHelper.distanceBetween(opponent.position.x, 0, x, 0) / speed)
                        .whenFinished(next);
        tweenManager.add(tween);
    }

    // NOTE(review): intentionally a stub — every branch is a no-op. Kept (and called from
    // reset()/onFling()) as a hook for a future reload animation; see THROW_DELAY_SECONDS.
    private void maybePickUpAnotherBall() {
        if (state == State.GAME_OVER) {
            // No-op
        } else if (ballsThrown == 0) {
            // No-op
        } else {
            // No-op
        }
    }

    /**
     * Handles a throw gesture from the view. Ignored when the game is over or a present is
     * already in flight. The first fling dismisses the tutorial and starts play.
     *
     * @param radians throw direction in radians, passed through to {@link #addBall}
     */
    public void onFling(final float radians) {
        if (state == State.GAME_OVER || balls.size() > 0) {
            return;
        }
        if (state == State.WAITING) {
            instructions.hide();
            setState(State.PLAYING);
        }
        canThrow = false; // No more throws until ball either goes in goal or leaves screen.
        tweenManager.add(
                new EmptyTween(THROW_DELAY_SECONDS) {
                    @Override
                    protected void onFinish() {
                        canThrow = true;
                    }
                });
        ballsThrown++;
        EventBus.getInstance().sendEvent(EventBus.PLAY_SOUND, R.raw.present_throw_throw);
        // Spawn the ball at the release point of the throw animation, then reload.
        player.throwBall(
                new Callback() {
                    @Override
                    public void call() {
                        addBall(radians);
                    }
                },
                new Callback() {
                    @Override
                    public void call() {
                        maybePickUpAnotherBall();
                    }
                });
    }

    /** Wraps a frame-id list in a SpriteActor positioned at the origin with zero velocity. */
    private SpriteActor actorWithIds(int[] ids) {
        return new SpriteActor(spriteWithIds(ids), Vector2D.get(0, 0), Vector2D.get(0, 0));
    }

    /** Loads an animated sprite from frame ids and overrides its playback FPS. */
    private AnimatedSprite spriteWithIds(int[] ids, int fps) {
        AnimatedSprite sprite = spriteWithIds(ids);
        sprite.setFPS(fps);
        return sprite;
    }

    /** Loads an animated sprite from drawable frame ids. */
    private AnimatedSprite spriteWithIds(int[] ids) {
        return AnimatedSprite.fromFrames(resources, ids);
    }

    /**
     * Vibrates the device and, if magnitude is positive, shakes the camera.
     *
     * @param screenShakeMagnitude camera shake strength; 0 or less skips the visual shake
     * @param vibrationMs haptic vibration duration in milliseconds
     */
    private void shake(float screenShakeMagnitude, long vibrationMs) {
        vibrator.vibrate(vibrationMs);
        if (screenShakeMagnitude > 0) {
            cameraShake.shake(33, screenShakeMagnitude, SHAKE_FALLOFF);
        }
    }

    /**
     * Transitions the state machine and broadcasts the change. Entering TITLE schedules an
     * automatic advance to WAITING after {@code titleDurationMs}.
     */
    public void setState(State newState) {
        state = newState;
        eventBus.sendEvent(EventBus.GAME_STATE_CHANGED, newState);
        if (newState == State.TITLE) {
            tweenManager.add(
                    new EmptyTween(titleDurationMs / 1000.0f) {
                        @Override
                        protected void onFinish() {
                            setState(State.WAITING);
                        }
                    });
        }
    }

    /** High-level phases of the game are controlled by a state machine which uses these states. */
    enum State {
        TITLE,
        WAITING,
        PLAYING,
        GAME_OVER
    }

    /** A present that the elf throws. */
    class PresentActor extends ThrownActor {
        // True once an opponent has deflected this present; it can no longer score or be re-blocked.
        public boolean shotBlocked;
        // Wall bounces so far; a two-bounce goal scores double, a third bounce is a miss.
        public int bounces;

        PresentActor(AnimatedSprite ballSprite) {
            super(null, ballSprite, null, 3);
            bounces = 0;
            shotBlocked = false;
        }
    }
}
googleapis/google-cloud-java
36,170
java-bare-metal-solution/proto-google-cloud-bare-metal-solution-v2/src/main/java/com/google/cloud/baremetalsolution/v2/ListOSImagesResponse.java
/* * Copyright 2025 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ // Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/cloud/baremetalsolution/v2/osimage.proto // Protobuf Java Version: 3.25.8 package com.google.cloud.baremetalsolution.v2; /** * * * <pre> * Request for getting all available OS images. * </pre> * * Protobuf type {@code google.cloud.baremetalsolution.v2.ListOSImagesResponse} */ public final class ListOSImagesResponse extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.cloud.baremetalsolution.v2.ListOSImagesResponse) ListOSImagesResponseOrBuilder { private static final long serialVersionUID = 0L; // Use ListOSImagesResponse.newBuilder() to construct. 
private ListOSImagesResponse(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private ListOSImagesResponse() { osImages_ = java.util.Collections.emptyList(); nextPageToken_ = ""; } @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance(UnusedPrivateParameter unused) { return new ListOSImagesResponse(); } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.baremetalsolution.v2.OsImageProto .internal_static_google_cloud_baremetalsolution_v2_ListOSImagesResponse_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.baremetalsolution.v2.OsImageProto .internal_static_google_cloud_baremetalsolution_v2_ListOSImagesResponse_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.baremetalsolution.v2.ListOSImagesResponse.class, com.google.cloud.baremetalsolution.v2.ListOSImagesResponse.Builder.class); } public static final int OS_IMAGES_FIELD_NUMBER = 1; @SuppressWarnings("serial") private java.util.List<com.google.cloud.baremetalsolution.v2.OSImage> osImages_; /** * * * <pre> * The OS images available. * </pre> * * <code>repeated .google.cloud.baremetalsolution.v2.OSImage os_images = 1;</code> */ @java.lang.Override public java.util.List<com.google.cloud.baremetalsolution.v2.OSImage> getOsImagesList() { return osImages_; } /** * * * <pre> * The OS images available. * </pre> * * <code>repeated .google.cloud.baremetalsolution.v2.OSImage os_images = 1;</code> */ @java.lang.Override public java.util.List<? extends com.google.cloud.baremetalsolution.v2.OSImageOrBuilder> getOsImagesOrBuilderList() { return osImages_; } /** * * * <pre> * The OS images available. 
* </pre> * * <code>repeated .google.cloud.baremetalsolution.v2.OSImage os_images = 1;</code> */ @java.lang.Override public int getOsImagesCount() { return osImages_.size(); } /** * * * <pre> * The OS images available. * </pre> * * <code>repeated .google.cloud.baremetalsolution.v2.OSImage os_images = 1;</code> */ @java.lang.Override public com.google.cloud.baremetalsolution.v2.OSImage getOsImages(int index) { return osImages_.get(index); } /** * * * <pre> * The OS images available. * </pre> * * <code>repeated .google.cloud.baremetalsolution.v2.OSImage os_images = 1;</code> */ @java.lang.Override public com.google.cloud.baremetalsolution.v2.OSImageOrBuilder getOsImagesOrBuilder(int index) { return osImages_.get(index); } public static final int NEXT_PAGE_TOKEN_FIELD_NUMBER = 2; @SuppressWarnings("serial") private volatile java.lang.Object nextPageToken_ = ""; /** * * * <pre> * Token to retrieve the next page of results, or empty if there are no more * results in the list. * </pre> * * <code>string next_page_token = 2;</code> * * @return The nextPageToken. */ @java.lang.Override public java.lang.String getNextPageToken() { java.lang.Object ref = nextPageToken_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); nextPageToken_ = s; return s; } } /** * * * <pre> * Token to retrieve the next page of results, or empty if there are no more * results in the list. * </pre> * * <code>string next_page_token = 2;</code> * * @return The bytes for nextPageToken. 
*/ @java.lang.Override public com.google.protobuf.ByteString getNextPageTokenBytes() { java.lang.Object ref = nextPageToken_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); nextPageToken_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { for (int i = 0; i < osImages_.size(); i++) { output.writeMessage(1, osImages_.get(i)); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 2, nextPageToken_); } getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; for (int i = 0; i < osImages_.size(); i++) { size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, osImages_.get(i)); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, nextPageToken_); } size += getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof com.google.cloud.baremetalsolution.v2.ListOSImagesResponse)) { return super.equals(obj); } com.google.cloud.baremetalsolution.v2.ListOSImagesResponse other = (com.google.cloud.baremetalsolution.v2.ListOSImagesResponse) obj; if (!getOsImagesList().equals(other.getOsImagesList())) return false; if (!getNextPageToken().equals(other.getNextPageToken())) 
return false; if (!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); if (getOsImagesCount() > 0) { hash = (37 * hash) + OS_IMAGES_FIELD_NUMBER; hash = (53 * hash) + getOsImagesList().hashCode(); } hash = (37 * hash) + NEXT_PAGE_TOKEN_FIELD_NUMBER; hash = (53 * hash) + getNextPageToken().hashCode(); hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static com.google.cloud.baremetalsolution.v2.ListOSImagesResponse parseFrom( java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.baremetalsolution.v2.ListOSImagesResponse parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.baremetalsolution.v2.ListOSImagesResponse parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.baremetalsolution.v2.ListOSImagesResponse parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.baremetalsolution.v2.ListOSImagesResponse parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.baremetalsolution.v2.ListOSImagesResponse parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return 
PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.baremetalsolution.v2.ListOSImagesResponse parseFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.baremetalsolution.v2.ListOSImagesResponse parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.baremetalsolution.v2.ListOSImagesResponse parseDelimitedFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); } public static com.google.cloud.baremetalsolution.v2.ListOSImagesResponse parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.baremetalsolution.v2.ListOSImagesResponse parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.baremetalsolution.v2.ListOSImagesResponse parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder( com.google.cloud.baremetalsolution.v2.ListOSImagesResponse prototype) { return 
DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * * * <pre> * Request for getting all available OS images. * </pre> * * Protobuf type {@code google.cloud.baremetalsolution.v2.ListOSImagesResponse} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:google.cloud.baremetalsolution.v2.ListOSImagesResponse) com.google.cloud.baremetalsolution.v2.ListOSImagesResponseOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.baremetalsolution.v2.OsImageProto .internal_static_google_cloud_baremetalsolution_v2_ListOSImagesResponse_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.baremetalsolution.v2.OsImageProto .internal_static_google_cloud_baremetalsolution_v2_ListOSImagesResponse_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.baremetalsolution.v2.ListOSImagesResponse.class, com.google.cloud.baremetalsolution.v2.ListOSImagesResponse.Builder.class); } // Construct using com.google.cloud.baremetalsolution.v2.ListOSImagesResponse.newBuilder() private Builder() {} private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); } @java.lang.Override public Builder clear() { super.clear(); bitField0_ = 0; if (osImagesBuilder_ == null) { osImages_ = java.util.Collections.emptyList(); } else { osImages_ = null; osImagesBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000001); nextPageToken_ = ""; return this; } @java.lang.Override public 
com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return com.google.cloud.baremetalsolution.v2.OsImageProto .internal_static_google_cloud_baremetalsolution_v2_ListOSImagesResponse_descriptor; } @java.lang.Override public com.google.cloud.baremetalsolution.v2.ListOSImagesResponse getDefaultInstanceForType() { return com.google.cloud.baremetalsolution.v2.ListOSImagesResponse.getDefaultInstance(); } @java.lang.Override public com.google.cloud.baremetalsolution.v2.ListOSImagesResponse build() { com.google.cloud.baremetalsolution.v2.ListOSImagesResponse result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public com.google.cloud.baremetalsolution.v2.ListOSImagesResponse buildPartial() { com.google.cloud.baremetalsolution.v2.ListOSImagesResponse result = new com.google.cloud.baremetalsolution.v2.ListOSImagesResponse(this); buildPartialRepeatedFields(result); if (bitField0_ != 0) { buildPartial0(result); } onBuilt(); return result; } private void buildPartialRepeatedFields( com.google.cloud.baremetalsolution.v2.ListOSImagesResponse result) { if (osImagesBuilder_ == null) { if (((bitField0_ & 0x00000001) != 0)) { osImages_ = java.util.Collections.unmodifiableList(osImages_); bitField0_ = (bitField0_ & ~0x00000001); } result.osImages_ = osImages_; } else { result.osImages_ = osImagesBuilder_.build(); } } private void buildPartial0(com.google.cloud.baremetalsolution.v2.ListOSImagesResponse result) { int from_bitField0_ = bitField0_; if (((from_bitField0_ & 0x00000002) != 0)) { result.nextPageToken_ = nextPageToken_; } } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) { return 
super.clearField(field); } @java.lang.Override public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof com.google.cloud.baremetalsolution.v2.ListOSImagesResponse) { return mergeFrom((com.google.cloud.baremetalsolution.v2.ListOSImagesResponse) other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(com.google.cloud.baremetalsolution.v2.ListOSImagesResponse other) { if (other == com.google.cloud.baremetalsolution.v2.ListOSImagesResponse.getDefaultInstance()) return this; if (osImagesBuilder_ == null) { if (!other.osImages_.isEmpty()) { if (osImages_.isEmpty()) { osImages_ = other.osImages_; bitField0_ = (bitField0_ & ~0x00000001); } else { ensureOsImagesIsMutable(); osImages_.addAll(other.osImages_); } onChanged(); } } else { if (!other.osImages_.isEmpty()) { if (osImagesBuilder_.isEmpty()) { osImagesBuilder_.dispose(); osImagesBuilder_ = null; osImages_ = other.osImages_; bitField0_ = (bitField0_ & ~0x00000001); osImagesBuilder_ = com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ? 
getOsImagesFieldBuilder() : null; } else { osImagesBuilder_.addAllMessages(other.osImages_); } } } if (!other.getNextPageToken().isEmpty()) { nextPageToken_ = other.nextPageToken_; bitField0_ |= 0x00000002; onChanged(); } this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { com.google.cloud.baremetalsolution.v2.OSImage m = input.readMessage( com.google.cloud.baremetalsolution.v2.OSImage.parser(), extensionRegistry); if (osImagesBuilder_ == null) { ensureOsImagesIsMutable(); osImages_.add(m); } else { osImagesBuilder_.addMessage(m); } break; } // case 10 case 18: { nextPageToken_ = input.readStringRequireUtf8(); bitField0_ |= 0x00000002; break; } // case 18 default: { if (!super.parseUnknownField(input, extensionRegistry, tag)) { done = true; // was an endgroup tag } break; } // default: } // switch (tag) } // while (!done) } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.unwrapIOException(); } finally { onChanged(); } // finally return this; } private int bitField0_; private java.util.List<com.google.cloud.baremetalsolution.v2.OSImage> osImages_ = java.util.Collections.emptyList(); private void ensureOsImagesIsMutable() { if (!((bitField0_ & 0x00000001) != 0)) { osImages_ = new java.util.ArrayList<com.google.cloud.baremetalsolution.v2.OSImage>(osImages_); bitField0_ |= 0x00000001; } } private com.google.protobuf.RepeatedFieldBuilderV3< com.google.cloud.baremetalsolution.v2.OSImage, com.google.cloud.baremetalsolution.v2.OSImage.Builder, 
com.google.cloud.baremetalsolution.v2.OSImageOrBuilder> osImagesBuilder_; /** * * * <pre> * The OS images available. * </pre> * * <code>repeated .google.cloud.baremetalsolution.v2.OSImage os_images = 1;</code> */ public java.util.List<com.google.cloud.baremetalsolution.v2.OSImage> getOsImagesList() { if (osImagesBuilder_ == null) { return java.util.Collections.unmodifiableList(osImages_); } else { return osImagesBuilder_.getMessageList(); } } /** * * * <pre> * The OS images available. * </pre> * * <code>repeated .google.cloud.baremetalsolution.v2.OSImage os_images = 1;</code> */ public int getOsImagesCount() { if (osImagesBuilder_ == null) { return osImages_.size(); } else { return osImagesBuilder_.getCount(); } } /** * * * <pre> * The OS images available. * </pre> * * <code>repeated .google.cloud.baremetalsolution.v2.OSImage os_images = 1;</code> */ public com.google.cloud.baremetalsolution.v2.OSImage getOsImages(int index) { if (osImagesBuilder_ == null) { return osImages_.get(index); } else { return osImagesBuilder_.getMessage(index); } } /** * * * <pre> * The OS images available. * </pre> * * <code>repeated .google.cloud.baremetalsolution.v2.OSImage os_images = 1;</code> */ public Builder setOsImages(int index, com.google.cloud.baremetalsolution.v2.OSImage value) { if (osImagesBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureOsImagesIsMutable(); osImages_.set(index, value); onChanged(); } else { osImagesBuilder_.setMessage(index, value); } return this; } /** * * * <pre> * The OS images available. 
* </pre> * * <code>repeated .google.cloud.baremetalsolution.v2.OSImage os_images = 1;</code> */ public Builder setOsImages( int index, com.google.cloud.baremetalsolution.v2.OSImage.Builder builderForValue) { if (osImagesBuilder_ == null) { ensureOsImagesIsMutable(); osImages_.set(index, builderForValue.build()); onChanged(); } else { osImagesBuilder_.setMessage(index, builderForValue.build()); } return this; } /** * * * <pre> * The OS images available. * </pre> * * <code>repeated .google.cloud.baremetalsolution.v2.OSImage os_images = 1;</code> */ public Builder addOsImages(com.google.cloud.baremetalsolution.v2.OSImage value) { if (osImagesBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureOsImagesIsMutable(); osImages_.add(value); onChanged(); } else { osImagesBuilder_.addMessage(value); } return this; } /** * * * <pre> * The OS images available. * </pre> * * <code>repeated .google.cloud.baremetalsolution.v2.OSImage os_images = 1;</code> */ public Builder addOsImages(int index, com.google.cloud.baremetalsolution.v2.OSImage value) { if (osImagesBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureOsImagesIsMutable(); osImages_.add(index, value); onChanged(); } else { osImagesBuilder_.addMessage(index, value); } return this; } /** * * * <pre> * The OS images available. * </pre> * * <code>repeated .google.cloud.baremetalsolution.v2.OSImage os_images = 1;</code> */ public Builder addOsImages( com.google.cloud.baremetalsolution.v2.OSImage.Builder builderForValue) { if (osImagesBuilder_ == null) { ensureOsImagesIsMutable(); osImages_.add(builderForValue.build()); onChanged(); } else { osImagesBuilder_.addMessage(builderForValue.build()); } return this; } /** * * * <pre> * The OS images available. 
* </pre> * * <code>repeated .google.cloud.baremetalsolution.v2.OSImage os_images = 1;</code> */ public Builder addOsImages( int index, com.google.cloud.baremetalsolution.v2.OSImage.Builder builderForValue) { if (osImagesBuilder_ == null) { ensureOsImagesIsMutable(); osImages_.add(index, builderForValue.build()); onChanged(); } else { osImagesBuilder_.addMessage(index, builderForValue.build()); } return this; } /** * * * <pre> * The OS images available. * </pre> * * <code>repeated .google.cloud.baremetalsolution.v2.OSImage os_images = 1;</code> */ public Builder addAllOsImages( java.lang.Iterable<? extends com.google.cloud.baremetalsolution.v2.OSImage> values) { if (osImagesBuilder_ == null) { ensureOsImagesIsMutable(); com.google.protobuf.AbstractMessageLite.Builder.addAll(values, osImages_); onChanged(); } else { osImagesBuilder_.addAllMessages(values); } return this; } /** * * * <pre> * The OS images available. * </pre> * * <code>repeated .google.cloud.baremetalsolution.v2.OSImage os_images = 1;</code> */ public Builder clearOsImages() { if (osImagesBuilder_ == null) { osImages_ = java.util.Collections.emptyList(); bitField0_ = (bitField0_ & ~0x00000001); onChanged(); } else { osImagesBuilder_.clear(); } return this; } /** * * * <pre> * The OS images available. * </pre> * * <code>repeated .google.cloud.baremetalsolution.v2.OSImage os_images = 1;</code> */ public Builder removeOsImages(int index) { if (osImagesBuilder_ == null) { ensureOsImagesIsMutable(); osImages_.remove(index); onChanged(); } else { osImagesBuilder_.remove(index); } return this; } /** * * * <pre> * The OS images available. * </pre> * * <code>repeated .google.cloud.baremetalsolution.v2.OSImage os_images = 1;</code> */ public com.google.cloud.baremetalsolution.v2.OSImage.Builder getOsImagesBuilder(int index) { return getOsImagesFieldBuilder().getBuilder(index); } /** * * * <pre> * The OS images available. 
* </pre> * * <code>repeated .google.cloud.baremetalsolution.v2.OSImage os_images = 1;</code> */ public com.google.cloud.baremetalsolution.v2.OSImageOrBuilder getOsImagesOrBuilder(int index) { if (osImagesBuilder_ == null) { return osImages_.get(index); } else { return osImagesBuilder_.getMessageOrBuilder(index); } } /** * * * <pre> * The OS images available. * </pre> * * <code>repeated .google.cloud.baremetalsolution.v2.OSImage os_images = 1;</code> */ public java.util.List<? extends com.google.cloud.baremetalsolution.v2.OSImageOrBuilder> getOsImagesOrBuilderList() { if (osImagesBuilder_ != null) { return osImagesBuilder_.getMessageOrBuilderList(); } else { return java.util.Collections.unmodifiableList(osImages_); } } /** * * * <pre> * The OS images available. * </pre> * * <code>repeated .google.cloud.baremetalsolution.v2.OSImage os_images = 1;</code> */ public com.google.cloud.baremetalsolution.v2.OSImage.Builder addOsImagesBuilder() { return getOsImagesFieldBuilder() .addBuilder(com.google.cloud.baremetalsolution.v2.OSImage.getDefaultInstance()); } /** * * * <pre> * The OS images available. * </pre> * * <code>repeated .google.cloud.baremetalsolution.v2.OSImage os_images = 1;</code> */ public com.google.cloud.baremetalsolution.v2.OSImage.Builder addOsImagesBuilder(int index) { return getOsImagesFieldBuilder() .addBuilder(index, com.google.cloud.baremetalsolution.v2.OSImage.getDefaultInstance()); } /** * * * <pre> * The OS images available. 
* </pre> * * <code>repeated .google.cloud.baremetalsolution.v2.OSImage os_images = 1;</code> */ public java.util.List<com.google.cloud.baremetalsolution.v2.OSImage.Builder> getOsImagesBuilderList() { return getOsImagesFieldBuilder().getBuilderList(); } private com.google.protobuf.RepeatedFieldBuilderV3< com.google.cloud.baremetalsolution.v2.OSImage, com.google.cloud.baremetalsolution.v2.OSImage.Builder, com.google.cloud.baremetalsolution.v2.OSImageOrBuilder> getOsImagesFieldBuilder() { if (osImagesBuilder_ == null) { osImagesBuilder_ = new com.google.protobuf.RepeatedFieldBuilderV3< com.google.cloud.baremetalsolution.v2.OSImage, com.google.cloud.baremetalsolution.v2.OSImage.Builder, com.google.cloud.baremetalsolution.v2.OSImageOrBuilder>( osImages_, ((bitField0_ & 0x00000001) != 0), getParentForChildren(), isClean()); osImages_ = null; } return osImagesBuilder_; } private java.lang.Object nextPageToken_ = ""; /** * * * <pre> * Token to retrieve the next page of results, or empty if there are no more * results in the list. * </pre> * * <code>string next_page_token = 2;</code> * * @return The nextPageToken. */ public java.lang.String getNextPageToken() { java.lang.Object ref = nextPageToken_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); nextPageToken_ = s; return s; } else { return (java.lang.String) ref; } } /** * * * <pre> * Token to retrieve the next page of results, or empty if there are no more * results in the list. * </pre> * * <code>string next_page_token = 2;</code> * * @return The bytes for nextPageToken. 
*/ public com.google.protobuf.ByteString getNextPageTokenBytes() { java.lang.Object ref = nextPageToken_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); nextPageToken_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * * * <pre> * Token to retrieve the next page of results, or empty if there are no more * results in the list. * </pre> * * <code>string next_page_token = 2;</code> * * @param value The nextPageToken to set. * @return This builder for chaining. */ public Builder setNextPageToken(java.lang.String value) { if (value == null) { throw new NullPointerException(); } nextPageToken_ = value; bitField0_ |= 0x00000002; onChanged(); return this; } /** * * * <pre> * Token to retrieve the next page of results, or empty if there are no more * results in the list. * </pre> * * <code>string next_page_token = 2;</code> * * @return This builder for chaining. */ public Builder clearNextPageToken() { nextPageToken_ = getDefaultInstance().getNextPageToken(); bitField0_ = (bitField0_ & ~0x00000002); onChanged(); return this; } /** * * * <pre> * Token to retrieve the next page of results, or empty if there are no more * results in the list. * </pre> * * <code>string next_page_token = 2;</code> * * @param value The bytes for nextPageToken to set. * @return This builder for chaining. 
*/ public Builder setNextPageTokenBytes(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); nextPageToken_ = value; bitField0_ |= 0x00000002; onChanged(); return this; } @java.lang.Override public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:google.cloud.baremetalsolution.v2.ListOSImagesResponse) } // @@protoc_insertion_point(class_scope:google.cloud.baremetalsolution.v2.ListOSImagesResponse) private static final com.google.cloud.baremetalsolution.v2.ListOSImagesResponse DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new com.google.cloud.baremetalsolution.v2.ListOSImagesResponse(); } public static com.google.cloud.baremetalsolution.v2.ListOSImagesResponse getDefaultInstance() { return DEFAULT_INSTANCE; } private static final com.google.protobuf.Parser<ListOSImagesResponse> PARSER = new com.google.protobuf.AbstractParser<ListOSImagesResponse>() { @java.lang.Override public ListOSImagesResponse parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { Builder builder = newBuilder(); try { builder.mergeFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(builder.buildPartial()); } catch (com.google.protobuf.UninitializedMessageException e) { throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException(e) .setUnfinishedMessage(builder.buildPartial()); } return 
builder.buildPartial(); } }; public static com.google.protobuf.Parser<ListOSImagesResponse> parser() { return PARSER; } @java.lang.Override public com.google.protobuf.Parser<ListOSImagesResponse> getParserForType() { return PARSER; } @java.lang.Override public com.google.cloud.baremetalsolution.v2.ListOSImagesResponse getDefaultInstanceForType() { return DEFAULT_INSTANCE; } }
googleapis/google-cloud-java
36,251
java-retail/proto-google-cloud-retail-v2alpha/src/main/java/com/google/cloud/retail/v2alpha/UpdateServingConfigRequest.java
/*
 * Copyright 2025 Google LLC
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
// Generated by the protocol buffer compiler.  DO NOT EDIT!
// source: google/cloud/retail/v2alpha/serving_config_service.proto

// Protobuf Java Version: 3.25.8
package com.google.cloud.retail.v2alpha;

/**
 *
 *
 * <pre>
 * Request for UpdateServingConfig method.
 * </pre>
 *
 * Protobuf type {@code google.cloud.retail.v2alpha.UpdateServingConfigRequest}
 */
// NOTE(review): machine-generated by protoc — do not hand-edit; change the
// .proto source (serving_config_service.proto) and regenerate instead.
public final class UpdateServingConfigRequest extends com.google.protobuf.GeneratedMessageV3
    implements
    // @@protoc_insertion_point(message_implements:google.cloud.retail.v2alpha.UpdateServingConfigRequest)
    UpdateServingConfigRequestOrBuilder {
  private static final long serialVersionUID = 0L;

  // Use UpdateServingConfigRequest.newBuilder() to construct.
  private UpdateServingConfigRequest(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
    super(builder);
  }

  private UpdateServingConfigRequest() {}

  @java.lang.Override
  @SuppressWarnings({"unused"})
  protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
    return new UpdateServingConfigRequest();
  }

  public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
    return com.google.cloud.retail.v2alpha.ServingConfigServiceProto
        .internal_static_google_cloud_retail_v2alpha_UpdateServingConfigRequest_descriptor;
  }

  @java.lang.Override
  protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internalGetFieldAccessorTable() {
    return com.google.cloud.retail.v2alpha.ServingConfigServiceProto
        .internal_static_google_cloud_retail_v2alpha_UpdateServingConfigRequest_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            com.google.cloud.retail.v2alpha.UpdateServingConfigRequest.class,
            com.google.cloud.retail.v2alpha.UpdateServingConfigRequest.Builder.class);
  }

  // Presence bits: 0x1 = serving_config, 0x2 = update_mask.
  private int bitField0_;
  public static final int SERVING_CONFIG_FIELD_NUMBER = 1;
  private com.google.cloud.retail.v2alpha.ServingConfig servingConfig_;

  /**
   *
   *
   * <pre>
   * Required. The ServingConfig to update.
   * </pre>
   *
   * <code>
   * .google.cloud.retail.v2alpha.ServingConfig serving_config = 1 [(.google.api.field_behavior) = REQUIRED];
   * </code>
   *
   * @return Whether the servingConfig field is set.
   */
  @java.lang.Override
  public boolean hasServingConfig() {
    return ((bitField0_ & 0x00000001) != 0);
  }

  /**
   *
   *
   * <pre>
   * Required. The ServingConfig to update.
   * </pre>
   *
   * <code>
   * .google.cloud.retail.v2alpha.ServingConfig serving_config = 1 [(.google.api.field_behavior) = REQUIRED];
   * </code>
   *
   * @return The servingConfig.
   */
  @java.lang.Override
  public com.google.cloud.retail.v2alpha.ServingConfig getServingConfig() {
    // Never returns null: falls back to the default instance when unset.
    return servingConfig_ == null
        ? com.google.cloud.retail.v2alpha.ServingConfig.getDefaultInstance()
        : servingConfig_;
  }

  /**
   *
   *
   * <pre>
   * Required. The ServingConfig to update.
   * </pre>
   *
   * <code>
   * .google.cloud.retail.v2alpha.ServingConfig serving_config = 1 [(.google.api.field_behavior) = REQUIRED];
   * </code>
   */
  @java.lang.Override
  public com.google.cloud.retail.v2alpha.ServingConfigOrBuilder getServingConfigOrBuilder() {
    return servingConfig_ == null
        ? com.google.cloud.retail.v2alpha.ServingConfig.getDefaultInstance()
        : servingConfig_;
  }

  public static final int UPDATE_MASK_FIELD_NUMBER = 2;
  private com.google.protobuf.FieldMask updateMask_;

  /**
   *
   *
   * <pre>
   * Indicates which fields in the provided
   * [ServingConfig][google.cloud.retail.v2alpha.ServingConfig] to update. The
   * following are NOT supported:
   *
   * * [ServingConfig.name][google.cloud.retail.v2alpha.ServingConfig.name]
   *
   * If not set, all supported fields are updated.
   * </pre>
   *
   * <code>.google.protobuf.FieldMask update_mask = 2;</code>
   *
   * @return Whether the updateMask field is set.
   */
  @java.lang.Override
  public boolean hasUpdateMask() {
    return ((bitField0_ & 0x00000002) != 0);
  }

  /**
   *
   *
   * <pre>
   * Indicates which fields in the provided
   * [ServingConfig][google.cloud.retail.v2alpha.ServingConfig] to update. The
   * following are NOT supported:
   *
   * * [ServingConfig.name][google.cloud.retail.v2alpha.ServingConfig.name]
   *
   * If not set, all supported fields are updated.
   * </pre>
   *
   * <code>.google.protobuf.FieldMask update_mask = 2;</code>
   *
   * @return The updateMask.
   */
  @java.lang.Override
  public com.google.protobuf.FieldMask getUpdateMask() {
    return updateMask_ == null ? com.google.protobuf.FieldMask.getDefaultInstance() : updateMask_;
  }

  /**
   *
   *
   * <pre>
   * Indicates which fields in the provided
   * [ServingConfig][google.cloud.retail.v2alpha.ServingConfig] to update. The
   * following are NOT supported:
   *
   * * [ServingConfig.name][google.cloud.retail.v2alpha.ServingConfig.name]
   *
   * If not set, all supported fields are updated.
   * </pre>
   *
   * <code>.google.protobuf.FieldMask update_mask = 2;</code>
   */
  @java.lang.Override
  public com.google.protobuf.FieldMaskOrBuilder getUpdateMaskOrBuilder() {
    return updateMask_ == null ? com.google.protobuf.FieldMask.getDefaultInstance() : updateMask_;
  }

  // Memoized tri-state: -1 unknown, 0 not initialized, 1 initialized.
  private byte memoizedIsInitialized = -1;

  @java.lang.Override
  public final boolean isInitialized() {
    byte isInitialized = memoizedIsInitialized;
    if (isInitialized == 1) return true;
    if (isInitialized == 0) return false;

    memoizedIsInitialized = 1;
    return true;
  }

  @java.lang.Override
  public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
    // Serializes only the fields whose presence bit is set.
    if (((bitField0_ & 0x00000001) != 0)) {
      output.writeMessage(1, getServingConfig());
    }
    if (((bitField0_ & 0x00000002) != 0)) {
      output.writeMessage(2, getUpdateMask());
    }
    getUnknownFields().writeTo(output);
  }

  @java.lang.Override
  public int getSerializedSize() {
    int size = memoizedSize;
    if (size != -1) return size;

    size = 0;
    if (((bitField0_ & 0x00000001) != 0)) {
      size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, getServingConfig());
    }
    if (((bitField0_ & 0x00000002) != 0)) {
      size += com.google.protobuf.CodedOutputStream.computeMessageSize(2, getUpdateMask());
    }
    size += getUnknownFields().getSerializedSize();
    memoizedSize = size;
    return size;
  }

  @java.lang.Override
  public boolean equals(final java.lang.Object obj) {
    if (obj == this) {
      return true;
    }
    if (!(obj instanceof com.google.cloud.retail.v2alpha.UpdateServingConfigRequest)) {
      return super.equals(obj);
    }
    com.google.cloud.retail.v2alpha.UpdateServingConfigRequest other =
        (com.google.cloud.retail.v2alpha.UpdateServingConfigRequest) obj;

    // Field presence must match before field values are compared.
    if (hasServingConfig() != other.hasServingConfig()) return false;
    if (hasServingConfig()) {
      if (!getServingConfig().equals(other.getServingConfig())) return false;
    }
    if (hasUpdateMask() != other.hasUpdateMask()) return false;
    if (hasUpdateMask()) {
      if (!getUpdateMask().equals(other.getUpdateMask())) return false;
    }
    if (!getUnknownFields().equals(other.getUnknownFields())) return false;
    return true;
  }

  @java.lang.Override
  public int hashCode() {
    if (memoizedHashCode != 0) {
      return memoizedHashCode;
    }
    int hash = 41;
    hash = (19 * hash) + getDescriptor().hashCode();
    if (hasServingConfig()) {
      hash = (37 * hash) + SERVING_CONFIG_FIELD_NUMBER;
      hash = (53 * hash) + getServingConfig().hashCode();
    }
    if (hasUpdateMask()) {
      hash = (37 * hash) + UPDATE_MASK_FIELD_NUMBER;
      hash = (53 * hash) + getUpdateMask().hashCode();
    }
    hash = (29 * hash) + getUnknownFields().hashCode();
    memoizedHashCode = hash;
    return hash;
  }

  public static com.google.cloud.retail.v2alpha.UpdateServingConfigRequest parseFrom(
      java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.cloud.retail.v2alpha.UpdateServingConfigRequest parseFrom(
      java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.cloud.retail.v2alpha.UpdateServingConfigRequest parseFrom(
      com.google.protobuf.ByteString data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.cloud.retail.v2alpha.UpdateServingConfigRequest parseFrom(
      com.google.protobuf.ByteString data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.cloud.retail.v2alpha.UpdateServingConfigRequest parseFrom(byte[] data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.cloud.retail.v2alpha.UpdateServingConfigRequest parseFrom(
      byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.cloud.retail.v2alpha.UpdateServingConfigRequest parseFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }

  public static com.google.cloud.retail.v2alpha.UpdateServingConfigRequest parseFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }

  public static com.google.cloud.retail.v2alpha.UpdateServingConfigRequest parseDelimitedFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
  }

  public static com.google.cloud.retail.v2alpha.UpdateServingConfigRequest parseDelimitedFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
        PARSER, input, extensionRegistry);
  }

  public static com.google.cloud.retail.v2alpha.UpdateServingConfigRequest parseFrom(
      com.google.protobuf.CodedInputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }

  public static com.google.cloud.retail.v2alpha.UpdateServingConfigRequest parseFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }

  @java.lang.Override
  public Builder newBuilderForType() {
    return newBuilder();
  }

  public static Builder newBuilder() {
    return DEFAULT_INSTANCE.toBuilder();
  }

  public static Builder newBuilder(
      com.google.cloud.retail.v2alpha.UpdateServingConfigRequest prototype) {
    return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
  }

  @java.lang.Override
  public Builder toBuilder() {
    return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
  }

  @java.lang.Override
  protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
    Builder builder = new Builder(parent);
    return builder;
  }

  /**
   *
   *
   * <pre>
   * Request for UpdateServingConfig method.
   * </pre>
   *
   * Protobuf type {@code google.cloud.retail.v2alpha.UpdateServingConfigRequest}
   */
  public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
      implements
      // @@protoc_insertion_point(builder_implements:google.cloud.retail.v2alpha.UpdateServingConfigRequest)
      com.google.cloud.retail.v2alpha.UpdateServingConfigRequestOrBuilder {
    public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
      return com.google.cloud.retail.v2alpha.ServingConfigServiceProto
          .internal_static_google_cloud_retail_v2alpha_UpdateServingConfigRequest_descriptor;
    }

    @java.lang.Override
    protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return com.google.cloud.retail.v2alpha.ServingConfigServiceProto
          .internal_static_google_cloud_retail_v2alpha_UpdateServingConfigRequest_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              com.google.cloud.retail.v2alpha.UpdateServingConfigRequest.class,
              com.google.cloud.retail.v2alpha.UpdateServingConfigRequest.Builder.class);
    }

    // Construct using com.google.cloud.retail.v2alpha.UpdateServingConfigRequest.newBuilder()
    private Builder() {
      maybeForceBuilderInitialization();
    }

    private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
      super(parent);
      maybeForceBuilderInitialization();
    }

    private void maybeForceBuilderInitialization() {
      // Eagerly create nested field builders only when the runtime requires it
      // (alwaysUseFieldBuilders is a protobuf-internal debug/consistency flag).
      if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) {
        getServingConfigFieldBuilder();
        getUpdateMaskFieldBuilder();
      }
    }

    @java.lang.Override
    public Builder clear() {
      super.clear();
      bitField0_ = 0;
      servingConfig_ = null;
      if (servingConfigBuilder_ != null) {
        servingConfigBuilder_.dispose();
        servingConfigBuilder_ = null;
      }
      updateMask_ = null;
      if (updateMaskBuilder_ != null) {
        updateMaskBuilder_.dispose();
        updateMaskBuilder_ = null;
      }
      return this;
    }

    @java.lang.Override
    public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
      return com.google.cloud.retail.v2alpha.ServingConfigServiceProto
          .internal_static_google_cloud_retail_v2alpha_UpdateServingConfigRequest_descriptor;
    }

    @java.lang.Override
    public com.google.cloud.retail.v2alpha.UpdateServingConfigRequest getDefaultInstanceForType() {
      return com.google.cloud.retail.v2alpha.UpdateServingConfigRequest.getDefaultInstance();
    }

    @java.lang.Override
    public com.google.cloud.retail.v2alpha.UpdateServingConfigRequest build() {
      com.google.cloud.retail.v2alpha.UpdateServingConfigRequest result = buildPartial();
      if (!result.isInitialized()) {
        throw newUninitializedMessageException(result);
      }
      return result;
    }

    @java.lang.Override
    public com.google.cloud.retail.v2alpha.UpdateServingConfigRequest buildPartial() {
      com.google.cloud.retail.v2alpha.UpdateServingConfigRequest result =
          new com.google.cloud.retail.v2alpha.UpdateServingConfigRequest(this);
      if (bitField0_ != 0) {
        buildPartial0(result);
      }
      onBuilt();
      return result;
    }

    // Copies set fields from the builder into the freshly built message,
    // preferring the nested builder's value when one exists.
    private void buildPartial0(com.google.cloud.retail.v2alpha.UpdateServingConfigRequest result) {
      int from_bitField0_ = bitField0_;
      int to_bitField0_ = 0;
      if (((from_bitField0_ & 0x00000001) != 0)) {
        result.servingConfig_ =
            servingConfigBuilder_ == null ? servingConfig_ : servingConfigBuilder_.build();
        to_bitField0_ |= 0x00000001;
      }
      if (((from_bitField0_ & 0x00000002) != 0)) {
        result.updateMask_ = updateMaskBuilder_ == null ? updateMask_ : updateMaskBuilder_.build();
        to_bitField0_ |= 0x00000002;
      }
      result.bitField0_ |= to_bitField0_;
    }

    @java.lang.Override
    public Builder clone() {
      return super.clone();
    }

    @java.lang.Override
    public Builder setField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.setField(field, value);
    }

    @java.lang.Override
    public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
      return super.clearField(field);
    }

    @java.lang.Override
    public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
      return super.clearOneof(oneof);
    }

    @java.lang.Override
    public Builder setRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
      return super.setRepeatedField(field, index, value);
    }

    @java.lang.Override
    public Builder addRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.addRepeatedField(field, value);
    }

    @java.lang.Override
    public Builder mergeFrom(com.google.protobuf.Message other) {
      if (other instanceof com.google.cloud.retail.v2alpha.UpdateServingConfigRequest) {
        return mergeFrom((com.google.cloud.retail.v2alpha.UpdateServingConfigRequest) other);
      } else {
        super.mergeFrom(other);
        return this;
      }
    }

    public Builder mergeFrom(com.google.cloud.retail.v2alpha.UpdateServingConfigRequest other) {
      if (other == com.google.cloud.retail.v2alpha.UpdateServingConfigRequest.getDefaultInstance())
        return this;
      if (other.hasServingConfig()) {
        mergeServingConfig(other.getServingConfig());
      }
      if (other.hasUpdateMask()) {
        mergeUpdateMask(other.getUpdateMask());
      }
      this.mergeUnknownFields(other.getUnknownFields());
      onChanged();
      return this;
    }

    @java.lang.Override
    public final boolean isInitialized() {
      return true;
    }

    @java.lang.Override
    public Builder mergeFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      if (extensionRegistry == null) {
        throw new java.lang.NullPointerException();
      }
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          // Wire tags: 10 = field 1 (serving_config, length-delimited),
          //            18 = field 2 (update_mask, length-delimited).
          switch (tag) {
            case 0:
              done = true;
              break;
            case 10:
              {
                input.readMessage(getServingConfigFieldBuilder().getBuilder(), extensionRegistry);
                bitField0_ |= 0x00000001;
                break;
              } // case 10
            case 18:
              {
                input.readMessage(getUpdateMaskFieldBuilder().getBuilder(), extensionRegistry);
                bitField0_ |= 0x00000002;
                break;
              } // case 18
            default:
              {
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
          } // switch (tag)
        } // while (!done)
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.unwrapIOException();
      } finally {
        onChanged();
      } // finally
      return this;
    }

    private int bitField0_;

    private com.google.cloud.retail.v2alpha.ServingConfig servingConfig_;
    private com.google.protobuf.SingleFieldBuilderV3<
            com.google.cloud.retail.v2alpha.ServingConfig,
            com.google.cloud.retail.v2alpha.ServingConfig.Builder,
            com.google.cloud.retail.v2alpha.ServingConfigOrBuilder>
        servingConfigBuilder_;

    /**
     *
     *
     * <pre>
     * Required. The ServingConfig to update.
     * </pre>
     *
     * <code>
     * .google.cloud.retail.v2alpha.ServingConfig serving_config = 1 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     *
     * @return Whether the servingConfig field is set.
     */
    public boolean hasServingConfig() {
      return ((bitField0_ & 0x00000001) != 0);
    }

    /**
     *
     *
     * <pre>
     * Required. The ServingConfig to update.
     * </pre>
     *
     * <code>
     * .google.cloud.retail.v2alpha.ServingConfig serving_config = 1 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     *
     * @return The servingConfig.
     */
    public com.google.cloud.retail.v2alpha.ServingConfig getServingConfig() {
      if (servingConfigBuilder_ == null) {
        return servingConfig_ == null
            ? com.google.cloud.retail.v2alpha.ServingConfig.getDefaultInstance()
            : servingConfig_;
      } else {
        return servingConfigBuilder_.getMessage();
      }
    }

    /**
     *
     *
     * <pre>
     * Required. The ServingConfig to update.
     * </pre>
     *
     * <code>
     * .google.cloud.retail.v2alpha.ServingConfig serving_config = 1 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     */
    public Builder setServingConfig(com.google.cloud.retail.v2alpha.ServingConfig value) {
      if (servingConfigBuilder_ == null) {
        if (value == null) {
          throw new NullPointerException();
        }
        servingConfig_ = value;
      } else {
        servingConfigBuilder_.setMessage(value);
      }
      bitField0_ |= 0x00000001;
      onChanged();
      return this;
    }

    /**
     *
     *
     * <pre>
     * Required. The ServingConfig to update.
     * </pre>
     *
     * <code>
     * .google.cloud.retail.v2alpha.ServingConfig serving_config = 1 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     */
    public Builder setServingConfig(
        com.google.cloud.retail.v2alpha.ServingConfig.Builder builderForValue) {
      if (servingConfigBuilder_ == null) {
        servingConfig_ = builderForValue.build();
      } else {
        servingConfigBuilder_.setMessage(builderForValue.build());
      }
      bitField0_ |= 0x00000001;
      onChanged();
      return this;
    }

    /**
     *
     *
     * <pre>
     * Required. The ServingConfig to update.
     * </pre>
     *
     * <code>
     * .google.cloud.retail.v2alpha.ServingConfig serving_config = 1 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     */
    public Builder mergeServingConfig(com.google.cloud.retail.v2alpha.ServingConfig value) {
      if (servingConfigBuilder_ == null) {
        // Merge field-by-field only when a non-default value is already set;
        // otherwise adopt the incoming message wholesale.
        if (((bitField0_ & 0x00000001) != 0)
            && servingConfig_ != null
            && servingConfig_
                != com.google.cloud.retail.v2alpha.ServingConfig.getDefaultInstance()) {
          getServingConfigBuilder().mergeFrom(value);
        } else {
          servingConfig_ = value;
        }
      } else {
        servingConfigBuilder_.mergeFrom(value);
      }
      if (servingConfig_ != null) {
        bitField0_ |= 0x00000001;
        onChanged();
      }
      return this;
    }

    /**
     *
     *
     * <pre>
     * Required. The ServingConfig to update.
     * </pre>
     *
     * <code>
     * .google.cloud.retail.v2alpha.ServingConfig serving_config = 1 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     */
    public Builder clearServingConfig() {
      bitField0_ = (bitField0_ & ~0x00000001);
      servingConfig_ = null;
      if (servingConfigBuilder_ != null) {
        servingConfigBuilder_.dispose();
        servingConfigBuilder_ = null;
      }
      onChanged();
      return this;
    }

    /**
     *
     *
     * <pre>
     * Required. The ServingConfig to update.
     * </pre>
     *
     * <code>
     * .google.cloud.retail.v2alpha.ServingConfig serving_config = 1 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     */
    public com.google.cloud.retail.v2alpha.ServingConfig.Builder getServingConfigBuilder() {
      bitField0_ |= 0x00000001;
      onChanged();
      return getServingConfigFieldBuilder().getBuilder();
    }

    /**
     *
     *
     * <pre>
     * Required. The ServingConfig to update.
     * </pre>
     *
     * <code>
     * .google.cloud.retail.v2alpha.ServingConfig serving_config = 1 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     */
    public com.google.cloud.retail.v2alpha.ServingConfigOrBuilder getServingConfigOrBuilder() {
      if (servingConfigBuilder_ != null) {
        return servingConfigBuilder_.getMessageOrBuilder();
      } else {
        return servingConfig_ == null
            ? com.google.cloud.retail.v2alpha.ServingConfig.getDefaultInstance()
            : servingConfig_;
      }
    }

    /**
     *
     *
     * <pre>
     * Required. The ServingConfig to update.
     * </pre>
     *
     * <code>
     * .google.cloud.retail.v2alpha.ServingConfig serving_config = 1 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     */
    private com.google.protobuf.SingleFieldBuilderV3<
            com.google.cloud.retail.v2alpha.ServingConfig,
            com.google.cloud.retail.v2alpha.ServingConfig.Builder,
            com.google.cloud.retail.v2alpha.ServingConfigOrBuilder>
        getServingConfigFieldBuilder() {
      if (servingConfigBuilder_ == null) {
        servingConfigBuilder_ =
            new com.google.protobuf.SingleFieldBuilderV3<
                com.google.cloud.retail.v2alpha.ServingConfig,
                com.google.cloud.retail.v2alpha.ServingConfig.Builder,
                com.google.cloud.retail.v2alpha.ServingConfigOrBuilder>(
                getServingConfig(), getParentForChildren(), isClean());
        // Ownership transfers to the field builder once it exists.
        servingConfig_ = null;
      }
      return servingConfigBuilder_;
    }

    private com.google.protobuf.FieldMask updateMask_;
    private com.google.protobuf.SingleFieldBuilderV3<
            com.google.protobuf.FieldMask,
            com.google.protobuf.FieldMask.Builder,
            com.google.protobuf.FieldMaskOrBuilder>
        updateMaskBuilder_;

    /**
     *
     *
     * <pre>
     * Indicates which fields in the provided
     * [ServingConfig][google.cloud.retail.v2alpha.ServingConfig] to update. The
     * following are NOT supported:
     *
     * * [ServingConfig.name][google.cloud.retail.v2alpha.ServingConfig.name]
     *
     * If not set, all supported fields are updated.
     * </pre>
     *
     * <code>.google.protobuf.FieldMask update_mask = 2;</code>
     *
     * @return Whether the updateMask field is set.
     */
    public boolean hasUpdateMask() {
      return ((bitField0_ & 0x00000002) != 0);
    }

    /**
     *
     *
     * <pre>
     * Indicates which fields in the provided
     * [ServingConfig][google.cloud.retail.v2alpha.ServingConfig] to update. The
     * following are NOT supported:
     *
     * * [ServingConfig.name][google.cloud.retail.v2alpha.ServingConfig.name]
     *
     * If not set, all supported fields are updated.
     * </pre>
     *
     * <code>.google.protobuf.FieldMask update_mask = 2;</code>
     *
     * @return The updateMask.
     */
    public com.google.protobuf.FieldMask getUpdateMask() {
      if (updateMaskBuilder_ == null) {
        return updateMask_ == null
            ? com.google.protobuf.FieldMask.getDefaultInstance()
            : updateMask_;
      } else {
        return updateMaskBuilder_.getMessage();
      }
    }

    /**
     *
     *
     * <pre>
     * Indicates which fields in the provided
     * [ServingConfig][google.cloud.retail.v2alpha.ServingConfig] to update. The
     * following are NOT supported:
     *
     * * [ServingConfig.name][google.cloud.retail.v2alpha.ServingConfig.name]
     *
     * If not set, all supported fields are updated.
     * </pre>
     *
     * <code>.google.protobuf.FieldMask update_mask = 2;</code>
     */
    public Builder setUpdateMask(com.google.protobuf.FieldMask value) {
      if (updateMaskBuilder_ == null) {
        if (value == null) {
          throw new NullPointerException();
        }
        updateMask_ = value;
      } else {
        updateMaskBuilder_.setMessage(value);
      }
      bitField0_ |= 0x00000002;
      onChanged();
      return this;
    }

    /**
     *
     *
     * <pre>
     * Indicates which fields in the provided
     * [ServingConfig][google.cloud.retail.v2alpha.ServingConfig] to update. The
     * following are NOT supported:
     *
     * * [ServingConfig.name][google.cloud.retail.v2alpha.ServingConfig.name]
     *
     * If not set, all supported fields are updated.
     * </pre>
     *
     * <code>.google.protobuf.FieldMask update_mask = 2;</code>
     */
    public Builder setUpdateMask(com.google.protobuf.FieldMask.Builder builderForValue) {
      if (updateMaskBuilder_ == null) {
        updateMask_ = builderForValue.build();
      } else {
        updateMaskBuilder_.setMessage(builderForValue.build());
      }
      bitField0_ |= 0x00000002;
      onChanged();
      return this;
    }

    /**
     *
     *
     * <pre>
     * Indicates which fields in the provided
     * [ServingConfig][google.cloud.retail.v2alpha.ServingConfig] to update. The
     * following are NOT supported:
     *
     * * [ServingConfig.name][google.cloud.retail.v2alpha.ServingConfig.name]
     *
     * If not set, all supported fields are updated.
     * </pre>
     *
     * <code>.google.protobuf.FieldMask update_mask = 2;</code>
     */
    public Builder mergeUpdateMask(com.google.protobuf.FieldMask value) {
      if (updateMaskBuilder_ == null) {
        if (((bitField0_ & 0x00000002) != 0)
            && updateMask_ != null
            && updateMask_ != com.google.protobuf.FieldMask.getDefaultInstance()) {
          getUpdateMaskBuilder().mergeFrom(value);
        } else {
          updateMask_ = value;
        }
      } else {
        updateMaskBuilder_.mergeFrom(value);
      }
      if (updateMask_ != null) {
        bitField0_ |= 0x00000002;
        onChanged();
      }
      return this;
    }

    /**
     *
     *
     * <pre>
     * Indicates which fields in the provided
     * [ServingConfig][google.cloud.retail.v2alpha.ServingConfig] to update. The
     * following are NOT supported:
     *
     * * [ServingConfig.name][google.cloud.retail.v2alpha.ServingConfig.name]
     *
     * If not set, all supported fields are updated.
     * </pre>
     *
     * <code>.google.protobuf.FieldMask update_mask = 2;</code>
     */
    public Builder clearUpdateMask() {
      bitField0_ = (bitField0_ & ~0x00000002);
      updateMask_ = null;
      if (updateMaskBuilder_ != null) {
        updateMaskBuilder_.dispose();
        updateMaskBuilder_ = null;
      }
      onChanged();
      return this;
    }

    /**
     *
     *
     * <pre>
     * Indicates which fields in the provided
     * [ServingConfig][google.cloud.retail.v2alpha.ServingConfig] to update. The
     * following are NOT supported:
     *
     * * [ServingConfig.name][google.cloud.retail.v2alpha.ServingConfig.name]
     *
     * If not set, all supported fields are updated.
     * </pre>
     *
     * <code>.google.protobuf.FieldMask update_mask = 2;</code>
     */
    public com.google.protobuf.FieldMask.Builder getUpdateMaskBuilder() {
      bitField0_ |= 0x00000002;
      onChanged();
      return getUpdateMaskFieldBuilder().getBuilder();
    }

    /**
     *
     *
     * <pre>
     * Indicates which fields in the provided
     * [ServingConfig][google.cloud.retail.v2alpha.ServingConfig] to update. The
     * following are NOT supported:
     *
     * * [ServingConfig.name][google.cloud.retail.v2alpha.ServingConfig.name]
     *
     * If not set, all supported fields are updated.
     * </pre>
     *
     * <code>.google.protobuf.FieldMask update_mask = 2;</code>
     */
    public com.google.protobuf.FieldMaskOrBuilder getUpdateMaskOrBuilder() {
      if (updateMaskBuilder_ != null) {
        return updateMaskBuilder_.getMessageOrBuilder();
      } else {
        return updateMask_ == null
            ? com.google.protobuf.FieldMask.getDefaultInstance()
            : updateMask_;
      }
    }

    /**
     *
     *
     * <pre>
     * Indicates which fields in the provided
     * [ServingConfig][google.cloud.retail.v2alpha.ServingConfig] to update. The
     * following are NOT supported:
     *
     * * [ServingConfig.name][google.cloud.retail.v2alpha.ServingConfig.name]
     *
     * If not set, all supported fields are updated.
     * </pre>
     *
     * <code>.google.protobuf.FieldMask update_mask = 2;</code>
     */
    private com.google.protobuf.SingleFieldBuilderV3<
            com.google.protobuf.FieldMask,
            com.google.protobuf.FieldMask.Builder,
            com.google.protobuf.FieldMaskOrBuilder>
        getUpdateMaskFieldBuilder() {
      if (updateMaskBuilder_ == null) {
        updateMaskBuilder_ =
            new com.google.protobuf.SingleFieldBuilderV3<
                com.google.protobuf.FieldMask,
                com.google.protobuf.FieldMask.Builder,
                com.google.protobuf.FieldMaskOrBuilder>(
                getUpdateMask(), getParentForChildren(), isClean());
        updateMask_ = null;
      }
      return updateMaskBuilder_;
    }

    @java.lang.Override
    public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.setUnknownFields(unknownFields);
    }

    @java.lang.Override
    public final Builder mergeUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.mergeUnknownFields(unknownFields);
    }

    // @@protoc_insertion_point(builder_scope:google.cloud.retail.v2alpha.UpdateServingConfigRequest)
  }

  // @@protoc_insertion_point(class_scope:google.cloud.retail.v2alpha.UpdateServingConfigRequest)
  private static final com.google.cloud.retail.v2alpha.UpdateServingConfigRequest DEFAULT_INSTANCE;

  static {
    DEFAULT_INSTANCE = new com.google.cloud.retail.v2alpha.UpdateServingConfigRequest();
  }

  public static com.google.cloud.retail.v2alpha.UpdateServingConfigRequest getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }

  private static final com.google.protobuf.Parser<UpdateServingConfigRequest> PARSER =
      new com.google.protobuf.AbstractParser<UpdateServingConfigRequest>() {
        @java.lang.Override
        public UpdateServingConfigRequest parsePartialFrom(
            com.google.protobuf.CodedInputStream input,
            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
            throws com.google.protobuf.InvalidProtocolBufferException {
          Builder builder = newBuilder();
          try {
            builder.mergeFrom(input, extensionRegistry);
          } catch (com.google.protobuf.InvalidProtocolBufferException e) {
            // Attach the partially parsed message so callers can inspect it.
            throw e.setUnfinishedMessage(builder.buildPartial());
          } catch (com.google.protobuf.UninitializedMessageException e) {
            throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
          } catch (java.io.IOException e) {
            throw new com.google.protobuf.InvalidProtocolBufferException(e)
                .setUnfinishedMessage(builder.buildPartial());
          }
          return builder.buildPartial();
        }
      };

  public static com.google.protobuf.Parser<UpdateServingConfigRequest> parser() {
    return PARSER;
  }

  @java.lang.Override
  public com.google.protobuf.Parser<UpdateServingConfigRequest> getParserForType() {
    return PARSER;
  }

  @java.lang.Override
  public com.google.cloud.retail.v2alpha.UpdateServingConfigRequest getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }
}