repo_id stringclasses 875
values | size int64 974 38.9k | file_path stringlengths 10 308 | content stringlengths 974 38.9k |
|---|---|---|---|
googleapis/google-cloud-java | 36,985 | java-aiplatform/proto-google-cloud-aiplatform-v1/src/main/java/com/google/cloud/aiplatform/v1/ListAnnotationsResponse.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/aiplatform/v1/dataset_service.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.aiplatform.v1;
/**
*
*
* <pre>
* Response message for
* [DatasetService.ListAnnotations][google.cloud.aiplatform.v1.DatasetService.ListAnnotations].
* </pre>
*
* Protobuf type {@code google.cloud.aiplatform.v1.ListAnnotationsResponse}
*/
public final class ListAnnotationsResponse extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.aiplatform.v1.ListAnnotationsResponse)
ListAnnotationsResponseOrBuilder {
private static final long serialVersionUID = 0L;
// Use ListAnnotationsResponse.newBuilder() to construct.
private ListAnnotationsResponse(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private ListAnnotationsResponse() {
annotations_ = java.util.Collections.emptyList();
nextPageToken_ = "";
}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
return new ListAnnotationsResponse();
}
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.aiplatform.v1.DatasetServiceProto
.internal_static_google_cloud_aiplatform_v1_ListAnnotationsResponse_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.aiplatform.v1.DatasetServiceProto
.internal_static_google_cloud_aiplatform_v1_ListAnnotationsResponse_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.aiplatform.v1.ListAnnotationsResponse.class,
com.google.cloud.aiplatform.v1.ListAnnotationsResponse.Builder.class);
}
public static final int ANNOTATIONS_FIELD_NUMBER = 1;
@SuppressWarnings("serial")
private java.util.List<com.google.cloud.aiplatform.v1.Annotation> annotations_;
/**
*
*
* <pre>
* A list of Annotations that matches the specified filter in the request.
* </pre>
*
* <code>repeated .google.cloud.aiplatform.v1.Annotation annotations = 1;</code>
*/
@java.lang.Override
public java.util.List<com.google.cloud.aiplatform.v1.Annotation> getAnnotationsList() {
return annotations_;
}
/**
*
*
* <pre>
* A list of Annotations that matches the specified filter in the request.
* </pre>
*
* <code>repeated .google.cloud.aiplatform.v1.Annotation annotations = 1;</code>
*/
@java.lang.Override
public java.util.List<? extends com.google.cloud.aiplatform.v1.AnnotationOrBuilder>
getAnnotationsOrBuilderList() {
return annotations_;
}
/**
*
*
* <pre>
* A list of Annotations that matches the specified filter in the request.
* </pre>
*
* <code>repeated .google.cloud.aiplatform.v1.Annotation annotations = 1;</code>
*/
@java.lang.Override
public int getAnnotationsCount() {
return annotations_.size();
}
/**
*
*
* <pre>
* A list of Annotations that matches the specified filter in the request.
* </pre>
*
* <code>repeated .google.cloud.aiplatform.v1.Annotation annotations = 1;</code>
*/
@java.lang.Override
public com.google.cloud.aiplatform.v1.Annotation getAnnotations(int index) {
return annotations_.get(index);
}
/**
*
*
* <pre>
* A list of Annotations that matches the specified filter in the request.
* </pre>
*
* <code>repeated .google.cloud.aiplatform.v1.Annotation annotations = 1;</code>
*/
@java.lang.Override
public com.google.cloud.aiplatform.v1.AnnotationOrBuilder getAnnotationsOrBuilder(int index) {
return annotations_.get(index);
}
public static final int NEXT_PAGE_TOKEN_FIELD_NUMBER = 2;
@SuppressWarnings("serial")
private volatile java.lang.Object nextPageToken_ = "";
/**
*
*
* <pre>
* The standard List next-page token.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @return The nextPageToken.
*/
@java.lang.Override
public java.lang.String getNextPageToken() {
java.lang.Object ref = nextPageToken_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
nextPageToken_ = s;
return s;
}
}
/**
*
*
* <pre>
* The standard List next-page token.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @return The bytes for nextPageToken.
*/
@java.lang.Override
public com.google.protobuf.ByteString getNextPageTokenBytes() {
java.lang.Object ref = nextPageToken_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
nextPageToken_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
@java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
for (int i = 0; i < annotations_.size(); i++) {
output.writeMessage(1, annotations_.get(i));
}
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 2, nextPageToken_);
}
getUnknownFields().writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
for (int i = 0; i < annotations_.size(); i++) {
size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, annotations_.get(i));
}
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) {
size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, nextPageToken_);
}
size += getUnknownFields().getSerializedSize();
memoizedSize = size;
return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof com.google.cloud.aiplatform.v1.ListAnnotationsResponse)) {
return super.equals(obj);
}
com.google.cloud.aiplatform.v1.ListAnnotationsResponse other =
(com.google.cloud.aiplatform.v1.ListAnnotationsResponse) obj;
if (!getAnnotationsList().equals(other.getAnnotationsList())) return false;
if (!getNextPageToken().equals(other.getNextPageToken())) return false;
if (!getUnknownFields().equals(other.getUnknownFields())) return false;
return true;
}
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
if (getAnnotationsCount() > 0) {
hash = (37 * hash) + ANNOTATIONS_FIELD_NUMBER;
hash = (53 * hash) + getAnnotationsList().hashCode();
}
hash = (37 * hash) + NEXT_PAGE_TOKEN_FIELD_NUMBER;
hash = (53 * hash) + getNextPageToken().hashCode();
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
public static com.google.cloud.aiplatform.v1.ListAnnotationsResponse parseFrom(
java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.aiplatform.v1.ListAnnotationsResponse parseFrom(
java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.aiplatform.v1.ListAnnotationsResponse parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.aiplatform.v1.ListAnnotationsResponse parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.aiplatform.v1.ListAnnotationsResponse parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.aiplatform.v1.ListAnnotationsResponse parseFrom(
byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.aiplatform.v1.ListAnnotationsResponse parseFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.aiplatform.v1.ListAnnotationsResponse parseFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.aiplatform.v1.ListAnnotationsResponse parseDelimitedFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
}
public static com.google.cloud.aiplatform.v1.ListAnnotationsResponse parseDelimitedFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.aiplatform.v1.ListAnnotationsResponse parseFrom(
com.google.protobuf.CodedInputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.aiplatform.v1.ListAnnotationsResponse parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() {
return newBuilder();
}
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(
com.google.cloud.aiplatform.v1.ListAnnotationsResponse prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
*
*
* <pre>
* Response message for
* [DatasetService.ListAnnotations][google.cloud.aiplatform.v1.DatasetService.ListAnnotations].
* </pre>
*
* Protobuf type {@code google.cloud.aiplatform.v1.ListAnnotationsResponse}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.cloud.aiplatform.v1.ListAnnotationsResponse)
com.google.cloud.aiplatform.v1.ListAnnotationsResponseOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.aiplatform.v1.DatasetServiceProto
.internal_static_google_cloud_aiplatform_v1_ListAnnotationsResponse_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.aiplatform.v1.DatasetServiceProto
.internal_static_google_cloud_aiplatform_v1_ListAnnotationsResponse_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.aiplatform.v1.ListAnnotationsResponse.class,
com.google.cloud.aiplatform.v1.ListAnnotationsResponse.Builder.class);
}
// Construct using com.google.cloud.aiplatform.v1.ListAnnotationsResponse.newBuilder()
private Builder() {}
private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
}
@java.lang.Override
public Builder clear() {
super.clear();
bitField0_ = 0;
if (annotationsBuilder_ == null) {
annotations_ = java.util.Collections.emptyList();
} else {
annotations_ = null;
annotationsBuilder_.clear();
}
bitField0_ = (bitField0_ & ~0x00000001);
nextPageToken_ = "";
return this;
}
@java.lang.Override
public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
return com.google.cloud.aiplatform.v1.DatasetServiceProto
.internal_static_google_cloud_aiplatform_v1_ListAnnotationsResponse_descriptor;
}
@java.lang.Override
public com.google.cloud.aiplatform.v1.ListAnnotationsResponse getDefaultInstanceForType() {
return com.google.cloud.aiplatform.v1.ListAnnotationsResponse.getDefaultInstance();
}
@java.lang.Override
public com.google.cloud.aiplatform.v1.ListAnnotationsResponse build() {
com.google.cloud.aiplatform.v1.ListAnnotationsResponse result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public com.google.cloud.aiplatform.v1.ListAnnotationsResponse buildPartial() {
com.google.cloud.aiplatform.v1.ListAnnotationsResponse result =
new com.google.cloud.aiplatform.v1.ListAnnotationsResponse(this);
buildPartialRepeatedFields(result);
if (bitField0_ != 0) {
buildPartial0(result);
}
onBuilt();
return result;
}
private void buildPartialRepeatedFields(
com.google.cloud.aiplatform.v1.ListAnnotationsResponse result) {
if (annotationsBuilder_ == null) {
if (((bitField0_ & 0x00000001) != 0)) {
annotations_ = java.util.Collections.unmodifiableList(annotations_);
bitField0_ = (bitField0_ & ~0x00000001);
}
result.annotations_ = annotations_;
} else {
result.annotations_ = annotationsBuilder_.build();
}
}
private void buildPartial0(com.google.cloud.aiplatform.v1.ListAnnotationsResponse result) {
int from_bitField0_ = bitField0_;
if (((from_bitField0_ & 0x00000002) != 0)) {
result.nextPageToken_ = nextPageToken_;
}
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof com.google.cloud.aiplatform.v1.ListAnnotationsResponse) {
return mergeFrom((com.google.cloud.aiplatform.v1.ListAnnotationsResponse) other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(com.google.cloud.aiplatform.v1.ListAnnotationsResponse other) {
if (other == com.google.cloud.aiplatform.v1.ListAnnotationsResponse.getDefaultInstance())
return this;
if (annotationsBuilder_ == null) {
if (!other.annotations_.isEmpty()) {
if (annotations_.isEmpty()) {
annotations_ = other.annotations_;
bitField0_ = (bitField0_ & ~0x00000001);
} else {
ensureAnnotationsIsMutable();
annotations_.addAll(other.annotations_);
}
onChanged();
}
} else {
if (!other.annotations_.isEmpty()) {
if (annotationsBuilder_.isEmpty()) {
annotationsBuilder_.dispose();
annotationsBuilder_ = null;
annotations_ = other.annotations_;
bitField0_ = (bitField0_ & ~0x00000001);
annotationsBuilder_ =
com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders
? getAnnotationsFieldBuilder()
: null;
} else {
annotationsBuilder_.addAllMessages(other.annotations_);
}
}
}
if (!other.getNextPageToken().isEmpty()) {
nextPageToken_ = other.nextPageToken_;
bitField0_ |= 0x00000002;
onChanged();
}
this.mergeUnknownFields(other.getUnknownFields());
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
return true;
}
@java.lang.Override
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
case 10:
{
com.google.cloud.aiplatform.v1.Annotation m =
input.readMessage(
com.google.cloud.aiplatform.v1.Annotation.parser(), extensionRegistry);
if (annotationsBuilder_ == null) {
ensureAnnotationsIsMutable();
annotations_.add(m);
} else {
annotationsBuilder_.addMessage(m);
}
break;
} // case 10
case 18:
{
nextPageToken_ = input.readStringRequireUtf8();
bitField0_ |= 0x00000002;
break;
} // case 18
default:
{
if (!super.parseUnknownField(input, extensionRegistry, tag)) {
done = true; // was an endgroup tag
}
break;
} // default:
} // switch (tag)
} // while (!done)
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.unwrapIOException();
} finally {
onChanged();
} // finally
return this;
}
private int bitField0_;
private java.util.List<com.google.cloud.aiplatform.v1.Annotation> annotations_ =
java.util.Collections.emptyList();
private void ensureAnnotationsIsMutable() {
if (!((bitField0_ & 0x00000001) != 0)) {
annotations_ =
new java.util.ArrayList<com.google.cloud.aiplatform.v1.Annotation>(annotations_);
bitField0_ |= 0x00000001;
}
}
private com.google.protobuf.RepeatedFieldBuilderV3<
com.google.cloud.aiplatform.v1.Annotation,
com.google.cloud.aiplatform.v1.Annotation.Builder,
com.google.cloud.aiplatform.v1.AnnotationOrBuilder>
annotationsBuilder_;
/**
*
*
* <pre>
* A list of Annotations that matches the specified filter in the request.
* </pre>
*
* <code>repeated .google.cloud.aiplatform.v1.Annotation annotations = 1;</code>
*/
public java.util.List<com.google.cloud.aiplatform.v1.Annotation> getAnnotationsList() {
if (annotationsBuilder_ == null) {
return java.util.Collections.unmodifiableList(annotations_);
} else {
return annotationsBuilder_.getMessageList();
}
}
/**
*
*
* <pre>
* A list of Annotations that matches the specified filter in the request.
* </pre>
*
* <code>repeated .google.cloud.aiplatform.v1.Annotation annotations = 1;</code>
*/
public int getAnnotationsCount() {
if (annotationsBuilder_ == null) {
return annotations_.size();
} else {
return annotationsBuilder_.getCount();
}
}
/**
*
*
* <pre>
* A list of Annotations that matches the specified filter in the request.
* </pre>
*
* <code>repeated .google.cloud.aiplatform.v1.Annotation annotations = 1;</code>
*/
public com.google.cloud.aiplatform.v1.Annotation getAnnotations(int index) {
if (annotationsBuilder_ == null) {
return annotations_.get(index);
} else {
return annotationsBuilder_.getMessage(index);
}
}
/**
*
*
* <pre>
* A list of Annotations that matches the specified filter in the request.
* </pre>
*
* <code>repeated .google.cloud.aiplatform.v1.Annotation annotations = 1;</code>
*/
public Builder setAnnotations(int index, com.google.cloud.aiplatform.v1.Annotation value) {
if (annotationsBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureAnnotationsIsMutable();
annotations_.set(index, value);
onChanged();
} else {
annotationsBuilder_.setMessage(index, value);
}
return this;
}
/**
*
*
* <pre>
* A list of Annotations that matches the specified filter in the request.
* </pre>
*
* <code>repeated .google.cloud.aiplatform.v1.Annotation annotations = 1;</code>
*/
public Builder setAnnotations(
int index, com.google.cloud.aiplatform.v1.Annotation.Builder builderForValue) {
if (annotationsBuilder_ == null) {
ensureAnnotationsIsMutable();
annotations_.set(index, builderForValue.build());
onChanged();
} else {
annotationsBuilder_.setMessage(index, builderForValue.build());
}
return this;
}
/**
*
*
* <pre>
* A list of Annotations that matches the specified filter in the request.
* </pre>
*
* <code>repeated .google.cloud.aiplatform.v1.Annotation annotations = 1;</code>
*/
public Builder addAnnotations(com.google.cloud.aiplatform.v1.Annotation value) {
if (annotationsBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureAnnotationsIsMutable();
annotations_.add(value);
onChanged();
} else {
annotationsBuilder_.addMessage(value);
}
return this;
}
/**
*
*
* <pre>
* A list of Annotations that matches the specified filter in the request.
* </pre>
*
* <code>repeated .google.cloud.aiplatform.v1.Annotation annotations = 1;</code>
*/
public Builder addAnnotations(int index, com.google.cloud.aiplatform.v1.Annotation value) {
if (annotationsBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureAnnotationsIsMutable();
annotations_.add(index, value);
onChanged();
} else {
annotationsBuilder_.addMessage(index, value);
}
return this;
}
/**
*
*
* <pre>
* A list of Annotations that matches the specified filter in the request.
* </pre>
*
* <code>repeated .google.cloud.aiplatform.v1.Annotation annotations = 1;</code>
*/
public Builder addAnnotations(
com.google.cloud.aiplatform.v1.Annotation.Builder builderForValue) {
if (annotationsBuilder_ == null) {
ensureAnnotationsIsMutable();
annotations_.add(builderForValue.build());
onChanged();
} else {
annotationsBuilder_.addMessage(builderForValue.build());
}
return this;
}
/**
*
*
* <pre>
* A list of Annotations that matches the specified filter in the request.
* </pre>
*
* <code>repeated .google.cloud.aiplatform.v1.Annotation annotations = 1;</code>
*/
public Builder addAnnotations(
int index, com.google.cloud.aiplatform.v1.Annotation.Builder builderForValue) {
if (annotationsBuilder_ == null) {
ensureAnnotationsIsMutable();
annotations_.add(index, builderForValue.build());
onChanged();
} else {
annotationsBuilder_.addMessage(index, builderForValue.build());
}
return this;
}
/**
*
*
* <pre>
* A list of Annotations that matches the specified filter in the request.
* </pre>
*
* <code>repeated .google.cloud.aiplatform.v1.Annotation annotations = 1;</code>
*/
public Builder addAllAnnotations(
java.lang.Iterable<? extends com.google.cloud.aiplatform.v1.Annotation> values) {
if (annotationsBuilder_ == null) {
ensureAnnotationsIsMutable();
com.google.protobuf.AbstractMessageLite.Builder.addAll(values, annotations_);
onChanged();
} else {
annotationsBuilder_.addAllMessages(values);
}
return this;
}
/**
*
*
* <pre>
* A list of Annotations that matches the specified filter in the request.
* </pre>
*
* <code>repeated .google.cloud.aiplatform.v1.Annotation annotations = 1;</code>
*/
public Builder clearAnnotations() {
if (annotationsBuilder_ == null) {
annotations_ = java.util.Collections.emptyList();
bitField0_ = (bitField0_ & ~0x00000001);
onChanged();
} else {
annotationsBuilder_.clear();
}
return this;
}
/**
*
*
* <pre>
* A list of Annotations that matches the specified filter in the request.
* </pre>
*
* <code>repeated .google.cloud.aiplatform.v1.Annotation annotations = 1;</code>
*/
public Builder removeAnnotations(int index) {
if (annotationsBuilder_ == null) {
ensureAnnotationsIsMutable();
annotations_.remove(index);
onChanged();
} else {
annotationsBuilder_.remove(index);
}
return this;
}
/**
*
*
* <pre>
* A list of Annotations that matches the specified filter in the request.
* </pre>
*
* <code>repeated .google.cloud.aiplatform.v1.Annotation annotations = 1;</code>
*/
public com.google.cloud.aiplatform.v1.Annotation.Builder getAnnotationsBuilder(int index) {
return getAnnotationsFieldBuilder().getBuilder(index);
}
/**
*
*
* <pre>
* A list of Annotations that matches the specified filter in the request.
* </pre>
*
* <code>repeated .google.cloud.aiplatform.v1.Annotation annotations = 1;</code>
*/
public com.google.cloud.aiplatform.v1.AnnotationOrBuilder getAnnotationsOrBuilder(int index) {
if (annotationsBuilder_ == null) {
return annotations_.get(index);
} else {
return annotationsBuilder_.getMessageOrBuilder(index);
}
}
/**
*
*
* <pre>
* A list of Annotations that matches the specified filter in the request.
* </pre>
*
* <code>repeated .google.cloud.aiplatform.v1.Annotation annotations = 1;</code>
*/
public java.util.List<? extends com.google.cloud.aiplatform.v1.AnnotationOrBuilder>
getAnnotationsOrBuilderList() {
if (annotationsBuilder_ != null) {
return annotationsBuilder_.getMessageOrBuilderList();
} else {
return java.util.Collections.unmodifiableList(annotations_);
}
}
/**
*
*
* <pre>
* A list of Annotations that matches the specified filter in the request.
* </pre>
*
* <code>repeated .google.cloud.aiplatform.v1.Annotation annotations = 1;</code>
*/
public com.google.cloud.aiplatform.v1.Annotation.Builder addAnnotationsBuilder() {
return getAnnotationsFieldBuilder()
.addBuilder(com.google.cloud.aiplatform.v1.Annotation.getDefaultInstance());
}
/**
*
*
* <pre>
* A list of Annotations that matches the specified filter in the request.
* </pre>
*
* <code>repeated .google.cloud.aiplatform.v1.Annotation annotations = 1;</code>
*/
public com.google.cloud.aiplatform.v1.Annotation.Builder addAnnotationsBuilder(int index) {
return getAnnotationsFieldBuilder()
.addBuilder(index, com.google.cloud.aiplatform.v1.Annotation.getDefaultInstance());
}
/**
*
*
* <pre>
* A list of Annotations that matches the specified filter in the request.
* </pre>
*
* <code>repeated .google.cloud.aiplatform.v1.Annotation annotations = 1;</code>
*/
public java.util.List<com.google.cloud.aiplatform.v1.Annotation.Builder>
getAnnotationsBuilderList() {
return getAnnotationsFieldBuilder().getBuilderList();
}
private com.google.protobuf.RepeatedFieldBuilderV3<
com.google.cloud.aiplatform.v1.Annotation,
com.google.cloud.aiplatform.v1.Annotation.Builder,
com.google.cloud.aiplatform.v1.AnnotationOrBuilder>
getAnnotationsFieldBuilder() {
if (annotationsBuilder_ == null) {
annotationsBuilder_ =
new com.google.protobuf.RepeatedFieldBuilderV3<
com.google.cloud.aiplatform.v1.Annotation,
com.google.cloud.aiplatform.v1.Annotation.Builder,
com.google.cloud.aiplatform.v1.AnnotationOrBuilder>(
annotations_, ((bitField0_ & 0x00000001) != 0), getParentForChildren(), isClean());
annotations_ = null;
}
return annotationsBuilder_;
}
private java.lang.Object nextPageToken_ = "";
/**
*
*
* <pre>
* The standard List next-page token.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @return The nextPageToken.
*/
public java.lang.String getNextPageToken() {
java.lang.Object ref = nextPageToken_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
nextPageToken_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
*
*
* <pre>
* The standard List next-page token.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @return The bytes for nextPageToken.
*/
public com.google.protobuf.ByteString getNextPageTokenBytes() {
java.lang.Object ref = nextPageToken_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
nextPageToken_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
*
*
* <pre>
* The standard List next-page token.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @param value The nextPageToken to set.
* @return This builder for chaining.
*/
public Builder setNextPageToken(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
nextPageToken_ = value;
bitField0_ |= 0x00000002;
onChanged();
return this;
}
/**
*
*
* <pre>
* The standard List next-page token.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @return This builder for chaining.
*/
public Builder clearNextPageToken() {
nextPageToken_ = getDefaultInstance().getNextPageToken();
bitField0_ = (bitField0_ & ~0x00000002);
onChanged();
return this;
}
/**
*
*
* <pre>
* The standard List next-page token.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @param value The bytes for nextPageToken to set.
* @return This builder for chaining.
*/
public Builder setNextPageTokenBytes(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
checkByteStringIsUtf8(value);
nextPageToken_ = value;
bitField0_ |= 0x00000002;
onChanged();
return this;
}
@java.lang.Override
public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:google.cloud.aiplatform.v1.ListAnnotationsResponse)
}
// @@protoc_insertion_point(class_scope:google.cloud.aiplatform.v1.ListAnnotationsResponse)
private static final com.google.cloud.aiplatform.v1.ListAnnotationsResponse DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new com.google.cloud.aiplatform.v1.ListAnnotationsResponse();
}
public static com.google.cloud.aiplatform.v1.ListAnnotationsResponse getDefaultInstance() {
return DEFAULT_INSTANCE;
}
private static final com.google.protobuf.Parser<ListAnnotationsResponse> PARSER =
new com.google.protobuf.AbstractParser<ListAnnotationsResponse>() {
@java.lang.Override
public ListAnnotationsResponse parsePartialFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
Builder builder = newBuilder();
try {
builder.mergeFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(builder.buildPartial());
} catch (com.google.protobuf.UninitializedMessageException e) {
throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
} catch (java.io.IOException e) {
throw new com.google.protobuf.InvalidProtocolBufferException(e)
.setUnfinishedMessage(builder.buildPartial());
}
return builder.buildPartial();
}
};
public static com.google.protobuf.Parser<ListAnnotationsResponse> parser() {
return PARSER;
}
@java.lang.Override
public com.google.protobuf.Parser<ListAnnotationsResponse> getParserForType() {
return PARSER;
}
@java.lang.Override
public com.google.cloud.aiplatform.v1.ListAnnotationsResponse getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
|
googleads/google-ads-java | 37,221 | google-ads-stubs-v19/src/main/java/com/google/ads/googleads/v19/resources/AssetFieldTypePolicySummary.java | // Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/ads/googleads/v19/resources/asset.proto
// Protobuf Java Version: 3.25.7
package com.google.ads.googleads.v19.resources;
/**
* <pre>
* Contains policy information for an asset under AssetFieldType context.
* </pre>
*
* Protobuf type {@code google.ads.googleads.v19.resources.AssetFieldTypePolicySummary}
*/
// NOTE(review): protoc-generated message class (file is marked DO NOT EDIT).
// Comments below are orientation aids only; the code must stay in sync with
// the generator's output for google/ads/googleads/v19/resources/asset.proto.
public final class AssetFieldTypePolicySummary extends
    com.google.protobuf.GeneratedMessageV3 implements
    // @@protoc_insertion_point(message_implements:google.ads.googleads.v19.resources.AssetFieldTypePolicySummary)
    AssetFieldTypePolicySummaryOrBuilder {
private static final long serialVersionUID = 0L;
  // Use AssetFieldTypePolicySummary.newBuilder() to construct.
  private AssetFieldTypePolicySummary(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
    super(builder);
  }
  private AssetFieldTypePolicySummary() {
    assetFieldType_ = 0;
    assetSource_ = 0;
  }

  @java.lang.Override
  @SuppressWarnings({"unused"})
  protected java.lang.Object newInstance(
      UnusedPrivateParameter unused) {
    return new AssetFieldTypePolicySummary();
  }

  public static final com.google.protobuf.Descriptors.Descriptor
      getDescriptor() {
    return com.google.ads.googleads.v19.resources.AssetProto.internal_static_google_ads_googleads_v19_resources_AssetFieldTypePolicySummary_descriptor;
  }

  @java.lang.Override
  protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internalGetFieldAccessorTable() {
    return com.google.ads.googleads.v19.resources.AssetProto.internal_static_google_ads_googleads_v19_resources_AssetFieldTypePolicySummary_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            com.google.ads.googleads.v19.resources.AssetFieldTypePolicySummary.class, com.google.ads.googleads.v19.resources.AssetFieldTypePolicySummary.Builder.class);
  }

  // Presence bits for the three optional fields:
  // 0x1 = asset_field_type, 0x2 = asset_source, 0x4 = policy_summary_info.
  private int bitField0_;
  public static final int ASSET_FIELD_TYPE_FIELD_NUMBER = 1;
  // Stored as the raw wire enum number so unrecognized values round-trip.
  private int assetFieldType_ = 0;
  /**
   * <pre>
   * Output only. FieldType of this asset.
   * </pre>
   *
   * <code>optional .google.ads.googleads.v19.enums.AssetFieldTypeEnum.AssetFieldType asset_field_type = 1 [(.google.api.field_behavior) = OUTPUT_ONLY];</code>
   * @return Whether the assetFieldType field is set.
   */
  @java.lang.Override public boolean hasAssetFieldType() {
    return ((bitField0_ & 0x00000001) != 0);
  }
  /**
   * <pre>
   * Output only. FieldType of this asset.
   * </pre>
   *
   * <code>optional .google.ads.googleads.v19.enums.AssetFieldTypeEnum.AssetFieldType asset_field_type = 1 [(.google.api.field_behavior) = OUTPUT_ONLY];</code>
   * @return The enum numeric value on the wire for assetFieldType.
   */
  @java.lang.Override public int getAssetFieldTypeValue() {
    return assetFieldType_;
  }
  /**
   * <pre>
   * Output only. FieldType of this asset.
   * </pre>
   *
   * <code>optional .google.ads.googleads.v19.enums.AssetFieldTypeEnum.AssetFieldType asset_field_type = 1 [(.google.api.field_behavior) = OUTPUT_ONLY];</code>
   * @return The assetFieldType.
   */
  @java.lang.Override public com.google.ads.googleads.v19.enums.AssetFieldTypeEnum.AssetFieldType getAssetFieldType() {
    // forNumber returns null for wire values not in this enum version;
    // those are surfaced as UNRECOGNIZED rather than dropped.
    com.google.ads.googleads.v19.enums.AssetFieldTypeEnum.AssetFieldType result = com.google.ads.googleads.v19.enums.AssetFieldTypeEnum.AssetFieldType.forNumber(assetFieldType_);
    return result == null ? com.google.ads.googleads.v19.enums.AssetFieldTypeEnum.AssetFieldType.UNRECOGNIZED : result;
  }

  public static final int ASSET_SOURCE_FIELD_NUMBER = 2;
  private int assetSource_ = 0;
  /**
   * <pre>
   * Output only. Source of this asset.
   * </pre>
   *
   * <code>optional .google.ads.googleads.v19.enums.AssetSourceEnum.AssetSource asset_source = 2 [(.google.api.field_behavior) = OUTPUT_ONLY];</code>
   * @return Whether the assetSource field is set.
   */
  @java.lang.Override public boolean hasAssetSource() {
    return ((bitField0_ & 0x00000002) != 0);
  }
  /**
   * <pre>
   * Output only. Source of this asset.
   * </pre>
   *
   * <code>optional .google.ads.googleads.v19.enums.AssetSourceEnum.AssetSource asset_source = 2 [(.google.api.field_behavior) = OUTPUT_ONLY];</code>
   * @return The enum numeric value on the wire for assetSource.
   */
  @java.lang.Override public int getAssetSourceValue() {
    return assetSource_;
  }
  /**
   * <pre>
   * Output only. Source of this asset.
   * </pre>
   *
   * <code>optional .google.ads.googleads.v19.enums.AssetSourceEnum.AssetSource asset_source = 2 [(.google.api.field_behavior) = OUTPUT_ONLY];</code>
   * @return The assetSource.
   */
  @java.lang.Override public com.google.ads.googleads.v19.enums.AssetSourceEnum.AssetSource getAssetSource() {
    com.google.ads.googleads.v19.enums.AssetSourceEnum.AssetSource result = com.google.ads.googleads.v19.enums.AssetSourceEnum.AssetSource.forNumber(assetSource_);
    return result == null ? com.google.ads.googleads.v19.enums.AssetSourceEnum.AssetSource.UNRECOGNIZED : result;
  }

  public static final int POLICY_SUMMARY_INFO_FIELD_NUMBER = 3;
  private com.google.ads.googleads.v19.resources.AssetPolicySummary policySummaryInfo_;
  /**
   * <pre>
   * Output only. Policy summary.
   * </pre>
   *
   * <code>optional .google.ads.googleads.v19.resources.AssetPolicySummary policy_summary_info = 3 [(.google.api.field_behavior) = OUTPUT_ONLY];</code>
   * @return Whether the policySummaryInfo field is set.
   */
  @java.lang.Override
  public boolean hasPolicySummaryInfo() {
    return ((bitField0_ & 0x00000004) != 0);
  }
  /**
   * <pre>
   * Output only. Policy summary.
   * </pre>
   *
   * <code>optional .google.ads.googleads.v19.resources.AssetPolicySummary policy_summary_info = 3 [(.google.api.field_behavior) = OUTPUT_ONLY];</code>
   * @return The policySummaryInfo.
   */
  @java.lang.Override
  public com.google.ads.googleads.v19.resources.AssetPolicySummary getPolicySummaryInfo() {
    // Never returns null: falls back to the default instance when unset.
    return policySummaryInfo_ == null ? com.google.ads.googleads.v19.resources.AssetPolicySummary.getDefaultInstance() : policySummaryInfo_;
  }
  /**
   * <pre>
   * Output only. Policy summary.
   * </pre>
   *
   * <code>optional .google.ads.googleads.v19.resources.AssetPolicySummary policy_summary_info = 3 [(.google.api.field_behavior) = OUTPUT_ONLY];</code>
   */
  @java.lang.Override
  public com.google.ads.googleads.v19.resources.AssetPolicySummaryOrBuilder getPolicySummaryInfoOrBuilder() {
    return policySummaryInfo_ == null ? com.google.ads.googleads.v19.resources.AssetPolicySummary.getDefaultInstance() : policySummaryInfo_;
  }

  // Memoized initialization state: -1 = not yet computed, 0 = false, 1 = true.
  private byte memoizedIsInitialized = -1;
  @java.lang.Override
  public final boolean isInitialized() {
    byte isInitialized = memoizedIsInitialized;
    if (isInitialized == 1) return true;
    if (isInitialized == 0) return false;

    memoizedIsInitialized = 1;
    return true;
  }

  @java.lang.Override
  public void writeTo(com.google.protobuf.CodedOutputStream output)
                      throws java.io.IOException {
    // Fields are written in ascending field-number order, skipping unset ones.
    if (((bitField0_ & 0x00000001) != 0)) {
      output.writeEnum(1, assetFieldType_);
    }
    if (((bitField0_ & 0x00000002) != 0)) {
      output.writeEnum(2, assetSource_);
    }
    if (((bitField0_ & 0x00000004) != 0)) {
      output.writeMessage(3, getPolicySummaryInfo());
    }
    getUnknownFields().writeTo(output);
  }

  @java.lang.Override
  public int getSerializedSize() {
    // memoizedSize caches the computed size; -1 means not yet computed.
    int size = memoizedSize;
    if (size != -1) return size;

    size = 0;
    if (((bitField0_ & 0x00000001) != 0)) {
      size += com.google.protobuf.CodedOutputStream
        .computeEnumSize(1, assetFieldType_);
    }
    if (((bitField0_ & 0x00000002) != 0)) {
      size += com.google.protobuf.CodedOutputStream
        .computeEnumSize(2, assetSource_);
    }
    if (((bitField0_ & 0x00000004) != 0)) {
      size += com.google.protobuf.CodedOutputStream
        .computeMessageSize(3, getPolicySummaryInfo());
    }
    size += getUnknownFields().getSerializedSize();
    memoizedSize = size;
    return size;
  }

  @java.lang.Override
  public boolean equals(final java.lang.Object obj) {
    if (obj == this) {
     return true;
    }
    if (!(obj instanceof com.google.ads.googleads.v19.resources.AssetFieldTypePolicySummary)) {
      return super.equals(obj);
    }
    com.google.ads.googleads.v19.resources.AssetFieldTypePolicySummary other = (com.google.ads.googleads.v19.resources.AssetFieldTypePolicySummary) obj;

    // For optional fields, presence must match before values are compared.
    if (hasAssetFieldType() != other.hasAssetFieldType()) return false;
    if (hasAssetFieldType()) {
      if (assetFieldType_ != other.assetFieldType_) return false;
    }
    if (hasAssetSource() != other.hasAssetSource()) return false;
    if (hasAssetSource()) {
      if (assetSource_ != other.assetSource_) return false;
    }
    if (hasPolicySummaryInfo() != other.hasPolicySummaryInfo()) return false;
    if (hasPolicySummaryInfo()) {
      if (!getPolicySummaryInfo()
          .equals(other.getPolicySummaryInfo())) return false;
    }
    if (!getUnknownFields().equals(other.getUnknownFields())) return false;
    return true;
  }

  @java.lang.Override
  public int hashCode() {
    if (memoizedHashCode != 0) {
      return memoizedHashCode;
    }
    int hash = 41;
    hash = (19 * hash) + getDescriptor().hashCode();
    if (hasAssetFieldType()) {
      hash = (37 * hash) + ASSET_FIELD_TYPE_FIELD_NUMBER;
      hash = (53 * hash) + assetFieldType_;
    }
    if (hasAssetSource()) {
      hash = (37 * hash) + ASSET_SOURCE_FIELD_NUMBER;
      hash = (53 * hash) + assetSource_;
    }
    if (hasPolicySummaryInfo()) {
      hash = (37 * hash) + POLICY_SUMMARY_INFO_FIELD_NUMBER;
      hash = (53 * hash) + getPolicySummaryInfo().hashCode();
    }
    hash = (29 * hash) + getUnknownFields().hashCode();
    memoizedHashCode = hash;
    return hash;
  }

  public static com.google.ads.googleads.v19.resources.AssetFieldTypePolicySummary parseFrom(
      java.nio.ByteBuffer data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.ads.googleads.v19.resources.AssetFieldTypePolicySummary parseFrom(
      java.nio.ByteBuffer data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.ads.googleads.v19.resources.AssetFieldTypePolicySummary parseFrom(
      com.google.protobuf.ByteString data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.ads.googleads.v19.resources.AssetFieldTypePolicySummary parseFrom(
      com.google.protobuf.ByteString data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.ads.googleads.v19.resources.AssetFieldTypePolicySummary parseFrom(byte[] data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.ads.googleads.v19.resources.AssetFieldTypePolicySummary parseFrom(
      byte[] data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.ads.googleads.v19.resources.AssetFieldTypePolicySummary parseFrom(java.io.InputStream input)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3
        .parseWithIOException(PARSER, input);
  }
  public static com.google.ads.googleads.v19.resources.AssetFieldTypePolicySummary parseFrom(
      java.io.InputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3
        .parseWithIOException(PARSER, input, extensionRegistry);
  }
  public static com.google.ads.googleads.v19.resources.AssetFieldTypePolicySummary parseDelimitedFrom(java.io.InputStream input)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3
        .parseDelimitedWithIOException(PARSER, input);
  }
  public static com.google.ads.googleads.v19.resources.AssetFieldTypePolicySummary parseDelimitedFrom(
      java.io.InputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3
        .parseDelimitedWithIOException(PARSER, input, extensionRegistry);
  }
  public static com.google.ads.googleads.v19.resources.AssetFieldTypePolicySummary parseFrom(
      com.google.protobuf.CodedInputStream input)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3
        .parseWithIOException(PARSER, input);
  }
  public static com.google.ads.googleads.v19.resources.AssetFieldTypePolicySummary parseFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3
        .parseWithIOException(PARSER, input, extensionRegistry);
  }

  @java.lang.Override
  public Builder newBuilderForType() { return newBuilder(); }
  public static Builder newBuilder() {
    return DEFAULT_INSTANCE.toBuilder();
  }
  public static Builder newBuilder(com.google.ads.googleads.v19.resources.AssetFieldTypePolicySummary prototype) {
    return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
  }
  @java.lang.Override
  public Builder toBuilder() {
    return this == DEFAULT_INSTANCE
        ? new Builder() : new Builder().mergeFrom(this);
  }

  @java.lang.Override
  protected Builder newBuilderForType(
      com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
    Builder builder = new Builder(parent);
    return builder;
  }
  /**
   * <pre>
   * Contains policy information for an asset under AssetFieldType context.
   * </pre>
   *
   * Protobuf type {@code google.ads.googleads.v19.resources.AssetFieldTypePolicySummary}
   */
  public static final class Builder extends
      com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements
      // @@protoc_insertion_point(builder_implements:google.ads.googleads.v19.resources.AssetFieldTypePolicySummary)
      com.google.ads.googleads.v19.resources.AssetFieldTypePolicySummaryOrBuilder {
    public static final com.google.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return com.google.ads.googleads.v19.resources.AssetProto.internal_static_google_ads_googleads_v19_resources_AssetFieldTypePolicySummary_descriptor;
    }

    @java.lang.Override
    protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return com.google.ads.googleads.v19.resources.AssetProto.internal_static_google_ads_googleads_v19_resources_AssetFieldTypePolicySummary_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              com.google.ads.googleads.v19.resources.AssetFieldTypePolicySummary.class, com.google.ads.googleads.v19.resources.AssetFieldTypePolicySummary.Builder.class);
    }

    // Construct using com.google.ads.googleads.v19.resources.AssetFieldTypePolicySummary.newBuilder()
    private Builder() {
      maybeForceBuilderInitialization();
    }

    private Builder(
        com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
      super(parent);
      maybeForceBuilderInitialization();
    }
    private void maybeForceBuilderInitialization() {
      // Eagerly create nested-message field builders when the runtime
      // requires it (alwaysUseFieldBuilders is true in some configurations).
      if (com.google.protobuf.GeneratedMessageV3
              .alwaysUseFieldBuilders) {
        getPolicySummaryInfoFieldBuilder();
      }
    }
    @java.lang.Override
    public Builder clear() {
      super.clear();
      bitField0_ = 0;
      assetFieldType_ = 0;
      assetSource_ = 0;
      policySummaryInfo_ = null;
      if (policySummaryInfoBuilder_ != null) {
        policySummaryInfoBuilder_.dispose();
        policySummaryInfoBuilder_ = null;
      }
      return this;
    }

    @java.lang.Override
    public com.google.protobuf.Descriptors.Descriptor
        getDescriptorForType() {
      return com.google.ads.googleads.v19.resources.AssetProto.internal_static_google_ads_googleads_v19_resources_AssetFieldTypePolicySummary_descriptor;
    }

    @java.lang.Override
    public com.google.ads.googleads.v19.resources.AssetFieldTypePolicySummary getDefaultInstanceForType() {
      return com.google.ads.googleads.v19.resources.AssetFieldTypePolicySummary.getDefaultInstance();
    }

    @java.lang.Override
    public com.google.ads.googleads.v19.resources.AssetFieldTypePolicySummary build() {
      com.google.ads.googleads.v19.resources.AssetFieldTypePolicySummary result = buildPartial();
      if (!result.isInitialized()) {
        throw newUninitializedMessageException(result);
      }
      return result;
    }

    @java.lang.Override
    public com.google.ads.googleads.v19.resources.AssetFieldTypePolicySummary buildPartial() {
      com.google.ads.googleads.v19.resources.AssetFieldTypePolicySummary result = new com.google.ads.googleads.v19.resources.AssetFieldTypePolicySummary(this);
      if (bitField0_ != 0) { buildPartial0(result); }
      onBuilt();
      return result;
    }

    private void buildPartial0(com.google.ads.googleads.v19.resources.AssetFieldTypePolicySummary result) {
      // Copies only the fields whose presence bits are set in the builder.
      int from_bitField0_ = bitField0_;
      int to_bitField0_ = 0;
      if (((from_bitField0_ & 0x00000001) != 0)) {
        result.assetFieldType_ = assetFieldType_;
        to_bitField0_ |= 0x00000001;
      }
      if (((from_bitField0_ & 0x00000002) != 0)) {
        result.assetSource_ = assetSource_;
        to_bitField0_ |= 0x00000002;
      }
      if (((from_bitField0_ & 0x00000004) != 0)) {
        result.policySummaryInfo_ = policySummaryInfoBuilder_ == null
            ? policySummaryInfo_
            : policySummaryInfoBuilder_.build();
        to_bitField0_ |= 0x00000004;
      }
      result.bitField0_ |= to_bitField0_;
    }

    @java.lang.Override
    public Builder clone() {
      return super.clone();
    }
    @java.lang.Override
    public Builder setField(
        com.google.protobuf.Descriptors.FieldDescriptor field,
        java.lang.Object value) {
      return super.setField(field, value);
    }
    @java.lang.Override
    public Builder clearField(
        com.google.protobuf.Descriptors.FieldDescriptor field) {
      return super.clearField(field);
    }
    @java.lang.Override
    public Builder clearOneof(
        com.google.protobuf.Descriptors.OneofDescriptor oneof) {
      return super.clearOneof(oneof);
    }
    @java.lang.Override
    public Builder setRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field,
        int index, java.lang.Object value) {
      return super.setRepeatedField(field, index, value);
    }
    @java.lang.Override
    public Builder addRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field,
        java.lang.Object value) {
      return super.addRepeatedField(field, value);
    }
    @java.lang.Override
    public Builder mergeFrom(com.google.protobuf.Message other) {
      if (other instanceof com.google.ads.googleads.v19.resources.AssetFieldTypePolicySummary) {
        return mergeFrom((com.google.ads.googleads.v19.resources.AssetFieldTypePolicySummary)other);
      } else {
        super.mergeFrom(other);
        return this;
      }
    }

    public Builder mergeFrom(com.google.ads.googleads.v19.resources.AssetFieldTypePolicySummary other) {
      if (other == com.google.ads.googleads.v19.resources.AssetFieldTypePolicySummary.getDefaultInstance()) return this;
      if (other.hasAssetFieldType()) {
        setAssetFieldType(other.getAssetFieldType());
      }
      if (other.hasAssetSource()) {
        setAssetSource(other.getAssetSource());
      }
      if (other.hasPolicySummaryInfo()) {
        mergePolicySummaryInfo(other.getPolicySummaryInfo());
      }
      this.mergeUnknownFields(other.getUnknownFields());
      onChanged();
      return this;
    }

    @java.lang.Override
    public final boolean isInitialized() {
      return true;
    }

    @java.lang.Override
    public Builder mergeFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      if (extensionRegistry == null) {
        throw new java.lang.NullPointerException();
      }
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              done = true;
              break;
            case 8: {
              assetFieldType_ = input.readEnum();
              bitField0_ |= 0x00000001;
              break;
            } // case 8
            case 16: {
              assetSource_ = input.readEnum();
              bitField0_ |= 0x00000002;
              break;
            } // case 16
            case 26: {
              input.readMessage(
                  getPolicySummaryInfoFieldBuilder().getBuilder(),
                  extensionRegistry);
              bitField0_ |= 0x00000004;
              break;
            } // case 26
            default: {
              if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                done = true; // was an endgroup tag
              }
              break;
            } // default:
          } // switch (tag)
        } // while (!done)
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.unwrapIOException();
      } finally {
        onChanged();
      } // finally
      return this;
    }
    // Builder-side presence bits mirror the message's bitField0_ layout.
    private int bitField0_;

    private int assetFieldType_ = 0;
    /**
     * <pre>
     * Output only. FieldType of this asset.
     * </pre>
     *
     * <code>optional .google.ads.googleads.v19.enums.AssetFieldTypeEnum.AssetFieldType asset_field_type = 1 [(.google.api.field_behavior) = OUTPUT_ONLY];</code>
     * @return Whether the assetFieldType field is set.
     */
    @java.lang.Override public boolean hasAssetFieldType() {
      return ((bitField0_ & 0x00000001) != 0);
    }
    /**
     * <pre>
     * Output only. FieldType of this asset.
     * </pre>
     *
     * <code>optional .google.ads.googleads.v19.enums.AssetFieldTypeEnum.AssetFieldType asset_field_type = 1 [(.google.api.field_behavior) = OUTPUT_ONLY];</code>
     * @return The enum numeric value on the wire for assetFieldType.
     */
    @java.lang.Override public int getAssetFieldTypeValue() {
      return assetFieldType_;
    }
    /**
     * <pre>
     * Output only. FieldType of this asset.
     * </pre>
     *
     * <code>optional .google.ads.googleads.v19.enums.AssetFieldTypeEnum.AssetFieldType asset_field_type = 1 [(.google.api.field_behavior) = OUTPUT_ONLY];</code>
     * @param value The enum numeric value on the wire for assetFieldType to set.
     * @return This builder for chaining.
     */
    public Builder setAssetFieldTypeValue(int value) {
      assetFieldType_ = value;
      bitField0_ |= 0x00000001;
      onChanged();
      return this;
    }
    /**
     * <pre>
     * Output only. FieldType of this asset.
     * </pre>
     *
     * <code>optional .google.ads.googleads.v19.enums.AssetFieldTypeEnum.AssetFieldType asset_field_type = 1 [(.google.api.field_behavior) = OUTPUT_ONLY];</code>
     * @return The assetFieldType.
     */
    @java.lang.Override
    public com.google.ads.googleads.v19.enums.AssetFieldTypeEnum.AssetFieldType getAssetFieldType() {
      com.google.ads.googleads.v19.enums.AssetFieldTypeEnum.AssetFieldType result = com.google.ads.googleads.v19.enums.AssetFieldTypeEnum.AssetFieldType.forNumber(assetFieldType_);
      return result == null ? com.google.ads.googleads.v19.enums.AssetFieldTypeEnum.AssetFieldType.UNRECOGNIZED : result;
    }
    /**
     * <pre>
     * Output only. FieldType of this asset.
     * </pre>
     *
     * <code>optional .google.ads.googleads.v19.enums.AssetFieldTypeEnum.AssetFieldType asset_field_type = 1 [(.google.api.field_behavior) = OUTPUT_ONLY];</code>
     * @param value The assetFieldType to set.
     * @return This builder for chaining.
     */
    public Builder setAssetFieldType(com.google.ads.googleads.v19.enums.AssetFieldTypeEnum.AssetFieldType value) {
      if (value == null) {
        throw new NullPointerException();
      }
      bitField0_ |= 0x00000001;
      assetFieldType_ = value.getNumber();
      onChanged();
      return this;
    }
    /**
     * <pre>
     * Output only. FieldType of this asset.
     * </pre>
     *
     * <code>optional .google.ads.googleads.v19.enums.AssetFieldTypeEnum.AssetFieldType asset_field_type = 1 [(.google.api.field_behavior) = OUTPUT_ONLY];</code>
     * @return This builder for chaining.
     */
    public Builder clearAssetFieldType() {
      bitField0_ = (bitField0_ & ~0x00000001);
      assetFieldType_ = 0;
      onChanged();
      return this;
    }

    private int assetSource_ = 0;
    /**
     * <pre>
     * Output only. Source of this asset.
     * </pre>
     *
     * <code>optional .google.ads.googleads.v19.enums.AssetSourceEnum.AssetSource asset_source = 2 [(.google.api.field_behavior) = OUTPUT_ONLY];</code>
     * @return Whether the assetSource field is set.
     */
    @java.lang.Override public boolean hasAssetSource() {
      return ((bitField0_ & 0x00000002) != 0);
    }
    /**
     * <pre>
     * Output only. Source of this asset.
     * </pre>
     *
     * <code>optional .google.ads.googleads.v19.enums.AssetSourceEnum.AssetSource asset_source = 2 [(.google.api.field_behavior) = OUTPUT_ONLY];</code>
     * @return The enum numeric value on the wire for assetSource.
     */
    @java.lang.Override public int getAssetSourceValue() {
      return assetSource_;
    }
    /**
     * <pre>
     * Output only. Source of this asset.
     * </pre>
     *
     * <code>optional .google.ads.googleads.v19.enums.AssetSourceEnum.AssetSource asset_source = 2 [(.google.api.field_behavior) = OUTPUT_ONLY];</code>
     * @param value The enum numeric value on the wire for assetSource to set.
     * @return This builder for chaining.
     */
    public Builder setAssetSourceValue(int value) {
      assetSource_ = value;
      bitField0_ |= 0x00000002;
      onChanged();
      return this;
    }
    /**
     * <pre>
     * Output only. Source of this asset.
     * </pre>
     *
     * <code>optional .google.ads.googleads.v19.enums.AssetSourceEnum.AssetSource asset_source = 2 [(.google.api.field_behavior) = OUTPUT_ONLY];</code>
     * @return The assetSource.
     */
    @java.lang.Override
    public com.google.ads.googleads.v19.enums.AssetSourceEnum.AssetSource getAssetSource() {
      com.google.ads.googleads.v19.enums.AssetSourceEnum.AssetSource result = com.google.ads.googleads.v19.enums.AssetSourceEnum.AssetSource.forNumber(assetSource_);
      return result == null ? com.google.ads.googleads.v19.enums.AssetSourceEnum.AssetSource.UNRECOGNIZED : result;
    }
    /**
     * <pre>
     * Output only. Source of this asset.
     * </pre>
     *
     * <code>optional .google.ads.googleads.v19.enums.AssetSourceEnum.AssetSource asset_source = 2 [(.google.api.field_behavior) = OUTPUT_ONLY];</code>
     * @param value The assetSource to set.
     * @return This builder for chaining.
     */
    public Builder setAssetSource(com.google.ads.googleads.v19.enums.AssetSourceEnum.AssetSource value) {
      if (value == null) {
        throw new NullPointerException();
      }
      bitField0_ |= 0x00000002;
      assetSource_ = value.getNumber();
      onChanged();
      return this;
    }
    /**
     * <pre>
     * Output only. Source of this asset.
     * </pre>
     *
     * <code>optional .google.ads.googleads.v19.enums.AssetSourceEnum.AssetSource asset_source = 2 [(.google.api.field_behavior) = OUTPUT_ONLY];</code>
     * @return This builder for chaining.
     */
    public Builder clearAssetSource() {
      bitField0_ = (bitField0_ & ~0x00000002);
      assetSource_ = 0;
      onChanged();
      return this;
    }

    private com.google.ads.googleads.v19.resources.AssetPolicySummary policySummaryInfo_;
    private com.google.protobuf.SingleFieldBuilderV3<
        com.google.ads.googleads.v19.resources.AssetPolicySummary, com.google.ads.googleads.v19.resources.AssetPolicySummary.Builder, com.google.ads.googleads.v19.resources.AssetPolicySummaryOrBuilder> policySummaryInfoBuilder_;
    /**
     * <pre>
     * Output only. Policy summary.
     * </pre>
     *
     * <code>optional .google.ads.googleads.v19.resources.AssetPolicySummary policy_summary_info = 3 [(.google.api.field_behavior) = OUTPUT_ONLY];</code>
     * @return Whether the policySummaryInfo field is set.
     */
    public boolean hasPolicySummaryInfo() {
      return ((bitField0_ & 0x00000004) != 0);
    }
    /**
     * <pre>
     * Output only. Policy summary.
     * </pre>
     *
     * <code>optional .google.ads.googleads.v19.resources.AssetPolicySummary policy_summary_info = 3 [(.google.api.field_behavior) = OUTPUT_ONLY];</code>
     * @return The policySummaryInfo.
     */
    public com.google.ads.googleads.v19.resources.AssetPolicySummary getPolicySummaryInfo() {
      if (policySummaryInfoBuilder_ == null) {
        return policySummaryInfo_ == null ? com.google.ads.googleads.v19.resources.AssetPolicySummary.getDefaultInstance() : policySummaryInfo_;
      } else {
        return policySummaryInfoBuilder_.getMessage();
      }
    }
    /**
     * <pre>
     * Output only. Policy summary.
     * </pre>
     *
     * <code>optional .google.ads.googleads.v19.resources.AssetPolicySummary policy_summary_info = 3 [(.google.api.field_behavior) = OUTPUT_ONLY];</code>
     */
    public Builder setPolicySummaryInfo(com.google.ads.googleads.v19.resources.AssetPolicySummary value) {
      if (policySummaryInfoBuilder_ == null) {
        if (value == null) {
          throw new NullPointerException();
        }
        policySummaryInfo_ = value;
      } else {
        policySummaryInfoBuilder_.setMessage(value);
      }
      bitField0_ |= 0x00000004;
      onChanged();
      return this;
    }
    /**
     * <pre>
     * Output only. Policy summary.
     * </pre>
     *
     * <code>optional .google.ads.googleads.v19.resources.AssetPolicySummary policy_summary_info = 3 [(.google.api.field_behavior) = OUTPUT_ONLY];</code>
     */
    public Builder setPolicySummaryInfo(
        com.google.ads.googleads.v19.resources.AssetPolicySummary.Builder builderForValue) {
      if (policySummaryInfoBuilder_ == null) {
        policySummaryInfo_ = builderForValue.build();
      } else {
        policySummaryInfoBuilder_.setMessage(builderForValue.build());
      }
      bitField0_ |= 0x00000004;
      onChanged();
      return this;
    }
    /**
     * <pre>
     * Output only. Policy summary.
     * </pre>
     *
     * <code>optional .google.ads.googleads.v19.resources.AssetPolicySummary policy_summary_info = 3 [(.google.api.field_behavior) = OUTPUT_ONLY];</code>
     */
    public Builder mergePolicySummaryInfo(com.google.ads.googleads.v19.resources.AssetPolicySummary value) {
      if (policySummaryInfoBuilder_ == null) {
        // Merge field-by-field only when a non-default value is already
        // present; otherwise replace wholesale.
        if (((bitField0_ & 0x00000004) != 0) &&
          policySummaryInfo_ != null &&
          policySummaryInfo_ != com.google.ads.googleads.v19.resources.AssetPolicySummary.getDefaultInstance()) {
          getPolicySummaryInfoBuilder().mergeFrom(value);
        } else {
          policySummaryInfo_ = value;
        }
      } else {
        policySummaryInfoBuilder_.mergeFrom(value);
      }
      if (policySummaryInfo_ != null) {
        bitField0_ |= 0x00000004;
        onChanged();
      }
      return this;
    }
    /**
     * <pre>
     * Output only. Policy summary.
     * </pre>
     *
     * <code>optional .google.ads.googleads.v19.resources.AssetPolicySummary policy_summary_info = 3 [(.google.api.field_behavior) = OUTPUT_ONLY];</code>
     */
    public Builder clearPolicySummaryInfo() {
      bitField0_ = (bitField0_ & ~0x00000004);
      policySummaryInfo_ = null;
      if (policySummaryInfoBuilder_ != null) {
        policySummaryInfoBuilder_.dispose();
        policySummaryInfoBuilder_ = null;
      }
      onChanged();
      return this;
    }
    /**
     * <pre>
     * Output only. Policy summary.
     * </pre>
     *
     * <code>optional .google.ads.googleads.v19.resources.AssetPolicySummary policy_summary_info = 3 [(.google.api.field_behavior) = OUTPUT_ONLY];</code>
     */
    public com.google.ads.googleads.v19.resources.AssetPolicySummary.Builder getPolicySummaryInfoBuilder() {
      bitField0_ |= 0x00000004;
      onChanged();
      return getPolicySummaryInfoFieldBuilder().getBuilder();
    }
    /**
     * <pre>
     * Output only. Policy summary.
     * </pre>
     *
     * <code>optional .google.ads.googleads.v19.resources.AssetPolicySummary policy_summary_info = 3 [(.google.api.field_behavior) = OUTPUT_ONLY];</code>
     */
    public com.google.ads.googleads.v19.resources.AssetPolicySummaryOrBuilder getPolicySummaryInfoOrBuilder() {
      if (policySummaryInfoBuilder_ != null) {
        return policySummaryInfoBuilder_.getMessageOrBuilder();
      } else {
        return policySummaryInfo_ == null ?
            com.google.ads.googleads.v19.resources.AssetPolicySummary.getDefaultInstance() : policySummaryInfo_;
      }
    }
    /**
     * <pre>
     * Output only. Policy summary.
     * </pre>
     *
     * <code>optional .google.ads.googleads.v19.resources.AssetPolicySummary policy_summary_info = 3 [(.google.api.field_behavior) = OUTPUT_ONLY];</code>
     */
    private com.google.protobuf.SingleFieldBuilderV3<
        com.google.ads.googleads.v19.resources.AssetPolicySummary, com.google.ads.googleads.v19.resources.AssetPolicySummary.Builder, com.google.ads.googleads.v19.resources.AssetPolicySummaryOrBuilder>
        getPolicySummaryInfoFieldBuilder() {
      // Lazily creates the field builder; ownership of the current value
      // transfers to the builder (the raw field is nulled out).
      if (policySummaryInfoBuilder_ == null) {
        policySummaryInfoBuilder_ = new com.google.protobuf.SingleFieldBuilderV3<
            com.google.ads.googleads.v19.resources.AssetPolicySummary, com.google.ads.googleads.v19.resources.AssetPolicySummary.Builder, com.google.ads.googleads.v19.resources.AssetPolicySummaryOrBuilder>(
                getPolicySummaryInfo(),
                getParentForChildren(),
                isClean());
        policySummaryInfo_ = null;
      }
      return policySummaryInfoBuilder_;
    }
    @java.lang.Override
    public final Builder setUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.setUnknownFields(unknownFields);
    }

    @java.lang.Override
    public final Builder mergeUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.mergeUnknownFields(unknownFields);
    }


    // @@protoc_insertion_point(builder_scope:google.ads.googleads.v19.resources.AssetFieldTypePolicySummary)
  }

  // @@protoc_insertion_point(class_scope:google.ads.googleads.v19.resources.AssetFieldTypePolicySummary)
  private static final com.google.ads.googleads.v19.resources.AssetFieldTypePolicySummary DEFAULT_INSTANCE;
  static {
    DEFAULT_INSTANCE = new com.google.ads.googleads.v19.resources.AssetFieldTypePolicySummary();
  }

  public static com.google.ads.googleads.v19.resources.AssetFieldTypePolicySummary getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }

  // Parser delegates to Builder.mergeFrom and attaches the partially-built
  // message to any exception so callers can inspect what was parsed.
  private static final com.google.protobuf.Parser<AssetFieldTypePolicySummary>
      PARSER = new com.google.protobuf.AbstractParser<AssetFieldTypePolicySummary>() {
    @java.lang.Override
    public AssetFieldTypePolicySummary parsePartialFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      Builder builder = newBuilder();
      try {
        builder.mergeFrom(input, extensionRegistry);
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.setUnfinishedMessage(builder.buildPartial());
      } catch (com.google.protobuf.UninitializedMessageException e) {
        throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
      } catch (java.io.IOException e) {
        throw new com.google.protobuf.InvalidProtocolBufferException(e)
            .setUnfinishedMessage(builder.buildPartial());
      }
      return builder.buildPartial();
    }
  };

  public static com.google.protobuf.Parser<AssetFieldTypePolicySummary> parser() {
    return PARSER;
  }

  @java.lang.Override
  public com.google.protobuf.Parser<AssetFieldTypePolicySummary> getParserForType() {
    return PARSER;
  }

  @java.lang.Override
  public com.google.ads.googleads.v19.resources.AssetFieldTypePolicySummary getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }

}
|
googleads/google-ads-java | 37,221 | google-ads-stubs-v20/src/main/java/com/google/ads/googleads/v20/resources/AssetFieldTypePolicySummary.java | // Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/ads/googleads/v20/resources/asset.proto
// Protobuf Java Version: 3.25.7
package com.google.ads.googleads.v20.resources;
/**
* <pre>
* Contains policy information for an asset under AssetFieldType context.
* </pre>
*
* Protobuf type {@code google.ads.googleads.v20.resources.AssetFieldTypePolicySummary}
*/
public final class AssetFieldTypePolicySummary extends
com.google.protobuf.GeneratedMessageV3 implements
// @@protoc_insertion_point(message_implements:google.ads.googleads.v20.resources.AssetFieldTypePolicySummary)
AssetFieldTypePolicySummaryOrBuilder {
private static final long serialVersionUID = 0L;
// Use AssetFieldTypePolicySummary.newBuilder() to construct.
private AssetFieldTypePolicySummary(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private AssetFieldTypePolicySummary() {
assetFieldType_ = 0;
assetSource_ = 0;
}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(
UnusedPrivateParameter unused) {
return new AssetFieldTypePolicySummary();
}
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return com.google.ads.googleads.v20.resources.AssetProto.internal_static_google_ads_googleads_v20_resources_AssetFieldTypePolicySummary_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.ads.googleads.v20.resources.AssetProto.internal_static_google_ads_googleads_v20_resources_AssetFieldTypePolicySummary_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.ads.googleads.v20.resources.AssetFieldTypePolicySummary.class, com.google.ads.googleads.v20.resources.AssetFieldTypePolicySummary.Builder.class);
}
private int bitField0_;
public static final int ASSET_FIELD_TYPE_FIELD_NUMBER = 1;
private int assetFieldType_ = 0;
/**
* <pre>
* Output only. FieldType of this asset.
* </pre>
*
* <code>optional .google.ads.googleads.v20.enums.AssetFieldTypeEnum.AssetFieldType asset_field_type = 1 [(.google.api.field_behavior) = OUTPUT_ONLY];</code>
* @return Whether the assetFieldType field is set.
*/
@java.lang.Override public boolean hasAssetFieldType() {
return ((bitField0_ & 0x00000001) != 0);
}
/**
* <pre>
* Output only. FieldType of this asset.
* </pre>
*
* <code>optional .google.ads.googleads.v20.enums.AssetFieldTypeEnum.AssetFieldType asset_field_type = 1 [(.google.api.field_behavior) = OUTPUT_ONLY];</code>
* @return The enum numeric value on the wire for assetFieldType.
*/
@java.lang.Override public int getAssetFieldTypeValue() {
return assetFieldType_;
}
/**
* <pre>
* Output only. FieldType of this asset.
* </pre>
*
* <code>optional .google.ads.googleads.v20.enums.AssetFieldTypeEnum.AssetFieldType asset_field_type = 1 [(.google.api.field_behavior) = OUTPUT_ONLY];</code>
* @return The assetFieldType.
*/
@java.lang.Override public com.google.ads.googleads.v20.enums.AssetFieldTypeEnum.AssetFieldType getAssetFieldType() {
com.google.ads.googleads.v20.enums.AssetFieldTypeEnum.AssetFieldType result = com.google.ads.googleads.v20.enums.AssetFieldTypeEnum.AssetFieldType.forNumber(assetFieldType_);
return result == null ? com.google.ads.googleads.v20.enums.AssetFieldTypeEnum.AssetFieldType.UNRECOGNIZED : result;
}
public static final int ASSET_SOURCE_FIELD_NUMBER = 2;
private int assetSource_ = 0;
/**
* <pre>
* Output only. Source of this asset.
* </pre>
*
* <code>optional .google.ads.googleads.v20.enums.AssetSourceEnum.AssetSource asset_source = 2 [(.google.api.field_behavior) = OUTPUT_ONLY];</code>
* @return Whether the assetSource field is set.
*/
@java.lang.Override public boolean hasAssetSource() {
return ((bitField0_ & 0x00000002) != 0);
}
/**
* <pre>
* Output only. Source of this asset.
* </pre>
*
* <code>optional .google.ads.googleads.v20.enums.AssetSourceEnum.AssetSource asset_source = 2 [(.google.api.field_behavior) = OUTPUT_ONLY];</code>
* @return The enum numeric value on the wire for assetSource.
*/
@java.lang.Override public int getAssetSourceValue() {
return assetSource_;
}
/**
* <pre>
* Output only. Source of this asset.
* </pre>
*
* <code>optional .google.ads.googleads.v20.enums.AssetSourceEnum.AssetSource asset_source = 2 [(.google.api.field_behavior) = OUTPUT_ONLY];</code>
* @return The assetSource.
*/
@java.lang.Override public com.google.ads.googleads.v20.enums.AssetSourceEnum.AssetSource getAssetSource() {
com.google.ads.googleads.v20.enums.AssetSourceEnum.AssetSource result = com.google.ads.googleads.v20.enums.AssetSourceEnum.AssetSource.forNumber(assetSource_);
return result == null ? com.google.ads.googleads.v20.enums.AssetSourceEnum.AssetSource.UNRECOGNIZED : result;
}
public static final int POLICY_SUMMARY_INFO_FIELD_NUMBER = 3;
private com.google.ads.googleads.v20.resources.AssetPolicySummary policySummaryInfo_;
/**
* <pre>
* Output only. Policy summary.
* </pre>
*
* <code>optional .google.ads.googleads.v20.resources.AssetPolicySummary policy_summary_info = 3 [(.google.api.field_behavior) = OUTPUT_ONLY];</code>
* @return Whether the policySummaryInfo field is set.
*/
@java.lang.Override
public boolean hasPolicySummaryInfo() {
return ((bitField0_ & 0x00000004) != 0);
}
/**
* <pre>
* Output only. Policy summary.
* </pre>
*
* <code>optional .google.ads.googleads.v20.resources.AssetPolicySummary policy_summary_info = 3 [(.google.api.field_behavior) = OUTPUT_ONLY];</code>
* @return The policySummaryInfo.
*/
@java.lang.Override
public com.google.ads.googleads.v20.resources.AssetPolicySummary getPolicySummaryInfo() {
return policySummaryInfo_ == null ? com.google.ads.googleads.v20.resources.AssetPolicySummary.getDefaultInstance() : policySummaryInfo_;
}
/**
* <pre>
* Output only. Policy summary.
* </pre>
*
* <code>optional .google.ads.googleads.v20.resources.AssetPolicySummary policy_summary_info = 3 [(.google.api.field_behavior) = OUTPUT_ONLY];</code>
*/
@java.lang.Override
public com.google.ads.googleads.v20.resources.AssetPolicySummaryOrBuilder getPolicySummaryInfoOrBuilder() {
return policySummaryInfo_ == null ? com.google.ads.googleads.v20.resources.AssetPolicySummary.getDefaultInstance() : policySummaryInfo_;
}
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
@java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output)
throws java.io.IOException {
if (((bitField0_ & 0x00000001) != 0)) {
output.writeEnum(1, assetFieldType_);
}
if (((bitField0_ & 0x00000002) != 0)) {
output.writeEnum(2, assetSource_);
}
if (((bitField0_ & 0x00000004) != 0)) {
output.writeMessage(3, getPolicySummaryInfo());
}
getUnknownFields().writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
if (((bitField0_ & 0x00000001) != 0)) {
size += com.google.protobuf.CodedOutputStream
.computeEnumSize(1, assetFieldType_);
}
if (((bitField0_ & 0x00000002) != 0)) {
size += com.google.protobuf.CodedOutputStream
.computeEnumSize(2, assetSource_);
}
if (((bitField0_ & 0x00000004) != 0)) {
size += com.google.protobuf.CodedOutputStream
.computeMessageSize(3, getPolicySummaryInfo());
}
size += getUnknownFields().getSerializedSize();
memoizedSize = size;
return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof com.google.ads.googleads.v20.resources.AssetFieldTypePolicySummary)) {
return super.equals(obj);
}
com.google.ads.googleads.v20.resources.AssetFieldTypePolicySummary other = (com.google.ads.googleads.v20.resources.AssetFieldTypePolicySummary) obj;
if (hasAssetFieldType() != other.hasAssetFieldType()) return false;
if (hasAssetFieldType()) {
if (assetFieldType_ != other.assetFieldType_) return false;
}
if (hasAssetSource() != other.hasAssetSource()) return false;
if (hasAssetSource()) {
if (assetSource_ != other.assetSource_) return false;
}
if (hasPolicySummaryInfo() != other.hasPolicySummaryInfo()) return false;
if (hasPolicySummaryInfo()) {
if (!getPolicySummaryInfo()
.equals(other.getPolicySummaryInfo())) return false;
}
if (!getUnknownFields().equals(other.getUnknownFields())) return false;
return true;
}
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
if (hasAssetFieldType()) {
hash = (37 * hash) + ASSET_FIELD_TYPE_FIELD_NUMBER;
hash = (53 * hash) + assetFieldType_;
}
if (hasAssetSource()) {
hash = (37 * hash) + ASSET_SOURCE_FIELD_NUMBER;
hash = (53 * hash) + assetSource_;
}
if (hasPolicySummaryInfo()) {
hash = (37 * hash) + POLICY_SUMMARY_INFO_FIELD_NUMBER;
hash = (53 * hash) + getPolicySummaryInfo().hashCode();
}
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
public static com.google.ads.googleads.v20.resources.AssetFieldTypePolicySummary parseFrom(
java.nio.ByteBuffer data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.ads.googleads.v20.resources.AssetFieldTypePolicySummary parseFrom(
java.nio.ByteBuffer data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.ads.googleads.v20.resources.AssetFieldTypePolicySummary parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.ads.googleads.v20.resources.AssetFieldTypePolicySummary parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.ads.googleads.v20.resources.AssetFieldTypePolicySummary parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.ads.googleads.v20.resources.AssetFieldTypePolicySummary parseFrom(
byte[] data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.ads.googleads.v20.resources.AssetFieldTypePolicySummary parseFrom(java.io.InputStream input)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static com.google.ads.googleads.v20.resources.AssetFieldTypePolicySummary parseFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
public static com.google.ads.googleads.v20.resources.AssetFieldTypePolicySummary parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input);
}
public static com.google.ads.googleads.v20.resources.AssetFieldTypePolicySummary parseDelimitedFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input, extensionRegistry);
}
public static com.google.ads.googleads.v20.resources.AssetFieldTypePolicySummary parseFrom(
com.google.protobuf.CodedInputStream input)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static com.google.ads.googleads.v20.resources.AssetFieldTypePolicySummary parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() { return newBuilder(); }
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(com.google.ads.googleads.v20.resources.AssetFieldTypePolicySummary prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE
? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(
com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
* <pre>
* Contains policy information for an asset under AssetFieldType context.
* </pre>
*
* Protobuf type {@code google.ads.googleads.v20.resources.AssetFieldTypePolicySummary}
*/
public static final class Builder extends
com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements
// @@protoc_insertion_point(builder_implements:google.ads.googleads.v20.resources.AssetFieldTypePolicySummary)
com.google.ads.googleads.v20.resources.AssetFieldTypePolicySummaryOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return com.google.ads.googleads.v20.resources.AssetProto.internal_static_google_ads_googleads_v20_resources_AssetFieldTypePolicySummary_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.ads.googleads.v20.resources.AssetProto.internal_static_google_ads_googleads_v20_resources_AssetFieldTypePolicySummary_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.ads.googleads.v20.resources.AssetFieldTypePolicySummary.class, com.google.ads.googleads.v20.resources.AssetFieldTypePolicySummary.Builder.class);
}
// Construct using com.google.ads.googleads.v20.resources.AssetFieldTypePolicySummary.newBuilder()
private Builder() {
maybeForceBuilderInitialization();
}
private Builder(
com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
private void maybeForceBuilderInitialization() {
if (com.google.protobuf.GeneratedMessageV3
.alwaysUseFieldBuilders) {
getPolicySummaryInfoFieldBuilder();
}
}
@java.lang.Override
public Builder clear() {
super.clear();
bitField0_ = 0;
assetFieldType_ = 0;
assetSource_ = 0;
policySummaryInfo_ = null;
if (policySummaryInfoBuilder_ != null) {
policySummaryInfoBuilder_.dispose();
policySummaryInfoBuilder_ = null;
}
return this;
}
@java.lang.Override
public com.google.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return com.google.ads.googleads.v20.resources.AssetProto.internal_static_google_ads_googleads_v20_resources_AssetFieldTypePolicySummary_descriptor;
}
@java.lang.Override
public com.google.ads.googleads.v20.resources.AssetFieldTypePolicySummary getDefaultInstanceForType() {
return com.google.ads.googleads.v20.resources.AssetFieldTypePolicySummary.getDefaultInstance();
}
@java.lang.Override
public com.google.ads.googleads.v20.resources.AssetFieldTypePolicySummary build() {
com.google.ads.googleads.v20.resources.AssetFieldTypePolicySummary result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public com.google.ads.googleads.v20.resources.AssetFieldTypePolicySummary buildPartial() {
com.google.ads.googleads.v20.resources.AssetFieldTypePolicySummary result = new com.google.ads.googleads.v20.resources.AssetFieldTypePolicySummary(this);
if (bitField0_ != 0) { buildPartial0(result); }
onBuilt();
return result;
}
private void buildPartial0(com.google.ads.googleads.v20.resources.AssetFieldTypePolicySummary result) {
int from_bitField0_ = bitField0_;
int to_bitField0_ = 0;
if (((from_bitField0_ & 0x00000001) != 0)) {
result.assetFieldType_ = assetFieldType_;
to_bitField0_ |= 0x00000001;
}
if (((from_bitField0_ & 0x00000002) != 0)) {
result.assetSource_ = assetSource_;
to_bitField0_ |= 0x00000002;
}
if (((from_bitField0_ & 0x00000004) != 0)) {
result.policySummaryInfo_ = policySummaryInfoBuilder_ == null
? policySummaryInfo_
: policySummaryInfoBuilder_.build();
to_bitField0_ |= 0x00000004;
}
result.bitField0_ |= to_bitField0_;
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
com.google.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(
com.google.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(
com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field,
int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof com.google.ads.googleads.v20.resources.AssetFieldTypePolicySummary) {
return mergeFrom((com.google.ads.googleads.v20.resources.AssetFieldTypePolicySummary)other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(com.google.ads.googleads.v20.resources.AssetFieldTypePolicySummary other) {
if (other == com.google.ads.googleads.v20.resources.AssetFieldTypePolicySummary.getDefaultInstance()) return this;
if (other.hasAssetFieldType()) {
setAssetFieldType(other.getAssetFieldType());
}
if (other.hasAssetSource()) {
setAssetSource(other.getAssetSource());
}
if (other.hasPolicySummaryInfo()) {
mergePolicySummaryInfo(other.getPolicySummaryInfo());
}
this.mergeUnknownFields(other.getUnknownFields());
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
return true;
}
@java.lang.Override
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
case 8: {
assetFieldType_ = input.readEnum();
bitField0_ |= 0x00000001;
break;
} // case 8
case 16: {
assetSource_ = input.readEnum();
bitField0_ |= 0x00000002;
break;
} // case 16
case 26: {
input.readMessage(
getPolicySummaryInfoFieldBuilder().getBuilder(),
extensionRegistry);
bitField0_ |= 0x00000004;
break;
} // case 26
default: {
if (!super.parseUnknownField(input, extensionRegistry, tag)) {
done = true; // was an endgroup tag
}
break;
} // default:
} // switch (tag)
} // while (!done)
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.unwrapIOException();
} finally {
onChanged();
} // finally
return this;
}
private int bitField0_;
private int assetFieldType_ = 0;
/**
* <pre>
* Output only. FieldType of this asset.
* </pre>
*
* <code>optional .google.ads.googleads.v20.enums.AssetFieldTypeEnum.AssetFieldType asset_field_type = 1 [(.google.api.field_behavior) = OUTPUT_ONLY];</code>
* @return Whether the assetFieldType field is set.
*/
@java.lang.Override public boolean hasAssetFieldType() {
return ((bitField0_ & 0x00000001) != 0);
}
/**
* <pre>
* Output only. FieldType of this asset.
* </pre>
*
* <code>optional .google.ads.googleads.v20.enums.AssetFieldTypeEnum.AssetFieldType asset_field_type = 1 [(.google.api.field_behavior) = OUTPUT_ONLY];</code>
* @return The enum numeric value on the wire for assetFieldType.
*/
@java.lang.Override public int getAssetFieldTypeValue() {
return assetFieldType_;
}
/**
* <pre>
* Output only. FieldType of this asset.
* </pre>
*
* <code>optional .google.ads.googleads.v20.enums.AssetFieldTypeEnum.AssetFieldType asset_field_type = 1 [(.google.api.field_behavior) = OUTPUT_ONLY];</code>
* @param value The enum numeric value on the wire for assetFieldType to set.
* @return This builder for chaining.
*/
public Builder setAssetFieldTypeValue(int value) {
assetFieldType_ = value;
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
* <pre>
* Output only. FieldType of this asset.
* </pre>
*
* <code>optional .google.ads.googleads.v20.enums.AssetFieldTypeEnum.AssetFieldType asset_field_type = 1 [(.google.api.field_behavior) = OUTPUT_ONLY];</code>
* @return The assetFieldType.
*/
@java.lang.Override
public com.google.ads.googleads.v20.enums.AssetFieldTypeEnum.AssetFieldType getAssetFieldType() {
com.google.ads.googleads.v20.enums.AssetFieldTypeEnum.AssetFieldType result = com.google.ads.googleads.v20.enums.AssetFieldTypeEnum.AssetFieldType.forNumber(assetFieldType_);
return result == null ? com.google.ads.googleads.v20.enums.AssetFieldTypeEnum.AssetFieldType.UNRECOGNIZED : result;
}
/**
* <pre>
* Output only. FieldType of this asset.
* </pre>
*
* <code>optional .google.ads.googleads.v20.enums.AssetFieldTypeEnum.AssetFieldType asset_field_type = 1 [(.google.api.field_behavior) = OUTPUT_ONLY];</code>
* @param value The assetFieldType to set.
* @return This builder for chaining.
*/
public Builder setAssetFieldType(com.google.ads.googleads.v20.enums.AssetFieldTypeEnum.AssetFieldType value) {
if (value == null) {
throw new NullPointerException();
}
bitField0_ |= 0x00000001;
assetFieldType_ = value.getNumber();
onChanged();
return this;
}
/**
* <pre>
* Output only. FieldType of this asset.
* </pre>
*
* <code>optional .google.ads.googleads.v20.enums.AssetFieldTypeEnum.AssetFieldType asset_field_type = 1 [(.google.api.field_behavior) = OUTPUT_ONLY];</code>
* @return This builder for chaining.
*/
public Builder clearAssetFieldType() {
bitField0_ = (bitField0_ & ~0x00000001);
assetFieldType_ = 0;
onChanged();
return this;
}
private int assetSource_ = 0;
/**
* <pre>
* Output only. Source of this asset.
* </pre>
*
* <code>optional .google.ads.googleads.v20.enums.AssetSourceEnum.AssetSource asset_source = 2 [(.google.api.field_behavior) = OUTPUT_ONLY];</code>
* @return Whether the assetSource field is set.
*/
@java.lang.Override public boolean hasAssetSource() {
return ((bitField0_ & 0x00000002) != 0);
}
/**
* <pre>
* Output only. Source of this asset.
* </pre>
*
* <code>optional .google.ads.googleads.v20.enums.AssetSourceEnum.AssetSource asset_source = 2 [(.google.api.field_behavior) = OUTPUT_ONLY];</code>
* @return The enum numeric value on the wire for assetSource.
*/
@java.lang.Override public int getAssetSourceValue() {
return assetSource_;
}
/**
* <pre>
* Output only. Source of this asset.
* </pre>
*
* <code>optional .google.ads.googleads.v20.enums.AssetSourceEnum.AssetSource asset_source = 2 [(.google.api.field_behavior) = OUTPUT_ONLY];</code>
* @param value The enum numeric value on the wire for assetSource to set.
* @return This builder for chaining.
*/
public Builder setAssetSourceValue(int value) {
assetSource_ = value;
bitField0_ |= 0x00000002;
onChanged();
return this;
}
/**
* <pre>
* Output only. Source of this asset.
* </pre>
*
* <code>optional .google.ads.googleads.v20.enums.AssetSourceEnum.AssetSource asset_source = 2 [(.google.api.field_behavior) = OUTPUT_ONLY];</code>
* @return The assetSource.
*/
@java.lang.Override
public com.google.ads.googleads.v20.enums.AssetSourceEnum.AssetSource getAssetSource() {
com.google.ads.googleads.v20.enums.AssetSourceEnum.AssetSource result = com.google.ads.googleads.v20.enums.AssetSourceEnum.AssetSource.forNumber(assetSource_);
return result == null ? com.google.ads.googleads.v20.enums.AssetSourceEnum.AssetSource.UNRECOGNIZED : result;
}
/**
* <pre>
* Output only. Source of this asset.
* </pre>
*
* <code>optional .google.ads.googleads.v20.enums.AssetSourceEnum.AssetSource asset_source = 2 [(.google.api.field_behavior) = OUTPUT_ONLY];</code>
* @param value The assetSource to set.
* @return This builder for chaining.
*/
public Builder setAssetSource(com.google.ads.googleads.v20.enums.AssetSourceEnum.AssetSource value) {
if (value == null) {
throw new NullPointerException();
}
bitField0_ |= 0x00000002;
assetSource_ = value.getNumber();
onChanged();
return this;
}
/**
* <pre>
* Output only. Source of this asset.
* </pre>
*
* <code>optional .google.ads.googleads.v20.enums.AssetSourceEnum.AssetSource asset_source = 2 [(.google.api.field_behavior) = OUTPUT_ONLY];</code>
* @return This builder for chaining.
*/
public Builder clearAssetSource() {
bitField0_ = (bitField0_ & ~0x00000002);
assetSource_ = 0;
onChanged();
return this;
}
private com.google.ads.googleads.v20.resources.AssetPolicySummary policySummaryInfo_;
private com.google.protobuf.SingleFieldBuilderV3<
com.google.ads.googleads.v20.resources.AssetPolicySummary, com.google.ads.googleads.v20.resources.AssetPolicySummary.Builder, com.google.ads.googleads.v20.resources.AssetPolicySummaryOrBuilder> policySummaryInfoBuilder_;
/**
* <pre>
* Output only. Policy summary.
* </pre>
*
* <code>optional .google.ads.googleads.v20.resources.AssetPolicySummary policy_summary_info = 3 [(.google.api.field_behavior) = OUTPUT_ONLY];</code>
* @return Whether the policySummaryInfo field is set.
*/
public boolean hasPolicySummaryInfo() {
return ((bitField0_ & 0x00000004) != 0);
}
/**
* <pre>
* Output only. Policy summary.
* </pre>
*
* <code>optional .google.ads.googleads.v20.resources.AssetPolicySummary policy_summary_info = 3 [(.google.api.field_behavior) = OUTPUT_ONLY];</code>
* @return The policySummaryInfo.
*/
public com.google.ads.googleads.v20.resources.AssetPolicySummary getPolicySummaryInfo() {
if (policySummaryInfoBuilder_ == null) {
return policySummaryInfo_ == null ? com.google.ads.googleads.v20.resources.AssetPolicySummary.getDefaultInstance() : policySummaryInfo_;
} else {
return policySummaryInfoBuilder_.getMessage();
}
}
/**
* <pre>
* Output only. Policy summary.
* </pre>
*
* <code>optional .google.ads.googleads.v20.resources.AssetPolicySummary policy_summary_info = 3 [(.google.api.field_behavior) = OUTPUT_ONLY];</code>
*/
public Builder setPolicySummaryInfo(com.google.ads.googleads.v20.resources.AssetPolicySummary value) {
if (policySummaryInfoBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
policySummaryInfo_ = value;
} else {
policySummaryInfoBuilder_.setMessage(value);
}
bitField0_ |= 0x00000004;
onChanged();
return this;
}
/**
* <pre>
* Output only. Policy summary.
* </pre>
*
* <code>optional .google.ads.googleads.v20.resources.AssetPolicySummary policy_summary_info = 3 [(.google.api.field_behavior) = OUTPUT_ONLY];</code>
*/
public Builder setPolicySummaryInfo(
com.google.ads.googleads.v20.resources.AssetPolicySummary.Builder builderForValue) {
if (policySummaryInfoBuilder_ == null) {
policySummaryInfo_ = builderForValue.build();
} else {
policySummaryInfoBuilder_.setMessage(builderForValue.build());
}
bitField0_ |= 0x00000004;
onChanged();
return this;
}
/**
* <pre>
* Output only. Policy summary.
* </pre>
*
* <code>optional .google.ads.googleads.v20.resources.AssetPolicySummary policy_summary_info = 3 [(.google.api.field_behavior) = OUTPUT_ONLY];</code>
*/
public Builder mergePolicySummaryInfo(com.google.ads.googleads.v20.resources.AssetPolicySummary value) {
if (policySummaryInfoBuilder_ == null) {
if (((bitField0_ & 0x00000004) != 0) &&
policySummaryInfo_ != null &&
policySummaryInfo_ != com.google.ads.googleads.v20.resources.AssetPolicySummary.getDefaultInstance()) {
getPolicySummaryInfoBuilder().mergeFrom(value);
} else {
policySummaryInfo_ = value;
}
} else {
policySummaryInfoBuilder_.mergeFrom(value);
}
if (policySummaryInfo_ != null) {
bitField0_ |= 0x00000004;
onChanged();
}
return this;
}
/**
* <pre>
* Output only. Policy summary.
* </pre>
*
* <code>optional .google.ads.googleads.v20.resources.AssetPolicySummary policy_summary_info = 3 [(.google.api.field_behavior) = OUTPUT_ONLY];</code>
*/
public Builder clearPolicySummaryInfo() {
bitField0_ = (bitField0_ & ~0x00000004);
policySummaryInfo_ = null;
if (policySummaryInfoBuilder_ != null) {
policySummaryInfoBuilder_.dispose();
policySummaryInfoBuilder_ = null;
}
onChanged();
return this;
}
/**
* <pre>
* Output only. Policy summary.
* </pre>
*
* <code>optional .google.ads.googleads.v20.resources.AssetPolicySummary policy_summary_info = 3 [(.google.api.field_behavior) = OUTPUT_ONLY];</code>
*/
public com.google.ads.googleads.v20.resources.AssetPolicySummary.Builder getPolicySummaryInfoBuilder() {
bitField0_ |= 0x00000004;
onChanged();
return getPolicySummaryInfoFieldBuilder().getBuilder();
}
/**
* <pre>
* Output only. Policy summary.
* </pre>
*
* <code>optional .google.ads.googleads.v20.resources.AssetPolicySummary policy_summary_info = 3 [(.google.api.field_behavior) = OUTPUT_ONLY];</code>
*/
public com.google.ads.googleads.v20.resources.AssetPolicySummaryOrBuilder getPolicySummaryInfoOrBuilder() {
if (policySummaryInfoBuilder_ != null) {
return policySummaryInfoBuilder_.getMessageOrBuilder();
} else {
return policySummaryInfo_ == null ?
com.google.ads.googleads.v20.resources.AssetPolicySummary.getDefaultInstance() : policySummaryInfo_;
}
}
/**
* <pre>
* Output only. Policy summary.
* </pre>
*
* <code>optional .google.ads.googleads.v20.resources.AssetPolicySummary policy_summary_info = 3 [(.google.api.field_behavior) = OUTPUT_ONLY];</code>
*/
private com.google.protobuf.SingleFieldBuilderV3<
com.google.ads.googleads.v20.resources.AssetPolicySummary, com.google.ads.googleads.v20.resources.AssetPolicySummary.Builder, com.google.ads.googleads.v20.resources.AssetPolicySummaryOrBuilder>
getPolicySummaryInfoFieldBuilder() {
if (policySummaryInfoBuilder_ == null) {
policySummaryInfoBuilder_ = new com.google.protobuf.SingleFieldBuilderV3<
com.google.ads.googleads.v20.resources.AssetPolicySummary, com.google.ads.googleads.v20.resources.AssetPolicySummary.Builder, com.google.ads.googleads.v20.resources.AssetPolicySummaryOrBuilder>(
getPolicySummaryInfo(),
getParentForChildren(),
isClean());
policySummaryInfo_ = null;
}
return policySummaryInfoBuilder_;
}
@java.lang.Override
public final Builder setUnknownFields(
final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:google.ads.googleads.v20.resources.AssetFieldTypePolicySummary)
}
// @@protoc_insertion_point(class_scope:google.ads.googleads.v20.resources.AssetFieldTypePolicySummary)
private static final com.google.ads.googleads.v20.resources.AssetFieldTypePolicySummary DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new com.google.ads.googleads.v20.resources.AssetFieldTypePolicySummary();
}
public static com.google.ads.googleads.v20.resources.AssetFieldTypePolicySummary getDefaultInstance() {
return DEFAULT_INSTANCE;
}
  // Wire-format parser. On any parse failure the partially-built message is
  // attached to the thrown InvalidProtocolBufferException for diagnostics.
  private static final com.google.protobuf.Parser<AssetFieldTypePolicySummary>
      PARSER = new com.google.protobuf.AbstractParser<AssetFieldTypePolicySummary>() {
    @java.lang.Override
    public AssetFieldTypePolicySummary parsePartialFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      Builder builder = newBuilder();
      try {
        builder.mergeFrom(input, extensionRegistry);
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.setUnfinishedMessage(builder.buildPartial());
      } catch (com.google.protobuf.UninitializedMessageException e) {
        throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
      } catch (java.io.IOException e) {
        // I/O problems are rewrapped so callers only need to handle protobuf exceptions.
        throw new com.google.protobuf.InvalidProtocolBufferException(e)
            .setUnfinishedMessage(builder.buildPartial());
      }
      return builder.buildPartial();
    }
  };
  // Static accessor for the message parser.
  public static com.google.protobuf.Parser<AssetFieldTypePolicySummary> parser() {
    return PARSER;
  }
  // Instance accessor for the message parser (required by the Message interface).
  @java.lang.Override
  public com.google.protobuf.Parser<AssetFieldTypePolicySummary> getParserForType() {
    return PARSER;
  }
  // Instance accessor for the default instance (required by the Message interface).
  @java.lang.Override
  public com.google.ads.googleads.v20.resources.AssetFieldTypePolicySummary getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }
}
|
googleads/google-ads-java | 37,221 | google-ads-stubs-v21/src/main/java/com/google/ads/googleads/v21/resources/AssetFieldTypePolicySummary.java | // Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/ads/googleads/v21/resources/asset.proto
// Protobuf Java Version: 3.25.7
package com.google.ads.googleads.v21.resources;
/**
* <pre>
* Contains policy information for an asset under AssetFieldType context.
* </pre>
*
* Protobuf type {@code google.ads.googleads.v21.resources.AssetFieldTypePolicySummary}
*/
// NOTE: machine-generated by protoc from asset.proto — do not hand-edit; regenerate instead.
public final class AssetFieldTypePolicySummary extends
    com.google.protobuf.GeneratedMessageV3 implements
    // @@protoc_insertion_point(message_implements:google.ads.googleads.v21.resources.AssetFieldTypePolicySummary)
    AssetFieldTypePolicySummaryOrBuilder {
private static final long serialVersionUID = 0L;
  // Use AssetFieldTypePolicySummary.newBuilder() to construct.
  private AssetFieldTypePolicySummary(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
    super(builder);
  }
  private AssetFieldTypePolicySummary() {
    assetFieldType_ = 0;
    assetSource_ = 0;
  }

  @java.lang.Override
  @SuppressWarnings({"unused"})
  protected java.lang.Object newInstance(
      UnusedPrivateParameter unused) {
    return new AssetFieldTypePolicySummary();
  }

  public static final com.google.protobuf.Descriptors.Descriptor
      getDescriptor() {
    return com.google.ads.googleads.v21.resources.AssetProto.internal_static_google_ads_googleads_v21_resources_AssetFieldTypePolicySummary_descriptor;
  }

  @java.lang.Override
  protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internalGetFieldAccessorTable() {
    return com.google.ads.googleads.v21.resources.AssetProto.internal_static_google_ads_googleads_v21_resources_AssetFieldTypePolicySummary_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            com.google.ads.googleads.v21.resources.AssetFieldTypePolicySummary.class, com.google.ads.googleads.v21.resources.AssetFieldTypePolicySummary.Builder.class);
  }

  // Presence bits for the three optional fields:
  // bit 0 = asset_field_type, bit 1 = asset_source, bit 2 = policy_summary_info.
  private int bitField0_;
  public static final int ASSET_FIELD_TYPE_FIELD_NUMBER = 1;
  private int assetFieldType_ = 0;
  /**
   * <pre>
   * Output only. FieldType of this asset.
   * </pre>
   *
   * <code>optional .google.ads.googleads.v21.enums.AssetFieldTypeEnum.AssetFieldType asset_field_type = 1 [(.google.api.field_behavior) = OUTPUT_ONLY];</code>
   * @return Whether the assetFieldType field is set.
   */
  @java.lang.Override public boolean hasAssetFieldType() {
    return ((bitField0_ & 0x00000001) != 0);
  }
  /**
   * <pre>
   * Output only. FieldType of this asset.
   * </pre>
   *
   * <code>optional .google.ads.googleads.v21.enums.AssetFieldTypeEnum.AssetFieldType asset_field_type = 1 [(.google.api.field_behavior) = OUTPUT_ONLY];</code>
   * @return The enum numeric value on the wire for assetFieldType.
   */
  @java.lang.Override public int getAssetFieldTypeValue() {
    return assetFieldType_;
  }
  /**
   * <pre>
   * Output only. FieldType of this asset.
   * </pre>
   *
   * <code>optional .google.ads.googleads.v21.enums.AssetFieldTypeEnum.AssetFieldType asset_field_type = 1 [(.google.api.field_behavior) = OUTPUT_ONLY];</code>
   * @return The assetFieldType.
   */
  @java.lang.Override public com.google.ads.googleads.v21.enums.AssetFieldTypeEnum.AssetFieldType getAssetFieldType() {
    // Wire values not known to this client version map to UNRECOGNIZED rather than null.
    com.google.ads.googleads.v21.enums.AssetFieldTypeEnum.AssetFieldType result = com.google.ads.googleads.v21.enums.AssetFieldTypeEnum.AssetFieldType.forNumber(assetFieldType_);
    return result == null ? com.google.ads.googleads.v21.enums.AssetFieldTypeEnum.AssetFieldType.UNRECOGNIZED : result;
  }

  public static final int ASSET_SOURCE_FIELD_NUMBER = 2;
  private int assetSource_ = 0;
  /**
   * <pre>
   * Output only. Source of this asset.
   * </pre>
   *
   * <code>optional .google.ads.googleads.v21.enums.AssetSourceEnum.AssetSource asset_source = 2 [(.google.api.field_behavior) = OUTPUT_ONLY];</code>
   * @return Whether the assetSource field is set.
   */
  @java.lang.Override public boolean hasAssetSource() {
    return ((bitField0_ & 0x00000002) != 0);
  }
  /**
   * <pre>
   * Output only. Source of this asset.
   * </pre>
   *
   * <code>optional .google.ads.googleads.v21.enums.AssetSourceEnum.AssetSource asset_source = 2 [(.google.api.field_behavior) = OUTPUT_ONLY];</code>
   * @return The enum numeric value on the wire for assetSource.
   */
  @java.lang.Override public int getAssetSourceValue() {
    return assetSource_;
  }
  /**
   * <pre>
   * Output only. Source of this asset.
   * </pre>
   *
   * <code>optional .google.ads.googleads.v21.enums.AssetSourceEnum.AssetSource asset_source = 2 [(.google.api.field_behavior) = OUTPUT_ONLY];</code>
   * @return The assetSource.
   */
  @java.lang.Override public com.google.ads.googleads.v21.enums.AssetSourceEnum.AssetSource getAssetSource() {
    com.google.ads.googleads.v21.enums.AssetSourceEnum.AssetSource result = com.google.ads.googleads.v21.enums.AssetSourceEnum.AssetSource.forNumber(assetSource_);
    return result == null ? com.google.ads.googleads.v21.enums.AssetSourceEnum.AssetSource.UNRECOGNIZED : result;
  }

  public static final int POLICY_SUMMARY_INFO_FIELD_NUMBER = 3;
  private com.google.ads.googleads.v21.resources.AssetPolicySummary policySummaryInfo_;
  /**
   * <pre>
   * Output only. Policy summary.
   * </pre>
   *
   * <code>optional .google.ads.googleads.v21.resources.AssetPolicySummary policy_summary_info = 3 [(.google.api.field_behavior) = OUTPUT_ONLY];</code>
   * @return Whether the policySummaryInfo field is set.
   */
  @java.lang.Override
  public boolean hasPolicySummaryInfo() {
    return ((bitField0_ & 0x00000004) != 0);
  }
  /**
   * <pre>
   * Output only. Policy summary.
   * </pre>
   *
   * <code>optional .google.ads.googleads.v21.resources.AssetPolicySummary policy_summary_info = 3 [(.google.api.field_behavior) = OUTPUT_ONLY];</code>
   * @return The policySummaryInfo.
   */
  @java.lang.Override
  public com.google.ads.googleads.v21.resources.AssetPolicySummary getPolicySummaryInfo() {
    // Never returns null: unset message fields read as the default instance.
    return policySummaryInfo_ == null ? com.google.ads.googleads.v21.resources.AssetPolicySummary.getDefaultInstance() : policySummaryInfo_;
  }
  /**
   * <pre>
   * Output only. Policy summary.
   * </pre>
   *
   * <code>optional .google.ads.googleads.v21.resources.AssetPolicySummary policy_summary_info = 3 [(.google.api.field_behavior) = OUTPUT_ONLY];</code>
   */
  @java.lang.Override
  public com.google.ads.googleads.v21.resources.AssetPolicySummaryOrBuilder getPolicySummaryInfoOrBuilder() {
    return policySummaryInfo_ == null ? com.google.ads.googleads.v21.resources.AssetPolicySummary.getDefaultInstance() : policySummaryInfo_;
  }

  // Cached isInitialized() result: -1 = not yet computed, 0 = false, 1 = true.
  private byte memoizedIsInitialized = -1;
  @java.lang.Override
  public final boolean isInitialized() {
    byte isInitialized = memoizedIsInitialized;
    if (isInitialized == 1) return true;
    if (isInitialized == 0) return false;

    memoizedIsInitialized = 1;
    return true;
  }

  // Serializes only the fields whose presence bit is set, then any unknown fields.
  @java.lang.Override
  public void writeTo(com.google.protobuf.CodedOutputStream output)
                      throws java.io.IOException {
    if (((bitField0_ & 0x00000001) != 0)) {
      output.writeEnum(1, assetFieldType_);
    }
    if (((bitField0_ & 0x00000002) != 0)) {
      output.writeEnum(2, assetSource_);
    }
    if (((bitField0_ & 0x00000004) != 0)) {
      output.writeMessage(3, getPolicySummaryInfo());
    }
    getUnknownFields().writeTo(output);
  }

  // Computes (and memoizes) the serialized byte size; must mirror writeTo exactly.
  @java.lang.Override
  public int getSerializedSize() {
    int size = memoizedSize;
    if (size != -1) return size;

    size = 0;
    if (((bitField0_ & 0x00000001) != 0)) {
      size += com.google.protobuf.CodedOutputStream
        .computeEnumSize(1, assetFieldType_);
    }
    if (((bitField0_ & 0x00000002) != 0)) {
      size += com.google.protobuf.CodedOutputStream
        .computeEnumSize(2, assetSource_);
    }
    if (((bitField0_ & 0x00000004) != 0)) {
      size += com.google.protobuf.CodedOutputStream
        .computeMessageSize(3, getPolicySummaryInfo());
    }
    size += getUnknownFields().getSerializedSize();
    memoizedSize = size;
    return size;
  }

  // Field-wise equality: presence must match, and set fields must compare equal.
  @java.lang.Override
  public boolean equals(final java.lang.Object obj) {
    if (obj == this) {
     return true;
    }
    if (!(obj instanceof com.google.ads.googleads.v21.resources.AssetFieldTypePolicySummary)) {
      return super.equals(obj);
    }
    com.google.ads.googleads.v21.resources.AssetFieldTypePolicySummary other = (com.google.ads.googleads.v21.resources.AssetFieldTypePolicySummary) obj;

    if (hasAssetFieldType() != other.hasAssetFieldType()) return false;
    if (hasAssetFieldType()) {
      if (assetFieldType_ != other.assetFieldType_) return false;
    }
    if (hasAssetSource() != other.hasAssetSource()) return false;
    if (hasAssetSource()) {
      if (assetSource_ != other.assetSource_) return false;
    }
    if (hasPolicySummaryInfo() != other.hasPolicySummaryInfo()) return false;
    if (hasPolicySummaryInfo()) {
      if (!getPolicySummaryInfo()
          .equals(other.getPolicySummaryInfo())) return false;
    }
    if (!getUnknownFields().equals(other.getUnknownFields())) return false;
    return true;
  }

  // Memoized hash consistent with equals(); prime multipliers follow the
  // protobuf generator's standard hashing scheme.
  @java.lang.Override
  public int hashCode() {
    if (memoizedHashCode != 0) {
      return memoizedHashCode;
    }
    int hash = 41;
    hash = (19 * hash) + getDescriptor().hashCode();
    if (hasAssetFieldType()) {
      hash = (37 * hash) + ASSET_FIELD_TYPE_FIELD_NUMBER;
      hash = (53 * hash) + assetFieldType_;
    }
    if (hasAssetSource()) {
      hash = (37 * hash) + ASSET_SOURCE_FIELD_NUMBER;
      hash = (53 * hash) + assetSource_;
    }
    if (hasPolicySummaryInfo()) {
      hash = (37 * hash) + POLICY_SUMMARY_INFO_FIELD_NUMBER;
      hash = (53 * hash) + getPolicySummaryInfo().hashCode();
    }
    hash = (29 * hash) + getUnknownFields().hashCode();
    memoizedHashCode = hash;
    return hash;
  }

  // --- Standard parseFrom overloads for each supported input representation. ---
  public static com.google.ads.googleads.v21.resources.AssetFieldTypePolicySummary parseFrom(
      java.nio.ByteBuffer data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.ads.googleads.v21.resources.AssetFieldTypePolicySummary parseFrom(
      java.nio.ByteBuffer data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.ads.googleads.v21.resources.AssetFieldTypePolicySummary parseFrom(
      com.google.protobuf.ByteString data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.ads.googleads.v21.resources.AssetFieldTypePolicySummary parseFrom(
      com.google.protobuf.ByteString data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.ads.googleads.v21.resources.AssetFieldTypePolicySummary parseFrom(byte[] data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.ads.googleads.v21.resources.AssetFieldTypePolicySummary parseFrom(
      byte[] data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.ads.googleads.v21.resources.AssetFieldTypePolicySummary parseFrom(java.io.InputStream input)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3
        .parseWithIOException(PARSER, input);
  }
  public static com.google.ads.googleads.v21.resources.AssetFieldTypePolicySummary parseFrom(
      java.io.InputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3
        .parseWithIOException(PARSER, input, extensionRegistry);
  }
  public static com.google.ads.googleads.v21.resources.AssetFieldTypePolicySummary parseDelimitedFrom(java.io.InputStream input)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3
        .parseDelimitedWithIOException(PARSER, input);
  }
  public static com.google.ads.googleads.v21.resources.AssetFieldTypePolicySummary parseDelimitedFrom(
      java.io.InputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3
        .parseDelimitedWithIOException(PARSER, input, extensionRegistry);
  }
  public static com.google.ads.googleads.v21.resources.AssetFieldTypePolicySummary parseFrom(
      com.google.protobuf.CodedInputStream input)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3
        .parseWithIOException(PARSER, input);
  }
  public static com.google.ads.googleads.v21.resources.AssetFieldTypePolicySummary parseFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3
        .parseWithIOException(PARSER, input, extensionRegistry);
  }

  @java.lang.Override
  public Builder newBuilderForType() { return newBuilder(); }
  public static Builder newBuilder() {
    return DEFAULT_INSTANCE.toBuilder();
  }
  public static Builder newBuilder(com.google.ads.googleads.v21.resources.AssetFieldTypePolicySummary prototype) {
    return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
  }
  @java.lang.Override
  public Builder toBuilder() {
    return this == DEFAULT_INSTANCE
        ? new Builder() : new Builder().mergeFrom(this);
  }

  @java.lang.Override
  protected Builder newBuilderForType(
      com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
    Builder builder = new Builder(parent);
    return builder;
  }
  /**
   * <pre>
   * Contains policy information for an asset under AssetFieldType context.
   * </pre>
   *
   * Protobuf type {@code google.ads.googleads.v21.resources.AssetFieldTypePolicySummary}
   */
  public static final class Builder extends
      com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements
      // @@protoc_insertion_point(builder_implements:google.ads.googleads.v21.resources.AssetFieldTypePolicySummary)
      com.google.ads.googleads.v21.resources.AssetFieldTypePolicySummaryOrBuilder {
    public static final com.google.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return com.google.ads.googleads.v21.resources.AssetProto.internal_static_google_ads_googleads_v21_resources_AssetFieldTypePolicySummary_descriptor;
    }

    @java.lang.Override
    protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return com.google.ads.googleads.v21.resources.AssetProto.internal_static_google_ads_googleads_v21_resources_AssetFieldTypePolicySummary_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              com.google.ads.googleads.v21.resources.AssetFieldTypePolicySummary.class, com.google.ads.googleads.v21.resources.AssetFieldTypePolicySummary.Builder.class);
    }

    // Construct using com.google.ads.googleads.v21.resources.AssetFieldTypePolicySummary.newBuilder()
    private Builder() {
      maybeForceBuilderInitialization();
    }

    private Builder(
        com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
      super(parent);
      maybeForceBuilderInitialization();
    }
    private void maybeForceBuilderInitialization() {
      if (com.google.protobuf.GeneratedMessageV3
              .alwaysUseFieldBuilders) {
        getPolicySummaryInfoFieldBuilder();
      }
    }
    // Resets all fields (and presence bits) to their defaults.
    @java.lang.Override
    public Builder clear() {
      super.clear();
      bitField0_ = 0;
      assetFieldType_ = 0;
      assetSource_ = 0;
      policySummaryInfo_ = null;
      if (policySummaryInfoBuilder_ != null) {
        policySummaryInfoBuilder_.dispose();
        policySummaryInfoBuilder_ = null;
      }
      return this;
    }

    @java.lang.Override
    public com.google.protobuf.Descriptors.Descriptor
        getDescriptorForType() {
      return com.google.ads.googleads.v21.resources.AssetProto.internal_static_google_ads_googleads_v21_resources_AssetFieldTypePolicySummary_descriptor;
    }

    @java.lang.Override
    public com.google.ads.googleads.v21.resources.AssetFieldTypePolicySummary getDefaultInstanceForType() {
      return com.google.ads.googleads.v21.resources.AssetFieldTypePolicySummary.getDefaultInstance();
    }

    @java.lang.Override
    public com.google.ads.googleads.v21.resources.AssetFieldTypePolicySummary build() {
      com.google.ads.googleads.v21.resources.AssetFieldTypePolicySummary result = buildPartial();
      if (!result.isInitialized()) {
        throw newUninitializedMessageException(result);
      }
      return result;
    }

    @java.lang.Override
    public com.google.ads.googleads.v21.resources.AssetFieldTypePolicySummary buildPartial() {
      com.google.ads.googleads.v21.resources.AssetFieldTypePolicySummary result = new com.google.ads.googleads.v21.resources.AssetFieldTypePolicySummary(this);
      if (bitField0_ != 0) { buildPartial0(result); }
      onBuilt();
      return result;
    }

    // Copies only the fields whose presence bit is set from this builder into result.
    private void buildPartial0(com.google.ads.googleads.v21.resources.AssetFieldTypePolicySummary result) {
      int from_bitField0_ = bitField0_;
      int to_bitField0_ = 0;
      if (((from_bitField0_ & 0x00000001) != 0)) {
        result.assetFieldType_ = assetFieldType_;
        to_bitField0_ |= 0x00000001;
      }
      if (((from_bitField0_ & 0x00000002) != 0)) {
        result.assetSource_ = assetSource_;
        to_bitField0_ |= 0x00000002;
      }
      if (((from_bitField0_ & 0x00000004) != 0)) {
        result.policySummaryInfo_ = policySummaryInfoBuilder_ == null
            ? policySummaryInfo_
            : policySummaryInfoBuilder_.build();
        to_bitField0_ |= 0x00000004;
      }
      result.bitField0_ |= to_bitField0_;
    }

    @java.lang.Override
    public Builder clone() {
      return super.clone();
    }
    @java.lang.Override
    public Builder setField(
        com.google.protobuf.Descriptors.FieldDescriptor field,
        java.lang.Object value) {
      return super.setField(field, value);
    }
    @java.lang.Override
    public Builder clearField(
        com.google.protobuf.Descriptors.FieldDescriptor field) {
      return super.clearField(field);
    }
    @java.lang.Override
    public Builder clearOneof(
        com.google.protobuf.Descriptors.OneofDescriptor oneof) {
      return super.clearOneof(oneof);
    }
    @java.lang.Override
    public Builder setRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field,
        int index, java.lang.Object value) {
      return super.setRepeatedField(field, index, value);
    }
    @java.lang.Override
    public Builder addRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field,
        java.lang.Object value) {
      return super.addRepeatedField(field, value);
    }
    @java.lang.Override
    public Builder mergeFrom(com.google.protobuf.Message other) {
      if (other instanceof com.google.ads.googleads.v21.resources.AssetFieldTypePolicySummary) {
        return mergeFrom((com.google.ads.googleads.v21.resources.AssetFieldTypePolicySummary)other);
      } else {
        super.mergeFrom(other);
        return this;
      }
    }

    // Merges set fields from another message of the same type; unset fields are ignored.
    public Builder mergeFrom(com.google.ads.googleads.v21.resources.AssetFieldTypePolicySummary other) {
      if (other == com.google.ads.googleads.v21.resources.AssetFieldTypePolicySummary.getDefaultInstance()) return this;
      if (other.hasAssetFieldType()) {
        setAssetFieldType(other.getAssetFieldType());
      }
      if (other.hasAssetSource()) {
        setAssetSource(other.getAssetSource());
      }
      if (other.hasPolicySummaryInfo()) {
        mergePolicySummaryInfo(other.getPolicySummaryInfo());
      }
      this.mergeUnknownFields(other.getUnknownFields());
      onChanged();
      return this;
    }

    @java.lang.Override
    public final boolean isInitialized() {
      return true;
    }

    // Wire-format parse loop. Tag values encode (field number << 3) | wire type:
    // 8 = field 1 varint, 16 = field 2 varint, 26 = field 3 length-delimited.
    @java.lang.Override
    public Builder mergeFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      if (extensionRegistry == null) {
        throw new java.lang.NullPointerException();
      }
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              done = true;
              break;
            case 8: {
              assetFieldType_ = input.readEnum();
              bitField0_ |= 0x00000001;
              break;
            } // case 8
            case 16: {
              assetSource_ = input.readEnum();
              bitField0_ |= 0x00000002;
              break;
            } // case 16
            case 26: {
              input.readMessage(
                  getPolicySummaryInfoFieldBuilder().getBuilder(),
                  extensionRegistry);
              bitField0_ |= 0x00000004;
              break;
            } // case 26
            default: {
              if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                done = true; // was an endgroup tag
              }
              break;
            } // default:
          } // switch (tag)
        } // while (!done)
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.unwrapIOException();
      } finally {
        onChanged();
      } // finally
      return this;
    }
    private int bitField0_;

    private int assetFieldType_ = 0;
    /**
     * <pre>
     * Output only. FieldType of this asset.
     * </pre>
     *
     * <code>optional .google.ads.googleads.v21.enums.AssetFieldTypeEnum.AssetFieldType asset_field_type = 1 [(.google.api.field_behavior) = OUTPUT_ONLY];</code>
     * @return Whether the assetFieldType field is set.
     */
    @java.lang.Override public boolean hasAssetFieldType() {
      return ((bitField0_ & 0x00000001) != 0);
    }
    /**
     * <pre>
     * Output only. FieldType of this asset.
     * </pre>
     *
     * <code>optional .google.ads.googleads.v21.enums.AssetFieldTypeEnum.AssetFieldType asset_field_type = 1 [(.google.api.field_behavior) = OUTPUT_ONLY];</code>
     * @return The enum numeric value on the wire for assetFieldType.
     */
    @java.lang.Override public int getAssetFieldTypeValue() {
      return assetFieldType_;
    }
    /**
     * <pre>
     * Output only. FieldType of this asset.
     * </pre>
     *
     * <code>optional .google.ads.googleads.v21.enums.AssetFieldTypeEnum.AssetFieldType asset_field_type = 1 [(.google.api.field_behavior) = OUTPUT_ONLY];</code>
     * @param value The enum numeric value on the wire for assetFieldType to set.
     * @return This builder for chaining.
     */
    public Builder setAssetFieldTypeValue(int value) {
      assetFieldType_ = value;
      bitField0_ |= 0x00000001;
      onChanged();
      return this;
    }
    /**
     * <pre>
     * Output only. FieldType of this asset.
     * </pre>
     *
     * <code>optional .google.ads.googleads.v21.enums.AssetFieldTypeEnum.AssetFieldType asset_field_type = 1 [(.google.api.field_behavior) = OUTPUT_ONLY];</code>
     * @return The assetFieldType.
     */
    @java.lang.Override
    public com.google.ads.googleads.v21.enums.AssetFieldTypeEnum.AssetFieldType getAssetFieldType() {
      com.google.ads.googleads.v21.enums.AssetFieldTypeEnum.AssetFieldType result = com.google.ads.googleads.v21.enums.AssetFieldTypeEnum.AssetFieldType.forNumber(assetFieldType_);
      return result == null ? com.google.ads.googleads.v21.enums.AssetFieldTypeEnum.AssetFieldType.UNRECOGNIZED : result;
    }
    /**
     * <pre>
     * Output only. FieldType of this asset.
     * </pre>
     *
     * <code>optional .google.ads.googleads.v21.enums.AssetFieldTypeEnum.AssetFieldType asset_field_type = 1 [(.google.api.field_behavior) = OUTPUT_ONLY];</code>
     * @param value The assetFieldType to set.
     * @return This builder for chaining.
     */
    public Builder setAssetFieldType(com.google.ads.googleads.v21.enums.AssetFieldTypeEnum.AssetFieldType value) {
      if (value == null) {
        throw new NullPointerException();
      }
      bitField0_ |= 0x00000001;
      assetFieldType_ = value.getNumber();
      onChanged();
      return this;
    }
    /**
     * <pre>
     * Output only. FieldType of this asset.
     * </pre>
     *
     * <code>optional .google.ads.googleads.v21.enums.AssetFieldTypeEnum.AssetFieldType asset_field_type = 1 [(.google.api.field_behavior) = OUTPUT_ONLY];</code>
     * @return This builder for chaining.
     */
    public Builder clearAssetFieldType() {
      bitField0_ = (bitField0_ & ~0x00000001);
      assetFieldType_ = 0;
      onChanged();
      return this;
    }

    private int assetSource_ = 0;
    /**
     * <pre>
     * Output only. Source of this asset.
     * </pre>
     *
     * <code>optional .google.ads.googleads.v21.enums.AssetSourceEnum.AssetSource asset_source = 2 [(.google.api.field_behavior) = OUTPUT_ONLY];</code>
     * @return Whether the assetSource field is set.
     */
    @java.lang.Override public boolean hasAssetSource() {
      return ((bitField0_ & 0x00000002) != 0);
    }
    /**
     * <pre>
     * Output only. Source of this asset.
     * </pre>
     *
     * <code>optional .google.ads.googleads.v21.enums.AssetSourceEnum.AssetSource asset_source = 2 [(.google.api.field_behavior) = OUTPUT_ONLY];</code>
     * @return The enum numeric value on the wire for assetSource.
     */
    @java.lang.Override public int getAssetSourceValue() {
      return assetSource_;
    }
    /**
     * <pre>
     * Output only. Source of this asset.
     * </pre>
     *
     * <code>optional .google.ads.googleads.v21.enums.AssetSourceEnum.AssetSource asset_source = 2 [(.google.api.field_behavior) = OUTPUT_ONLY];</code>
     * @param value The enum numeric value on the wire for assetSource to set.
     * @return This builder for chaining.
     */
    public Builder setAssetSourceValue(int value) {
      assetSource_ = value;
      bitField0_ |= 0x00000002;
      onChanged();
      return this;
    }
    /**
     * <pre>
     * Output only. Source of this asset.
     * </pre>
     *
     * <code>optional .google.ads.googleads.v21.enums.AssetSourceEnum.AssetSource asset_source = 2 [(.google.api.field_behavior) = OUTPUT_ONLY];</code>
     * @return The assetSource.
     */
    @java.lang.Override
    public com.google.ads.googleads.v21.enums.AssetSourceEnum.AssetSource getAssetSource() {
      com.google.ads.googleads.v21.enums.AssetSourceEnum.AssetSource result = com.google.ads.googleads.v21.enums.AssetSourceEnum.AssetSource.forNumber(assetSource_);
      return result == null ? com.google.ads.googleads.v21.enums.AssetSourceEnum.AssetSource.UNRECOGNIZED : result;
    }
    /**
     * <pre>
     * Output only. Source of this asset.
     * </pre>
     *
     * <code>optional .google.ads.googleads.v21.enums.AssetSourceEnum.AssetSource asset_source = 2 [(.google.api.field_behavior) = OUTPUT_ONLY];</code>
     * @param value The assetSource to set.
     * @return This builder for chaining.
     */
    public Builder setAssetSource(com.google.ads.googleads.v21.enums.AssetSourceEnum.AssetSource value) {
      if (value == null) {
        throw new NullPointerException();
      }
      bitField0_ |= 0x00000002;
      assetSource_ = value.getNumber();
      onChanged();
      return this;
    }
    /**
     * <pre>
     * Output only. Source of this asset.
     * </pre>
     *
     * <code>optional .google.ads.googleads.v21.enums.AssetSourceEnum.AssetSource asset_source = 2 [(.google.api.field_behavior) = OUTPUT_ONLY];</code>
     * @return This builder for chaining.
     */
    public Builder clearAssetSource() {
      bitField0_ = (bitField0_ & ~0x00000002);
      assetSource_ = 0;
      onChanged();
      return this;
    }

    // policy_summary_info is held either directly (policySummaryInfo_) or via the
    // nested single-field builder (policySummaryInfoBuilder_), never both at once.
    private com.google.ads.googleads.v21.resources.AssetPolicySummary policySummaryInfo_;
    private com.google.protobuf.SingleFieldBuilderV3<
        com.google.ads.googleads.v21.resources.AssetPolicySummary, com.google.ads.googleads.v21.resources.AssetPolicySummary.Builder, com.google.ads.googleads.v21.resources.AssetPolicySummaryOrBuilder> policySummaryInfoBuilder_;
    /**
     * <pre>
     * Output only. Policy summary.
     * </pre>
     *
     * <code>optional .google.ads.googleads.v21.resources.AssetPolicySummary policy_summary_info = 3 [(.google.api.field_behavior) = OUTPUT_ONLY];</code>
     * @return Whether the policySummaryInfo field is set.
     */
    public boolean hasPolicySummaryInfo() {
      return ((bitField0_ & 0x00000004) != 0);
    }
    /**
     * <pre>
     * Output only. Policy summary.
     * </pre>
     *
     * <code>optional .google.ads.googleads.v21.resources.AssetPolicySummary policy_summary_info = 3 [(.google.api.field_behavior) = OUTPUT_ONLY];</code>
     * @return The policySummaryInfo.
     */
    public com.google.ads.googleads.v21.resources.AssetPolicySummary getPolicySummaryInfo() {
      if (policySummaryInfoBuilder_ == null) {
        return policySummaryInfo_ == null ? com.google.ads.googleads.v21.resources.AssetPolicySummary.getDefaultInstance() : policySummaryInfo_;
      } else {
        return policySummaryInfoBuilder_.getMessage();
      }
    }
    /**
     * <pre>
     * Output only. Policy summary.
     * </pre>
     *
     * <code>optional .google.ads.googleads.v21.resources.AssetPolicySummary policy_summary_info = 3 [(.google.api.field_behavior) = OUTPUT_ONLY];</code>
     */
    public Builder setPolicySummaryInfo(com.google.ads.googleads.v21.resources.AssetPolicySummary value) {
      if (policySummaryInfoBuilder_ == null) {
        if (value == null) {
          throw new NullPointerException();
        }
        policySummaryInfo_ = value;
      } else {
        policySummaryInfoBuilder_.setMessage(value);
      }
      bitField0_ |= 0x00000004;
      onChanged();
      return this;
    }
    /**
     * <pre>
     * Output only. Policy summary.
     * </pre>
     *
     * <code>optional .google.ads.googleads.v21.resources.AssetPolicySummary policy_summary_info = 3 [(.google.api.field_behavior) = OUTPUT_ONLY];</code>
     */
    public Builder setPolicySummaryInfo(
        com.google.ads.googleads.v21.resources.AssetPolicySummary.Builder builderForValue) {
      if (policySummaryInfoBuilder_ == null) {
        policySummaryInfo_ = builderForValue.build();
      } else {
        policySummaryInfoBuilder_.setMessage(builderForValue.build());
      }
      bitField0_ |= 0x00000004;
      onChanged();
      return this;
    }
    /**
     * <pre>
     * Output only. Policy summary.
     * </pre>
     *
     * <code>optional .google.ads.googleads.v21.resources.AssetPolicySummary policy_summary_info = 3 [(.google.api.field_behavior) = OUTPUT_ONLY];</code>
     */
    public Builder mergePolicySummaryInfo(com.google.ads.googleads.v21.resources.AssetPolicySummary value) {
      if (policySummaryInfoBuilder_ == null) {
        // Merge field-by-field only when a non-default value is already present;
        // otherwise simply adopt the incoming message.
        if (((bitField0_ & 0x00000004) != 0) &&
          policySummaryInfo_ != null &&
          policySummaryInfo_ != com.google.ads.googleads.v21.resources.AssetPolicySummary.getDefaultInstance()) {
          getPolicySummaryInfoBuilder().mergeFrom(value);
        } else {
          policySummaryInfo_ = value;
        }
      } else {
        policySummaryInfoBuilder_.mergeFrom(value);
      }
      if (policySummaryInfo_ != null) {
        bitField0_ |= 0x00000004;
        onChanged();
      }
      return this;
    }
    /**
     * <pre>
     * Output only. Policy summary.
     * </pre>
     *
     * <code>optional .google.ads.googleads.v21.resources.AssetPolicySummary policy_summary_info = 3 [(.google.api.field_behavior) = OUTPUT_ONLY];</code>
     */
    public Builder clearPolicySummaryInfo() {
      bitField0_ = (bitField0_ & ~0x00000004);
      policySummaryInfo_ = null;
      if (policySummaryInfoBuilder_ != null) {
        policySummaryInfoBuilder_.dispose();
        policySummaryInfoBuilder_ = null;
      }
      onChanged();
      return this;
    }
    /**
     * <pre>
     * Output only. Policy summary.
     * </pre>
     *
     * <code>optional .google.ads.googleads.v21.resources.AssetPolicySummary policy_summary_info = 3 [(.google.api.field_behavior) = OUTPUT_ONLY];</code>
     */
    public com.google.ads.googleads.v21.resources.AssetPolicySummary.Builder getPolicySummaryInfoBuilder() {
      bitField0_ |= 0x00000004;
      onChanged();
      return getPolicySummaryInfoFieldBuilder().getBuilder();
    }
    /**
     * <pre>
     * Output only. Policy summary.
     * </pre>
     *
     * <code>optional .google.ads.googleads.v21.resources.AssetPolicySummary policy_summary_info = 3 [(.google.api.field_behavior) = OUTPUT_ONLY];</code>
     */
    public com.google.ads.googleads.v21.resources.AssetPolicySummaryOrBuilder getPolicySummaryInfoOrBuilder() {
      if (policySummaryInfoBuilder_ != null) {
        return policySummaryInfoBuilder_.getMessageOrBuilder();
      } else {
        return policySummaryInfo_ == null ?
            com.google.ads.googleads.v21.resources.AssetPolicySummary.getDefaultInstance() : policySummaryInfo_;
      }
    }
    /**
     * <pre>
     * Output only. Policy summary.
     * </pre>
     *
     * <code>optional .google.ads.googleads.v21.resources.AssetPolicySummary policy_summary_info = 3 [(.google.api.field_behavior) = OUTPUT_ONLY];</code>
     */
    private com.google.protobuf.SingleFieldBuilderV3<
        com.google.ads.googleads.v21.resources.AssetPolicySummary, com.google.ads.googleads.v21.resources.AssetPolicySummary.Builder, com.google.ads.googleads.v21.resources.AssetPolicySummaryOrBuilder>
        getPolicySummaryInfoFieldBuilder() {
      if (policySummaryInfoBuilder_ == null) {
        policySummaryInfoBuilder_ = new com.google.protobuf.SingleFieldBuilderV3<
            com.google.ads.googleads.v21.resources.AssetPolicySummary, com.google.ads.googleads.v21.resources.AssetPolicySummary.Builder, com.google.ads.googleads.v21.resources.AssetPolicySummaryOrBuilder>(
                getPolicySummaryInfo(),
                getParentForChildren(),
                isClean());
        policySummaryInfo_ = null;
      }
      return policySummaryInfoBuilder_;
    }
    @java.lang.Override
    public final Builder setUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.setUnknownFields(unknownFields);
    }

    @java.lang.Override
    public final Builder mergeUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.mergeUnknownFields(unknownFields);
    }


    // @@protoc_insertion_point(builder_scope:google.ads.googleads.v21.resources.AssetFieldTypePolicySummary)
  }

  // @@protoc_insertion_point(class_scope:google.ads.googleads.v21.resources.AssetFieldTypePolicySummary)
  // Shared immutable default instance; also the prototype for newBuilder().
  private static final com.google.ads.googleads.v21.resources.AssetFieldTypePolicySummary DEFAULT_INSTANCE;
  static {
    DEFAULT_INSTANCE = new com.google.ads.googleads.v21.resources.AssetFieldTypePolicySummary();
  }

  public static com.google.ads.googleads.v21.resources.AssetFieldTypePolicySummary getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }

  // Wire-format parser; attaches the partially-built message to any parse exception.
  private static final com.google.protobuf.Parser<AssetFieldTypePolicySummary>
      PARSER = new com.google.protobuf.AbstractParser<AssetFieldTypePolicySummary>() {
    @java.lang.Override
    public AssetFieldTypePolicySummary parsePartialFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      Builder builder = newBuilder();
      try {
        builder.mergeFrom(input, extensionRegistry);
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.setUnfinishedMessage(builder.buildPartial());
      } catch (com.google.protobuf.UninitializedMessageException e) {
        throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
      } catch (java.io.IOException e) {
        throw new com.google.protobuf.InvalidProtocolBufferException(e)
            .setUnfinishedMessage(builder.buildPartial());
      }
      return builder.buildPartial();
    }
  };

  public static com.google.protobuf.Parser<AssetFieldTypePolicySummary> parser() {
    return PARSER;
  }

  @java.lang.Override
  public com.google.protobuf.Parser<AssetFieldTypePolicySummary> getParserForType() {
    return PARSER;
  }

  @java.lang.Override
  public com.google.ads.googleads.v21.resources.AssetFieldTypePolicySummary getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }

}
|
googleapis/google-cloud-java | 36,977 | java-retail/proto-google-cloud-retail-v2beta/src/main/java/com/google/cloud/retail/v2beta/ListProductsResponse.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/retail/v2beta/product_service.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.retail.v2beta;
/**
*
*
* <pre>
* Response message for
* [ProductService.ListProducts][google.cloud.retail.v2beta.ProductService.ListProducts]
* method.
* </pre>
*
* Protobuf type {@code google.cloud.retail.v2beta.ListProductsResponse}
*/
public final class ListProductsResponse extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.retail.v2beta.ListProductsResponse)
ListProductsResponseOrBuilder {
private static final long serialVersionUID = 0L;
// Use ListProductsResponse.newBuilder() to construct.
private ListProductsResponse(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private ListProductsResponse() {
products_ = java.util.Collections.emptyList();
nextPageToken_ = "";
}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
return new ListProductsResponse();
}
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.retail.v2beta.ProductServiceProto
.internal_static_google_cloud_retail_v2beta_ListProductsResponse_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.retail.v2beta.ProductServiceProto
.internal_static_google_cloud_retail_v2beta_ListProductsResponse_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.retail.v2beta.ListProductsResponse.class,
com.google.cloud.retail.v2beta.ListProductsResponse.Builder.class);
}
public static final int PRODUCTS_FIELD_NUMBER = 1;
@SuppressWarnings("serial")
private java.util.List<com.google.cloud.retail.v2beta.Product> products_;
/**
*
*
* <pre>
* The [Product][google.cloud.retail.v2beta.Product]s.
* </pre>
*
* <code>repeated .google.cloud.retail.v2beta.Product products = 1;</code>
*/
@java.lang.Override
public java.util.List<com.google.cloud.retail.v2beta.Product> getProductsList() {
return products_;
}
/**
*
*
* <pre>
* The [Product][google.cloud.retail.v2beta.Product]s.
* </pre>
*
* <code>repeated .google.cloud.retail.v2beta.Product products = 1;</code>
*/
@java.lang.Override
public java.util.List<? extends com.google.cloud.retail.v2beta.ProductOrBuilder>
getProductsOrBuilderList() {
return products_;
}
/**
*
*
* <pre>
* The [Product][google.cloud.retail.v2beta.Product]s.
* </pre>
*
* <code>repeated .google.cloud.retail.v2beta.Product products = 1;</code>
*/
@java.lang.Override
public int getProductsCount() {
return products_.size();
}
/**
*
*
* <pre>
* The [Product][google.cloud.retail.v2beta.Product]s.
* </pre>
*
* <code>repeated .google.cloud.retail.v2beta.Product products = 1;</code>
*/
@java.lang.Override
public com.google.cloud.retail.v2beta.Product getProducts(int index) {
return products_.get(index);
}
/**
*
*
* <pre>
* The [Product][google.cloud.retail.v2beta.Product]s.
* </pre>
*
* <code>repeated .google.cloud.retail.v2beta.Product products = 1;</code>
*/
@java.lang.Override
public com.google.cloud.retail.v2beta.ProductOrBuilder getProductsOrBuilder(int index) {
return products_.get(index);
}
public static final int NEXT_PAGE_TOKEN_FIELD_NUMBER = 2;
@SuppressWarnings("serial")
private volatile java.lang.Object nextPageToken_ = "";
/**
*
*
* <pre>
* A token that can be sent as
* [ListProductsRequest.page_token][google.cloud.retail.v2beta.ListProductsRequest.page_token]
* to retrieve the next page. If this field is omitted, there are no
* subsequent pages.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @return The nextPageToken.
*/
@java.lang.Override
public java.lang.String getNextPageToken() {
java.lang.Object ref = nextPageToken_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
nextPageToken_ = s;
return s;
}
}
/**
*
*
* <pre>
* A token that can be sent as
* [ListProductsRequest.page_token][google.cloud.retail.v2beta.ListProductsRequest.page_token]
* to retrieve the next page. If this field is omitted, there are no
* subsequent pages.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @return The bytes for nextPageToken.
*/
@java.lang.Override
public com.google.protobuf.ByteString getNextPageTokenBytes() {
java.lang.Object ref = nextPageToken_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
nextPageToken_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
@java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
for (int i = 0; i < products_.size(); i++) {
output.writeMessage(1, products_.get(i));
}
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 2, nextPageToken_);
}
getUnknownFields().writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
for (int i = 0; i < products_.size(); i++) {
size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, products_.get(i));
}
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) {
size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, nextPageToken_);
}
size += getUnknownFields().getSerializedSize();
memoizedSize = size;
return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof com.google.cloud.retail.v2beta.ListProductsResponse)) {
return super.equals(obj);
}
com.google.cloud.retail.v2beta.ListProductsResponse other =
(com.google.cloud.retail.v2beta.ListProductsResponse) obj;
if (!getProductsList().equals(other.getProductsList())) return false;
if (!getNextPageToken().equals(other.getNextPageToken())) return false;
if (!getUnknownFields().equals(other.getUnknownFields())) return false;
return true;
}
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
if (getProductsCount() > 0) {
hash = (37 * hash) + PRODUCTS_FIELD_NUMBER;
hash = (53 * hash) + getProductsList().hashCode();
}
hash = (37 * hash) + NEXT_PAGE_TOKEN_FIELD_NUMBER;
hash = (53 * hash) + getNextPageToken().hashCode();
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
public static com.google.cloud.retail.v2beta.ListProductsResponse parseFrom(
java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.retail.v2beta.ListProductsResponse parseFrom(
java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.retail.v2beta.ListProductsResponse parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.retail.v2beta.ListProductsResponse parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.retail.v2beta.ListProductsResponse parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.retail.v2beta.ListProductsResponse parseFrom(
byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.retail.v2beta.ListProductsResponse parseFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.retail.v2beta.ListProductsResponse parseFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.retail.v2beta.ListProductsResponse parseDelimitedFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
}
public static com.google.cloud.retail.v2beta.ListProductsResponse parseDelimitedFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.retail.v2beta.ListProductsResponse parseFrom(
com.google.protobuf.CodedInputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.retail.v2beta.ListProductsResponse parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() {
return newBuilder();
}
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(com.google.cloud.retail.v2beta.ListProductsResponse prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
*
*
* <pre>
* Response message for
* [ProductService.ListProducts][google.cloud.retail.v2beta.ProductService.ListProducts]
* method.
* </pre>
*
* Protobuf type {@code google.cloud.retail.v2beta.ListProductsResponse}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.cloud.retail.v2beta.ListProductsResponse)
com.google.cloud.retail.v2beta.ListProductsResponseOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.retail.v2beta.ProductServiceProto
.internal_static_google_cloud_retail_v2beta_ListProductsResponse_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.retail.v2beta.ProductServiceProto
.internal_static_google_cloud_retail_v2beta_ListProductsResponse_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.retail.v2beta.ListProductsResponse.class,
com.google.cloud.retail.v2beta.ListProductsResponse.Builder.class);
}
// Construct using com.google.cloud.retail.v2beta.ListProductsResponse.newBuilder()
private Builder() {}
private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
}
@java.lang.Override
public Builder clear() {
super.clear();
bitField0_ = 0;
if (productsBuilder_ == null) {
products_ = java.util.Collections.emptyList();
} else {
products_ = null;
productsBuilder_.clear();
}
bitField0_ = (bitField0_ & ~0x00000001);
nextPageToken_ = "";
return this;
}
@java.lang.Override
public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
return com.google.cloud.retail.v2beta.ProductServiceProto
.internal_static_google_cloud_retail_v2beta_ListProductsResponse_descriptor;
}
@java.lang.Override
public com.google.cloud.retail.v2beta.ListProductsResponse getDefaultInstanceForType() {
return com.google.cloud.retail.v2beta.ListProductsResponse.getDefaultInstance();
}
@java.lang.Override
public com.google.cloud.retail.v2beta.ListProductsResponse build() {
com.google.cloud.retail.v2beta.ListProductsResponse result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public com.google.cloud.retail.v2beta.ListProductsResponse buildPartial() {
com.google.cloud.retail.v2beta.ListProductsResponse result =
new com.google.cloud.retail.v2beta.ListProductsResponse(this);
buildPartialRepeatedFields(result);
if (bitField0_ != 0) {
buildPartial0(result);
}
onBuilt();
return result;
}
private void buildPartialRepeatedFields(
com.google.cloud.retail.v2beta.ListProductsResponse result) {
if (productsBuilder_ == null) {
if (((bitField0_ & 0x00000001) != 0)) {
products_ = java.util.Collections.unmodifiableList(products_);
bitField0_ = (bitField0_ & ~0x00000001);
}
result.products_ = products_;
} else {
result.products_ = productsBuilder_.build();
}
}
private void buildPartial0(com.google.cloud.retail.v2beta.ListProductsResponse result) {
int from_bitField0_ = bitField0_;
if (((from_bitField0_ & 0x00000002) != 0)) {
result.nextPageToken_ = nextPageToken_;
}
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof com.google.cloud.retail.v2beta.ListProductsResponse) {
return mergeFrom((com.google.cloud.retail.v2beta.ListProductsResponse) other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(com.google.cloud.retail.v2beta.ListProductsResponse other) {
if (other == com.google.cloud.retail.v2beta.ListProductsResponse.getDefaultInstance())
return this;
if (productsBuilder_ == null) {
if (!other.products_.isEmpty()) {
if (products_.isEmpty()) {
products_ = other.products_;
bitField0_ = (bitField0_ & ~0x00000001);
} else {
ensureProductsIsMutable();
products_.addAll(other.products_);
}
onChanged();
}
} else {
if (!other.products_.isEmpty()) {
if (productsBuilder_.isEmpty()) {
productsBuilder_.dispose();
productsBuilder_ = null;
products_ = other.products_;
bitField0_ = (bitField0_ & ~0x00000001);
productsBuilder_ =
com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders
? getProductsFieldBuilder()
: null;
} else {
productsBuilder_.addAllMessages(other.products_);
}
}
}
if (!other.getNextPageToken().isEmpty()) {
nextPageToken_ = other.nextPageToken_;
bitField0_ |= 0x00000002;
onChanged();
}
this.mergeUnknownFields(other.getUnknownFields());
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
return true;
}
@java.lang.Override
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
case 10:
{
com.google.cloud.retail.v2beta.Product m =
input.readMessage(
com.google.cloud.retail.v2beta.Product.parser(), extensionRegistry);
if (productsBuilder_ == null) {
ensureProductsIsMutable();
products_.add(m);
} else {
productsBuilder_.addMessage(m);
}
break;
} // case 10
case 18:
{
nextPageToken_ = input.readStringRequireUtf8();
bitField0_ |= 0x00000002;
break;
} // case 18
default:
{
if (!super.parseUnknownField(input, extensionRegistry, tag)) {
done = true; // was an endgroup tag
}
break;
} // default:
} // switch (tag)
} // while (!done)
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.unwrapIOException();
} finally {
onChanged();
} // finally
return this;
}
private int bitField0_;
private java.util.List<com.google.cloud.retail.v2beta.Product> products_ =
java.util.Collections.emptyList();
private void ensureProductsIsMutable() {
if (!((bitField0_ & 0x00000001) != 0)) {
products_ = new java.util.ArrayList<com.google.cloud.retail.v2beta.Product>(products_);
bitField0_ |= 0x00000001;
}
}
private com.google.protobuf.RepeatedFieldBuilderV3<
com.google.cloud.retail.v2beta.Product,
com.google.cloud.retail.v2beta.Product.Builder,
com.google.cloud.retail.v2beta.ProductOrBuilder>
productsBuilder_;
/**
*
*
* <pre>
* The [Product][google.cloud.retail.v2beta.Product]s.
* </pre>
*
* <code>repeated .google.cloud.retail.v2beta.Product products = 1;</code>
*/
public java.util.List<com.google.cloud.retail.v2beta.Product> getProductsList() {
if (productsBuilder_ == null) {
return java.util.Collections.unmodifiableList(products_);
} else {
return productsBuilder_.getMessageList();
}
}
/**
*
*
* <pre>
* The [Product][google.cloud.retail.v2beta.Product]s.
* </pre>
*
* <code>repeated .google.cloud.retail.v2beta.Product products = 1;</code>
*/
public int getProductsCount() {
if (productsBuilder_ == null) {
return products_.size();
} else {
return productsBuilder_.getCount();
}
}
/**
*
*
* <pre>
* The [Product][google.cloud.retail.v2beta.Product]s.
* </pre>
*
* <code>repeated .google.cloud.retail.v2beta.Product products = 1;</code>
*/
public com.google.cloud.retail.v2beta.Product getProducts(int index) {
if (productsBuilder_ == null) {
return products_.get(index);
} else {
return productsBuilder_.getMessage(index);
}
}
/**
*
*
* <pre>
* The [Product][google.cloud.retail.v2beta.Product]s.
* </pre>
*
* <code>repeated .google.cloud.retail.v2beta.Product products = 1;</code>
*/
public Builder setProducts(int index, com.google.cloud.retail.v2beta.Product value) {
if (productsBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureProductsIsMutable();
products_.set(index, value);
onChanged();
} else {
productsBuilder_.setMessage(index, value);
}
return this;
}
/**
*
*
* <pre>
* The [Product][google.cloud.retail.v2beta.Product]s.
* </pre>
*
* <code>repeated .google.cloud.retail.v2beta.Product products = 1;</code>
*/
public Builder setProducts(
int index, com.google.cloud.retail.v2beta.Product.Builder builderForValue) {
if (productsBuilder_ == null) {
ensureProductsIsMutable();
products_.set(index, builderForValue.build());
onChanged();
} else {
productsBuilder_.setMessage(index, builderForValue.build());
}
return this;
}
/**
*
*
* <pre>
* The [Product][google.cloud.retail.v2beta.Product]s.
* </pre>
*
* <code>repeated .google.cloud.retail.v2beta.Product products = 1;</code>
*/
public Builder addProducts(com.google.cloud.retail.v2beta.Product value) {
if (productsBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureProductsIsMutable();
products_.add(value);
onChanged();
} else {
productsBuilder_.addMessage(value);
}
return this;
}
/**
*
*
* <pre>
* The [Product][google.cloud.retail.v2beta.Product]s.
* </pre>
*
* <code>repeated .google.cloud.retail.v2beta.Product products = 1;</code>
*/
public Builder addProducts(int index, com.google.cloud.retail.v2beta.Product value) {
if (productsBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureProductsIsMutable();
products_.add(index, value);
onChanged();
} else {
productsBuilder_.addMessage(index, value);
}
return this;
}
/**
*
*
* <pre>
* The [Product][google.cloud.retail.v2beta.Product]s.
* </pre>
*
* <code>repeated .google.cloud.retail.v2beta.Product products = 1;</code>
*/
public Builder addProducts(com.google.cloud.retail.v2beta.Product.Builder builderForValue) {
if (productsBuilder_ == null) {
ensureProductsIsMutable();
products_.add(builderForValue.build());
onChanged();
} else {
productsBuilder_.addMessage(builderForValue.build());
}
return this;
}
/**
*
*
* <pre>
* The [Product][google.cloud.retail.v2beta.Product]s.
* </pre>
*
* <code>repeated .google.cloud.retail.v2beta.Product products = 1;</code>
*/
public Builder addProducts(
int index, com.google.cloud.retail.v2beta.Product.Builder builderForValue) {
if (productsBuilder_ == null) {
ensureProductsIsMutable();
products_.add(index, builderForValue.build());
onChanged();
} else {
productsBuilder_.addMessage(index, builderForValue.build());
}
return this;
}
/**
*
*
* <pre>
* The [Product][google.cloud.retail.v2beta.Product]s.
* </pre>
*
* <code>repeated .google.cloud.retail.v2beta.Product products = 1;</code>
*/
public Builder addAllProducts(
java.lang.Iterable<? extends com.google.cloud.retail.v2beta.Product> values) {
if (productsBuilder_ == null) {
ensureProductsIsMutable();
com.google.protobuf.AbstractMessageLite.Builder.addAll(values, products_);
onChanged();
} else {
productsBuilder_.addAllMessages(values);
}
return this;
}
/**
*
*
* <pre>
* The [Product][google.cloud.retail.v2beta.Product]s.
* </pre>
*
* <code>repeated .google.cloud.retail.v2beta.Product products = 1;</code>
*/
public Builder clearProducts() {
if (productsBuilder_ == null) {
products_ = java.util.Collections.emptyList();
bitField0_ = (bitField0_ & ~0x00000001);
onChanged();
} else {
productsBuilder_.clear();
}
return this;
}
/**
*
*
* <pre>
* The [Product][google.cloud.retail.v2beta.Product]s.
* </pre>
*
* <code>repeated .google.cloud.retail.v2beta.Product products = 1;</code>
*/
public Builder removeProducts(int index) {
if (productsBuilder_ == null) {
ensureProductsIsMutable();
products_.remove(index);
onChanged();
} else {
productsBuilder_.remove(index);
}
return this;
}
/**
*
*
* <pre>
* The [Product][google.cloud.retail.v2beta.Product]s.
* </pre>
*
* <code>repeated .google.cloud.retail.v2beta.Product products = 1;</code>
*/
public com.google.cloud.retail.v2beta.Product.Builder getProductsBuilder(int index) {
return getProductsFieldBuilder().getBuilder(index);
}
/**
*
*
* <pre>
* The [Product][google.cloud.retail.v2beta.Product]s.
* </pre>
*
* <code>repeated .google.cloud.retail.v2beta.Product products = 1;</code>
*/
public com.google.cloud.retail.v2beta.ProductOrBuilder getProductsOrBuilder(int index) {
if (productsBuilder_ == null) {
return products_.get(index);
} else {
return productsBuilder_.getMessageOrBuilder(index);
}
}
/**
*
*
* <pre>
* The [Product][google.cloud.retail.v2beta.Product]s.
* </pre>
*
* <code>repeated .google.cloud.retail.v2beta.Product products = 1;</code>
*/
public java.util.List<? extends com.google.cloud.retail.v2beta.ProductOrBuilder>
getProductsOrBuilderList() {
if (productsBuilder_ != null) {
return productsBuilder_.getMessageOrBuilderList();
} else {
return java.util.Collections.unmodifiableList(products_);
}
}
/**
*
*
* <pre>
* The [Product][google.cloud.retail.v2beta.Product]s.
* </pre>
*
* <code>repeated .google.cloud.retail.v2beta.Product products = 1;</code>
*/
public com.google.cloud.retail.v2beta.Product.Builder addProductsBuilder() {
return getProductsFieldBuilder()
.addBuilder(com.google.cloud.retail.v2beta.Product.getDefaultInstance());
}
/**
*
*
* <pre>
* The [Product][google.cloud.retail.v2beta.Product]s.
* </pre>
*
* <code>repeated .google.cloud.retail.v2beta.Product products = 1;</code>
*/
public com.google.cloud.retail.v2beta.Product.Builder addProductsBuilder(int index) {
return getProductsFieldBuilder()
.addBuilder(index, com.google.cloud.retail.v2beta.Product.getDefaultInstance());
}
/**
*
*
* <pre>
* The [Product][google.cloud.retail.v2beta.Product]s.
* </pre>
*
* <code>repeated .google.cloud.retail.v2beta.Product products = 1;</code>
*/
public java.util.List<com.google.cloud.retail.v2beta.Product.Builder> getProductsBuilderList() {
return getProductsFieldBuilder().getBuilderList();
}
private com.google.protobuf.RepeatedFieldBuilderV3<
com.google.cloud.retail.v2beta.Product,
com.google.cloud.retail.v2beta.Product.Builder,
com.google.cloud.retail.v2beta.ProductOrBuilder>
getProductsFieldBuilder() {
if (productsBuilder_ == null) {
productsBuilder_ =
new com.google.protobuf.RepeatedFieldBuilderV3<
com.google.cloud.retail.v2beta.Product,
com.google.cloud.retail.v2beta.Product.Builder,
com.google.cloud.retail.v2beta.ProductOrBuilder>(
products_, ((bitField0_ & 0x00000001) != 0), getParentForChildren(), isClean());
products_ = null;
}
return productsBuilder_;
}
private java.lang.Object nextPageToken_ = "";
/**
*
*
* <pre>
* A token that can be sent as
* [ListProductsRequest.page_token][google.cloud.retail.v2beta.ListProductsRequest.page_token]
* to retrieve the next page. If this field is omitted, there are no
* subsequent pages.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @return The nextPageToken.
*/
public java.lang.String getNextPageToken() {
java.lang.Object ref = nextPageToken_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
nextPageToken_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
*
*
* <pre>
* A token that can be sent as
* [ListProductsRequest.page_token][google.cloud.retail.v2beta.ListProductsRequest.page_token]
* to retrieve the next page. If this field is omitted, there are no
* subsequent pages.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @return The bytes for nextPageToken.
*/
public com.google.protobuf.ByteString getNextPageTokenBytes() {
java.lang.Object ref = nextPageToken_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
nextPageToken_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
*
*
* <pre>
* A token that can be sent as
* [ListProductsRequest.page_token][google.cloud.retail.v2beta.ListProductsRequest.page_token]
* to retrieve the next page. If this field is omitted, there are no
* subsequent pages.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @param value The nextPageToken to set.
* @return This builder for chaining.
*/
public Builder setNextPageToken(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
nextPageToken_ = value;
bitField0_ |= 0x00000002;
onChanged();
return this;
}
/**
*
*
* <pre>
* A token that can be sent as
* [ListProductsRequest.page_token][google.cloud.retail.v2beta.ListProductsRequest.page_token]
* to retrieve the next page. If this field is omitted, there are no
* subsequent pages.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @return This builder for chaining.
*/
public Builder clearNextPageToken() {
nextPageToken_ = getDefaultInstance().getNextPageToken();
bitField0_ = (bitField0_ & ~0x00000002);
onChanged();
return this;
}
/**
*
*
* <pre>
* A token that can be sent as
* [ListProductsRequest.page_token][google.cloud.retail.v2beta.ListProductsRequest.page_token]
* to retrieve the next page. If this field is omitted, there are no
* subsequent pages.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @param value The bytes for nextPageToken to set.
* @return This builder for chaining.
*/
public Builder setNextPageTokenBytes(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
checkByteStringIsUtf8(value);
nextPageToken_ = value;
bitField0_ |= 0x00000002;
onChanged();
return this;
}
@java.lang.Override
public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:google.cloud.retail.v2beta.ListProductsResponse)
}
// @@protoc_insertion_point(class_scope:google.cloud.retail.v2beta.ListProductsResponse)
private static final com.google.cloud.retail.v2beta.ListProductsResponse DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new com.google.cloud.retail.v2beta.ListProductsResponse();
}
public static com.google.cloud.retail.v2beta.ListProductsResponse getDefaultInstance() {
return DEFAULT_INSTANCE;
}
// Stateless parser shared by every parseFrom entry point of this message type.
private static final com.google.protobuf.Parser<ListProductsResponse> PARSER =
    new com.google.protobuf.AbstractParser<ListProductsResponse>() {
      @java.lang.Override
      public ListProductsResponse parsePartialFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        Builder builder = newBuilder();
        try {
          builder.mergeFrom(input, extensionRegistry);
        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
          // Attach whatever was parsed before the failure so callers can
          // inspect the partial message.
          throw e.setUnfinishedMessage(builder.buildPartial());
        } catch (com.google.protobuf.UninitializedMessageException e) {
          throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
        } catch (java.io.IOException e) {
          // Wrap plain I/O failures in the protobuf exception type callers expect.
          throw new com.google.protobuf.InvalidProtocolBufferException(e)
              .setUnfinishedMessage(builder.buildPartial());
        }
        return builder.buildPartial();
      }
    };
// Accessors for the shared parser and default instance.
public static com.google.protobuf.Parser<ListProductsResponse> parser() {
  return PARSER;
}

@java.lang.Override
public com.google.protobuf.Parser<ListProductsResponse> getParserForType() {
  return PARSER;
}

@java.lang.Override
public com.google.cloud.retail.v2beta.ListProductsResponse getDefaultInstanceForType() {
  return DEFAULT_INSTANCE;
}
}
|
googleapis/google-cloud-java | 36,981 | java-aiplatform/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/ListDataItemsResponse.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/aiplatform/v1beta1/dataset_service.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.aiplatform.v1beta1;
/**
*
*
* <pre>
* Response message for
* [DatasetService.ListDataItems][google.cloud.aiplatform.v1beta1.DatasetService.ListDataItems].
* </pre>
*
* Protobuf type {@code google.cloud.aiplatform.v1beta1.ListDataItemsResponse}
*/
public final class ListDataItemsResponse extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.aiplatform.v1beta1.ListDataItemsResponse)
ListDataItemsResponseOrBuilder {
private static final long serialVersionUID = 0L;
// Use ListDataItemsResponse.newBuilder() to construct.
private ListDataItemsResponse(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private ListDataItemsResponse() {
dataItems_ = java.util.Collections.emptyList();
nextPageToken_ = "";
}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
return new ListDataItemsResponse();
}
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.aiplatform.v1beta1.DatasetServiceProto
.internal_static_google_cloud_aiplatform_v1beta1_ListDataItemsResponse_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.aiplatform.v1beta1.DatasetServiceProto
.internal_static_google_cloud_aiplatform_v1beta1_ListDataItemsResponse_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.aiplatform.v1beta1.ListDataItemsResponse.class,
com.google.cloud.aiplatform.v1beta1.ListDataItemsResponse.Builder.class);
}
public static final int DATA_ITEMS_FIELD_NUMBER = 1;
@SuppressWarnings("serial")
private java.util.List<com.google.cloud.aiplatform.v1beta1.DataItem> dataItems_;
/**
*
*
* <pre>
* A list of DataItems that matches the specified filter in the request.
* </pre>
*
* <code>repeated .google.cloud.aiplatform.v1beta1.DataItem data_items = 1;</code>
*/
@java.lang.Override
public java.util.List<com.google.cloud.aiplatform.v1beta1.DataItem> getDataItemsList() {
return dataItems_;
}
/**
*
*
* <pre>
* A list of DataItems that matches the specified filter in the request.
* </pre>
*
* <code>repeated .google.cloud.aiplatform.v1beta1.DataItem data_items = 1;</code>
*/
@java.lang.Override
public java.util.List<? extends com.google.cloud.aiplatform.v1beta1.DataItemOrBuilder>
getDataItemsOrBuilderList() {
return dataItems_;
}
/**
*
*
* <pre>
* A list of DataItems that matches the specified filter in the request.
* </pre>
*
* <code>repeated .google.cloud.aiplatform.v1beta1.DataItem data_items = 1;</code>
*/
@java.lang.Override
public int getDataItemsCount() {
return dataItems_.size();
}
/**
*
*
* <pre>
* A list of DataItems that matches the specified filter in the request.
* </pre>
*
* <code>repeated .google.cloud.aiplatform.v1beta1.DataItem data_items = 1;</code>
*/
@java.lang.Override
public com.google.cloud.aiplatform.v1beta1.DataItem getDataItems(int index) {
return dataItems_.get(index);
}
/**
*
*
* <pre>
* A list of DataItems that matches the specified filter in the request.
* </pre>
*
* <code>repeated .google.cloud.aiplatform.v1beta1.DataItem data_items = 1;</code>
*/
@java.lang.Override
public com.google.cloud.aiplatform.v1beta1.DataItemOrBuilder getDataItemsOrBuilder(int index) {
return dataItems_.get(index);
}
public static final int NEXT_PAGE_TOKEN_FIELD_NUMBER = 2;
@SuppressWarnings("serial")
private volatile java.lang.Object nextPageToken_ = "";
/**
*
*
* <pre>
* The standard List next-page token.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @return The nextPageToken.
*/
@java.lang.Override
public java.lang.String getNextPageToken() {
  java.lang.Object ref = nextPageToken_;
  if (ref instanceof java.lang.String) {
    return (java.lang.String) ref;
  } else {
    // Field is still the ByteString captured at parse time; decode once and
    // cache the String back into the field. The unsynchronized write is a
    // benign race: both representations are immutable and equivalent.
    com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
    java.lang.String s = bs.toStringUtf8();
    nextPageToken_ = s;
    return s;
  }
}
/**
*
*
* <pre>
* The standard List next-page token.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @return The bytes for nextPageToken.
*/
@java.lang.Override
public com.google.protobuf.ByteString getNextPageTokenBytes() {
  java.lang.Object ref = nextPageToken_;
  if (ref instanceof java.lang.String) {
    // Mirror of getNextPageToken(): encode once and cache the ByteString
    // representation back into the field.
    com.google.protobuf.ByteString b =
        com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
    nextPageToken_ = b;
    return b;
  } else {
    return (com.google.protobuf.ByteString) ref;
  }
}
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
@java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
  // Field 1: repeated DataItem data_items.
  for (int i = 0; i < dataItems_.size(); i++) {
    output.writeMessage(1, dataItems_.get(i));
  }
  // Field 2: next_page_token — omitted when empty, per proto3 semantics.
  if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) {
    com.google.protobuf.GeneratedMessageV3.writeString(output, 2, nextPageToken_);
  }
  // Preserve fields this binary does not know about.
  getUnknownFields().writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
  int size = memoizedSize;
  if (size != -1) return size;  // previously computed; message is immutable
  // Mirrors writeTo(): sum the encoded size of each set field.
  size = 0;
  for (int i = 0; i < dataItems_.size(); i++) {
    size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, dataItems_.get(i));
  }
  if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) {
    size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, nextPageToken_);
  }
  size += getUnknownFields().getSerializedSize();
  memoizedSize = size;  // cache for subsequent calls
  return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
  if (obj == this) {
    return true;
  }
  if (!(obj instanceof com.google.cloud.aiplatform.v1beta1.ListDataItemsResponse)) {
    // Delegate to the superclass so messages of other runtime types with the
    // same descriptor (e.g. DynamicMessage) can still compare equal.
    return super.equals(obj);
  }
  com.google.cloud.aiplatform.v1beta1.ListDataItemsResponse other =
      (com.google.cloud.aiplatform.v1beta1.ListDataItemsResponse) obj;
  // Field-by-field comparison, including unknown fields.
  if (!getDataItemsList().equals(other.getDataItemsList())) return false;
  if (!getNextPageToken().equals(other.getNextPageToken())) return false;
  if (!getUnknownFields().equals(other.getUnknownFields())) return false;
  return true;
}
@java.lang.Override
public int hashCode() {
  if (memoizedHashCode != 0) {
    return memoizedHashCode;  // cached; message is immutable
  }
  // Standard generated-code scheme: fold in the descriptor, then each set
  // field tagged by its field number, then unknown fields.
  int hash = 41;
  hash = (19 * hash) + getDescriptor().hashCode();
  if (getDataItemsCount() > 0) {
    hash = (37 * hash) + DATA_ITEMS_FIELD_NUMBER;
    hash = (53 * hash) + getDataItemsList().hashCode();
  }
  hash = (37 * hash) + NEXT_PAGE_TOKEN_FIELD_NUMBER;
  hash = (53 * hash) + getNextPageToken().hashCode();
  hash = (29 * hash) + getUnknownFields().hashCode();
  memoizedHashCode = hash;
  return hash;
}
public static com.google.cloud.aiplatform.v1beta1.ListDataItemsResponse parseFrom(
java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.aiplatform.v1beta1.ListDataItemsResponse parseFrom(
java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.aiplatform.v1beta1.ListDataItemsResponse parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.aiplatform.v1beta1.ListDataItemsResponse parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.aiplatform.v1beta1.ListDataItemsResponse parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.aiplatform.v1beta1.ListDataItemsResponse parseFrom(
byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.aiplatform.v1beta1.ListDataItemsResponse parseFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.aiplatform.v1beta1.ListDataItemsResponse parseFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.aiplatform.v1beta1.ListDataItemsResponse parseDelimitedFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
}
public static com.google.cloud.aiplatform.v1beta1.ListDataItemsResponse parseDelimitedFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.aiplatform.v1beta1.ListDataItemsResponse parseFrom(
com.google.protobuf.CodedInputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.aiplatform.v1beta1.ListDataItemsResponse parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() {
return newBuilder();
}
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(
com.google.cloud.aiplatform.v1beta1.ListDataItemsResponse prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
*
*
* <pre>
* Response message for
* [DatasetService.ListDataItems][google.cloud.aiplatform.v1beta1.DatasetService.ListDataItems].
* </pre>
*
* Protobuf type {@code google.cloud.aiplatform.v1beta1.ListDataItemsResponse}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.cloud.aiplatform.v1beta1.ListDataItemsResponse)
com.google.cloud.aiplatform.v1beta1.ListDataItemsResponseOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.aiplatform.v1beta1.DatasetServiceProto
.internal_static_google_cloud_aiplatform_v1beta1_ListDataItemsResponse_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.aiplatform.v1beta1.DatasetServiceProto
.internal_static_google_cloud_aiplatform_v1beta1_ListDataItemsResponse_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.aiplatform.v1beta1.ListDataItemsResponse.class,
com.google.cloud.aiplatform.v1beta1.ListDataItemsResponse.Builder.class);
}
// Construct using com.google.cloud.aiplatform.v1beta1.ListDataItemsResponse.newBuilder()
private Builder() {}
private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
}
@java.lang.Override
public Builder clear() {
super.clear();
bitField0_ = 0;
if (dataItemsBuilder_ == null) {
dataItems_ = java.util.Collections.emptyList();
} else {
dataItems_ = null;
dataItemsBuilder_.clear();
}
bitField0_ = (bitField0_ & ~0x00000001);
nextPageToken_ = "";
return this;
}
@java.lang.Override
public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
return com.google.cloud.aiplatform.v1beta1.DatasetServiceProto
.internal_static_google_cloud_aiplatform_v1beta1_ListDataItemsResponse_descriptor;
}
@java.lang.Override
public com.google.cloud.aiplatform.v1beta1.ListDataItemsResponse getDefaultInstanceForType() {
return com.google.cloud.aiplatform.v1beta1.ListDataItemsResponse.getDefaultInstance();
}
@java.lang.Override
public com.google.cloud.aiplatform.v1beta1.ListDataItemsResponse build() {
com.google.cloud.aiplatform.v1beta1.ListDataItemsResponse result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public com.google.cloud.aiplatform.v1beta1.ListDataItemsResponse buildPartial() {
  com.google.cloud.aiplatform.v1beta1.ListDataItemsResponse result =
      new com.google.cloud.aiplatform.v1beta1.ListDataItemsResponse(this);
  buildPartialRepeatedFields(result);
  if (bitField0_ != 0) {
    buildPartial0(result);
  }
  onBuilt();
  return result;
}

// Moves the repeated data_items field into the result, freezing the
// builder-owned list (unmodifiableList) so the built message stays immutable.
private void buildPartialRepeatedFields(
    com.google.cloud.aiplatform.v1beta1.ListDataItemsResponse result) {
  if (dataItemsBuilder_ == null) {
    if (((bitField0_ & 0x00000001) != 0)) {
      dataItems_ = java.util.Collections.unmodifiableList(dataItems_);
      bitField0_ = (bitField0_ & ~0x00000001);  // builder no longer owns the list
    }
    result.dataItems_ = dataItems_;
  } else {
    result.dataItems_ = dataItemsBuilder_.build();
  }
}

// Copies singular fields whose has-bits are set into the result message.
private void buildPartial0(com.google.cloud.aiplatform.v1beta1.ListDataItemsResponse result) {
  int from_bitField0_ = bitField0_;
  if (((from_bitField0_ & 0x00000002) != 0)) {
    result.nextPageToken_ = nextPageToken_;
  }
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof com.google.cloud.aiplatform.v1beta1.ListDataItemsResponse) {
return mergeFrom((com.google.cloud.aiplatform.v1beta1.ListDataItemsResponse) other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(com.google.cloud.aiplatform.v1beta1.ListDataItemsResponse other) {
  if (other == com.google.cloud.aiplatform.v1beta1.ListDataItemsResponse.getDefaultInstance())
    return this;  // merging the default instance is a no-op
  if (dataItemsBuilder_ == null) {
    // Plain-list mode: append other's items to ours.
    if (!other.dataItems_.isEmpty()) {
      if (dataItems_.isEmpty()) {
        // We have nothing yet: share other's (frozen) list directly.
        dataItems_ = other.dataItems_;
        bitField0_ = (bitField0_ & ~0x00000001);
      } else {
        ensureDataItemsIsMutable();
        dataItems_.addAll(other.dataItems_);
      }
      onChanged();
    }
  } else {
    // Field-builder mode.
    if (!other.dataItems_.isEmpty()) {
      if (dataItemsBuilder_.isEmpty()) {
        // Builder holds nothing: adopt other's list and recreate the field
        // builder only when the runtime forces field builders.
        dataItemsBuilder_.dispose();
        dataItemsBuilder_ = null;
        dataItems_ = other.dataItems_;
        bitField0_ = (bitField0_ & ~0x00000001);
        dataItemsBuilder_ =
            com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders
                ? getDataItemsFieldBuilder()
                : null;
      } else {
        dataItemsBuilder_.addAllMessages(other.dataItems_);
      }
    }
  }
  // proto3 scalar merge: a non-empty value in 'other' overwrites ours.
  if (!other.getNextPageToken().isEmpty()) {
    nextPageToken_ = other.nextPageToken_;
    bitField0_ |= 0x00000002;
    onChanged();
  }
  this.mergeUnknownFields(other.getUnknownFields());
  onChanged();
  return this;
}
@java.lang.Override
public final boolean isInitialized() {
return true;
}
@java.lang.Override
public Builder mergeFrom(
    com.google.protobuf.CodedInputStream input,
    com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws java.io.IOException {
  if (extensionRegistry == null) {
    throw new java.lang.NullPointerException();
  }
  try {
    // Tag-dispatch loop over the wire format until end-of-stream (tag 0)
    // or an end-group tag is seen.
    boolean done = false;
    while (!done) {
      int tag = input.readTag();
      switch (tag) {
        case 0:
          done = true;
          break;
        case 10:  // field 1, wire type 2 (length-delimited): data_items
          {
            com.google.cloud.aiplatform.v1beta1.DataItem m =
                input.readMessage(
                    com.google.cloud.aiplatform.v1beta1.DataItem.parser(), extensionRegistry);
            if (dataItemsBuilder_ == null) {
              ensureDataItemsIsMutable();
              dataItems_.add(m);
            } else {
              dataItemsBuilder_.addMessage(m);
            }
            break;
          } // case 10
        case 18:  // field 2, wire type 2 (length-delimited): next_page_token
          {
            nextPageToken_ = input.readStringRequireUtf8();
            bitField0_ |= 0x00000002;
            break;
          } // case 18
        default:
          {
            // Unknown field: preserve it, or stop on an end-group tag.
            if (!super.parseUnknownField(input, extensionRegistry, tag)) {
              done = true; // was an endgroup tag
            }
            break;
          } // default:
      } // switch (tag)
    } // while (!done)
  } catch (com.google.protobuf.InvalidProtocolBufferException e) {
    throw e.unwrapIOException();
  } finally {
    onChanged();
  } // finally
  return this;
}
private int bitField0_;
private java.util.List<com.google.cloud.aiplatform.v1beta1.DataItem> dataItems_ =
java.util.Collections.emptyList();
// Copies data_items into a fresh ArrayList the first time the builder mutates
// it; bit 0x00000001 records that the builder owns a mutable copy.
private void ensureDataItemsIsMutable() {
  if (!((bitField0_ & 0x00000001) != 0)) {
    dataItems_ =
        new java.util.ArrayList<com.google.cloud.aiplatform.v1beta1.DataItem>(dataItems_);
    bitField0_ |= 0x00000001;
  }
}
private com.google.protobuf.RepeatedFieldBuilderV3<
com.google.cloud.aiplatform.v1beta1.DataItem,
com.google.cloud.aiplatform.v1beta1.DataItem.Builder,
com.google.cloud.aiplatform.v1beta1.DataItemOrBuilder>
dataItemsBuilder_;
/**
*
*
* <pre>
* A list of DataItems that matches the specified filter in the request.
* </pre>
*
* <code>repeated .google.cloud.aiplatform.v1beta1.DataItem data_items = 1;</code>
*/
public java.util.List<com.google.cloud.aiplatform.v1beta1.DataItem> getDataItemsList() {
if (dataItemsBuilder_ == null) {
return java.util.Collections.unmodifiableList(dataItems_);
} else {
return dataItemsBuilder_.getMessageList();
}
}
/**
*
*
* <pre>
* A list of DataItems that matches the specified filter in the request.
* </pre>
*
* <code>repeated .google.cloud.aiplatform.v1beta1.DataItem data_items = 1;</code>
*/
public int getDataItemsCount() {
if (dataItemsBuilder_ == null) {
return dataItems_.size();
} else {
return dataItemsBuilder_.getCount();
}
}
/**
*
*
* <pre>
* A list of DataItems that matches the specified filter in the request.
* </pre>
*
* <code>repeated .google.cloud.aiplatform.v1beta1.DataItem data_items = 1;</code>
*/
public com.google.cloud.aiplatform.v1beta1.DataItem getDataItems(int index) {
if (dataItemsBuilder_ == null) {
return dataItems_.get(index);
} else {
return dataItemsBuilder_.getMessage(index);
}
}
/**
*
*
* <pre>
* A list of DataItems that matches the specified filter in the request.
* </pre>
*
* <code>repeated .google.cloud.aiplatform.v1beta1.DataItem data_items = 1;</code>
*/
public Builder setDataItems(int index, com.google.cloud.aiplatform.v1beta1.DataItem value) {
if (dataItemsBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureDataItemsIsMutable();
dataItems_.set(index, value);
onChanged();
} else {
dataItemsBuilder_.setMessage(index, value);
}
return this;
}
/**
*
*
* <pre>
* A list of DataItems that matches the specified filter in the request.
* </pre>
*
* <code>repeated .google.cloud.aiplatform.v1beta1.DataItem data_items = 1;</code>
*/
public Builder setDataItems(
int index, com.google.cloud.aiplatform.v1beta1.DataItem.Builder builderForValue) {
if (dataItemsBuilder_ == null) {
ensureDataItemsIsMutable();
dataItems_.set(index, builderForValue.build());
onChanged();
} else {
dataItemsBuilder_.setMessage(index, builderForValue.build());
}
return this;
}
/**
*
*
* <pre>
* A list of DataItems that matches the specified filter in the request.
* </pre>
*
* <code>repeated .google.cloud.aiplatform.v1beta1.DataItem data_items = 1;</code>
*/
public Builder addDataItems(com.google.cloud.aiplatform.v1beta1.DataItem value) {
if (dataItemsBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureDataItemsIsMutable();
dataItems_.add(value);
onChanged();
} else {
dataItemsBuilder_.addMessage(value);
}
return this;
}
/**
*
*
* <pre>
* A list of DataItems that matches the specified filter in the request.
* </pre>
*
* <code>repeated .google.cloud.aiplatform.v1beta1.DataItem data_items = 1;</code>
*/
public Builder addDataItems(int index, com.google.cloud.aiplatform.v1beta1.DataItem value) {
if (dataItemsBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureDataItemsIsMutable();
dataItems_.add(index, value);
onChanged();
} else {
dataItemsBuilder_.addMessage(index, value);
}
return this;
}
/**
*
*
* <pre>
* A list of DataItems that matches the specified filter in the request.
* </pre>
*
* <code>repeated .google.cloud.aiplatform.v1beta1.DataItem data_items = 1;</code>
*/
public Builder addDataItems(
com.google.cloud.aiplatform.v1beta1.DataItem.Builder builderForValue) {
if (dataItemsBuilder_ == null) {
ensureDataItemsIsMutable();
dataItems_.add(builderForValue.build());
onChanged();
} else {
dataItemsBuilder_.addMessage(builderForValue.build());
}
return this;
}
/**
*
*
* <pre>
* A list of DataItems that matches the specified filter in the request.
* </pre>
*
* <code>repeated .google.cloud.aiplatform.v1beta1.DataItem data_items = 1;</code>
*/
public Builder addDataItems(
int index, com.google.cloud.aiplatform.v1beta1.DataItem.Builder builderForValue) {
if (dataItemsBuilder_ == null) {
ensureDataItemsIsMutable();
dataItems_.add(index, builderForValue.build());
onChanged();
} else {
dataItemsBuilder_.addMessage(index, builderForValue.build());
}
return this;
}
/**
*
*
* <pre>
* A list of DataItems that matches the specified filter in the request.
* </pre>
*
* <code>repeated .google.cloud.aiplatform.v1beta1.DataItem data_items = 1;</code>
*/
public Builder addAllDataItems(
java.lang.Iterable<? extends com.google.cloud.aiplatform.v1beta1.DataItem> values) {
if (dataItemsBuilder_ == null) {
ensureDataItemsIsMutable();
com.google.protobuf.AbstractMessageLite.Builder.addAll(values, dataItems_);
onChanged();
} else {
dataItemsBuilder_.addAllMessages(values);
}
return this;
}
/**
*
*
* <pre>
* A list of DataItems that matches the specified filter in the request.
* </pre>
*
* <code>repeated .google.cloud.aiplatform.v1beta1.DataItem data_items = 1;</code>
*/
public Builder clearDataItems() {
if (dataItemsBuilder_ == null) {
dataItems_ = java.util.Collections.emptyList();
bitField0_ = (bitField0_ & ~0x00000001);
onChanged();
} else {
dataItemsBuilder_.clear();
}
return this;
}
/**
*
*
* <pre>
* A list of DataItems that matches the specified filter in the request.
* </pre>
*
* <code>repeated .google.cloud.aiplatform.v1beta1.DataItem data_items = 1;</code>
*/
public Builder removeDataItems(int index) {
if (dataItemsBuilder_ == null) {
ensureDataItemsIsMutable();
dataItems_.remove(index);
onChanged();
} else {
dataItemsBuilder_.remove(index);
}
return this;
}
/**
*
*
* <pre>
* A list of DataItems that matches the specified filter in the request.
* </pre>
*
* <code>repeated .google.cloud.aiplatform.v1beta1.DataItem data_items = 1;</code>
*/
public com.google.cloud.aiplatform.v1beta1.DataItem.Builder getDataItemsBuilder(int index) {
return getDataItemsFieldBuilder().getBuilder(index);
}
/**
*
*
* <pre>
* A list of DataItems that matches the specified filter in the request.
* </pre>
*
* <code>repeated .google.cloud.aiplatform.v1beta1.DataItem data_items = 1;</code>
*/
public com.google.cloud.aiplatform.v1beta1.DataItemOrBuilder getDataItemsOrBuilder(int index) {
if (dataItemsBuilder_ == null) {
return dataItems_.get(index);
} else {
return dataItemsBuilder_.getMessageOrBuilder(index);
}
}
/**
*
*
* <pre>
* A list of DataItems that matches the specified filter in the request.
* </pre>
*
* <code>repeated .google.cloud.aiplatform.v1beta1.DataItem data_items = 1;</code>
*/
public java.util.List<? extends com.google.cloud.aiplatform.v1beta1.DataItemOrBuilder>
getDataItemsOrBuilderList() {
if (dataItemsBuilder_ != null) {
return dataItemsBuilder_.getMessageOrBuilderList();
} else {
return java.util.Collections.unmodifiableList(dataItems_);
}
}
/**
*
*
* <pre>
* A list of DataItems that matches the specified filter in the request.
* </pre>
*
* <code>repeated .google.cloud.aiplatform.v1beta1.DataItem data_items = 1;</code>
*/
public com.google.cloud.aiplatform.v1beta1.DataItem.Builder addDataItemsBuilder() {
return getDataItemsFieldBuilder()
.addBuilder(com.google.cloud.aiplatform.v1beta1.DataItem.getDefaultInstance());
}
/**
*
*
* <pre>
* A list of DataItems that matches the specified filter in the request.
* </pre>
*
* <code>repeated .google.cloud.aiplatform.v1beta1.DataItem data_items = 1;</code>
*/
public com.google.cloud.aiplatform.v1beta1.DataItem.Builder addDataItemsBuilder(int index) {
return getDataItemsFieldBuilder()
.addBuilder(index, com.google.cloud.aiplatform.v1beta1.DataItem.getDefaultInstance());
}
/**
*
*
* <pre>
* A list of DataItems that matches the specified filter in the request.
* </pre>
*
* <code>repeated .google.cloud.aiplatform.v1beta1.DataItem data_items = 1;</code>
*/
public java.util.List<com.google.cloud.aiplatform.v1beta1.DataItem.Builder>
getDataItemsBuilderList() {
return getDataItemsFieldBuilder().getBuilderList();
}
// Lazily creates the RepeatedFieldBuilderV3 for data_items. Once created, the
// plain list is handed off (and nulled) and the field builder becomes the
// single source of truth for the repeated field.
private com.google.protobuf.RepeatedFieldBuilderV3<
        com.google.cloud.aiplatform.v1beta1.DataItem,
        com.google.cloud.aiplatform.v1beta1.DataItem.Builder,
        com.google.cloud.aiplatform.v1beta1.DataItemOrBuilder>
    getDataItemsFieldBuilder() {
  if (dataItemsBuilder_ == null) {
    dataItemsBuilder_ =
        new com.google.protobuf.RepeatedFieldBuilderV3<
            com.google.cloud.aiplatform.v1beta1.DataItem,
            com.google.cloud.aiplatform.v1beta1.DataItem.Builder,
            com.google.cloud.aiplatform.v1beta1.DataItemOrBuilder>(
            dataItems_, ((bitField0_ & 0x00000001) != 0), getParentForChildren(), isClean());
    dataItems_ = null;
  }
  return dataItemsBuilder_;
}
private java.lang.Object nextPageToken_ = "";
/**
*
*
* <pre>
* The standard List next-page token.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @return The nextPageToken.
*/
public java.lang.String getNextPageToken() {
java.lang.Object ref = nextPageToken_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
nextPageToken_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
*
*
* <pre>
* The standard List next-page token.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @return The bytes for nextPageToken.
*/
public com.google.protobuf.ByteString getNextPageTokenBytes() {
java.lang.Object ref = nextPageToken_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
nextPageToken_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
*
*
* <pre>
* The standard List next-page token.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @param value The nextPageToken to set.
* @return This builder for chaining.
*/
public Builder setNextPageToken(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
nextPageToken_ = value;
bitField0_ |= 0x00000002;
onChanged();
return this;
}
/**
*
*
* <pre>
* The standard List next-page token.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @return This builder for chaining.
*/
public Builder clearNextPageToken() {
nextPageToken_ = getDefaultInstance().getNextPageToken();
bitField0_ = (bitField0_ & ~0x00000002);
onChanged();
return this;
}
/**
*
*
* <pre>
* The standard List next-page token.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @param value The bytes for nextPageToken to set.
* @return This builder for chaining.
*/
public Builder setNextPageTokenBytes(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
checkByteStringIsUtf8(value);
nextPageToken_ = value;
bitField0_ |= 0x00000002;
onChanged();
return this;
}
@java.lang.Override
public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:google.cloud.aiplatform.v1beta1.ListDataItemsResponse)
}
// @@protoc_insertion_point(class_scope:google.cloud.aiplatform.v1beta1.ListDataItemsResponse)
private static final com.google.cloud.aiplatform.v1beta1.ListDataItemsResponse DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new com.google.cloud.aiplatform.v1beta1.ListDataItemsResponse();
}
public static com.google.cloud.aiplatform.v1beta1.ListDataItemsResponse getDefaultInstance() {
return DEFAULT_INSTANCE;
}
private static final com.google.protobuf.Parser<ListDataItemsResponse> PARSER =
new com.google.protobuf.AbstractParser<ListDataItemsResponse>() {
@java.lang.Override
public ListDataItemsResponse parsePartialFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
Builder builder = newBuilder();
try {
builder.mergeFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(builder.buildPartial());
} catch (com.google.protobuf.UninitializedMessageException e) {
throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
} catch (java.io.IOException e) {
throw new com.google.protobuf.InvalidProtocolBufferException(e)
.setUnfinishedMessage(builder.buildPartial());
}
return builder.buildPartial();
}
};
  /** Static accessor for the singleton parser of this message type. */
  public static com.google.protobuf.Parser<ListDataItemsResponse> parser() {
    return PARSER;
  }
  // Instance-level accessor required by the Message interface; returns the same
  // singleton parser as parser().
  @java.lang.Override
  public com.google.protobuf.Parser<ListDataItemsResponse> getParserForType() {
    return PARSER;
  }
  // Instance-level accessor required by the Message interface; returns the same
  // singleton default instance as getDefaultInstance().
  @java.lang.Override
  public com.google.cloud.aiplatform.v1beta1.ListDataItemsResponse getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }
}
|
googleapis/google-cloud-java | 37,011 | java-discoveryengine/proto-google-cloud-discoveryengine-v1/src/main/java/com/google/cloud/discoveryengine/v1/UpdateControlRequest.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/discoveryengine/v1/control_service.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.discoveryengine.v1;
/**
 *
 *
 * <pre>
 * Request for UpdateControl method.
 * </pre>
 *
 * Protobuf type {@code google.cloud.discoveryengine.v1.UpdateControlRequest}
 */
public final class UpdateControlRequest extends com.google.protobuf.GeneratedMessageV3
    implements
    // @@protoc_insertion_point(message_implements:google.cloud.discoveryengine.v1.UpdateControlRequest)
    UpdateControlRequestOrBuilder {
  private static final long serialVersionUID = 0L;

  // Use UpdateControlRequest.newBuilder() to construct.
  private UpdateControlRequest(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
    super(builder);
  }

  private UpdateControlRequest() {}

  @java.lang.Override
  @SuppressWarnings({"unused"})
  protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
    return new UpdateControlRequest();
  }

  public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
    return com.google.cloud.discoveryengine.v1.ControlServiceProto
        .internal_static_google_cloud_discoveryengine_v1_UpdateControlRequest_descriptor;
  }

  @java.lang.Override
  protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internalGetFieldAccessorTable() {
    return com.google.cloud.discoveryengine.v1.ControlServiceProto
        .internal_static_google_cloud_discoveryengine_v1_UpdateControlRequest_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            com.google.cloud.discoveryengine.v1.UpdateControlRequest.class,
            com.google.cloud.discoveryengine.v1.UpdateControlRequest.Builder.class);
  }

  // Presence bits: 0x00000001 tracks the control field, 0x00000002 tracks update_mask.
  private int bitField0_;
  public static final int CONTROL_FIELD_NUMBER = 1;
  private com.google.cloud.discoveryengine.v1.Control control_;

  /**
   *
   *
   * <pre>
   * Required. The Control to update.
   * </pre>
   *
   * <code>
   * .google.cloud.discoveryengine.v1.Control control = 1 [(.google.api.field_behavior) = REQUIRED];
   * </code>
   *
   * @return Whether the control field is set.
   */
  @java.lang.Override
  public boolean hasControl() {
    return ((bitField0_ & 0x00000001) != 0);
  }

  /**
   *
   *
   * <pre>
   * Required. The Control to update.
   * </pre>
   *
   * <code>
   * .google.cloud.discoveryengine.v1.Control control = 1 [(.google.api.field_behavior) = REQUIRED];
   * </code>
   *
   * @return The control.
   */
  @java.lang.Override
  public com.google.cloud.discoveryengine.v1.Control getControl() {
    return control_ == null
        ? com.google.cloud.discoveryengine.v1.Control.getDefaultInstance()
        : control_;
  }

  /**
   *
   *
   * <pre>
   * Required. The Control to update.
   * </pre>
   *
   * <code>
   * .google.cloud.discoveryengine.v1.Control control = 1 [(.google.api.field_behavior) = REQUIRED];
   * </code>
   */
  @java.lang.Override
  public com.google.cloud.discoveryengine.v1.ControlOrBuilder getControlOrBuilder() {
    return control_ == null
        ? com.google.cloud.discoveryengine.v1.Control.getDefaultInstance()
        : control_;
  }

  public static final int UPDATE_MASK_FIELD_NUMBER = 2;
  private com.google.protobuf.FieldMask updateMask_;

  /**
   *
   *
   * <pre>
   * Optional. Indicates which fields in the provided
   * [Control][google.cloud.discoveryengine.v1.Control] to update. The following
   * are NOT supported:
   *
   * * [Control.name][google.cloud.discoveryengine.v1.Control.name]
   * * [Control.solution_type][google.cloud.discoveryengine.v1.Control.solution_type]
   *
   * If not set or empty, all supported fields are updated.
   * </pre>
   *
   * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = OPTIONAL];
   * </code>
   *
   * @return Whether the updateMask field is set.
   */
  @java.lang.Override
  public boolean hasUpdateMask() {
    return ((bitField0_ & 0x00000002) != 0);
  }

  /**
   *
   *
   * <pre>
   * Optional. Indicates which fields in the provided
   * [Control][google.cloud.discoveryengine.v1.Control] to update. The following
   * are NOT supported:
   *
   * * [Control.name][google.cloud.discoveryengine.v1.Control.name]
   * * [Control.solution_type][google.cloud.discoveryengine.v1.Control.solution_type]
   *
   * If not set or empty, all supported fields are updated.
   * </pre>
   *
   * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = OPTIONAL];
   * </code>
   *
   * @return The updateMask.
   */
  @java.lang.Override
  public com.google.protobuf.FieldMask getUpdateMask() {
    return updateMask_ == null ? com.google.protobuf.FieldMask.getDefaultInstance() : updateMask_;
  }

  /**
   *
   *
   * <pre>
   * Optional. Indicates which fields in the provided
   * [Control][google.cloud.discoveryengine.v1.Control] to update. The following
   * are NOT supported:
   *
   * * [Control.name][google.cloud.discoveryengine.v1.Control.name]
   * * [Control.solution_type][google.cloud.discoveryengine.v1.Control.solution_type]
   *
   * If not set or empty, all supported fields are updated.
   * </pre>
   *
   * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = OPTIONAL];
   * </code>
   */
  @java.lang.Override
  public com.google.protobuf.FieldMaskOrBuilder getUpdateMaskOrBuilder() {
    return updateMask_ == null ? com.google.protobuf.FieldMask.getDefaultInstance() : updateMask_;
  }

  // Memoized initialization check: -1 = unknown, 0 = not initialized, 1 = initialized.
  private byte memoizedIsInitialized = -1;

  @java.lang.Override
  public final boolean isInitialized() {
    byte isInitialized = memoizedIsInitialized;
    if (isInitialized == 1) return true;
    if (isInitialized == 0) return false;

    memoizedIsInitialized = 1;
    return true;
  }

  @java.lang.Override
  public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
    if (((bitField0_ & 0x00000001) != 0)) {
      output.writeMessage(1, getControl());
    }
    if (((bitField0_ & 0x00000002) != 0)) {
      output.writeMessage(2, getUpdateMask());
    }
    getUnknownFields().writeTo(output);
  }

  @java.lang.Override
  public int getSerializedSize() {
    int size = memoizedSize;
    if (size != -1) return size;

    size = 0;
    if (((bitField0_ & 0x00000001) != 0)) {
      size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, getControl());
    }
    if (((bitField0_ & 0x00000002) != 0)) {
      size += com.google.protobuf.CodedOutputStream.computeMessageSize(2, getUpdateMask());
    }
    size += getUnknownFields().getSerializedSize();
    memoizedSize = size;
    return size;
  }

  @java.lang.Override
  public boolean equals(final java.lang.Object obj) {
    if (obj == this) {
      return true;
    }
    if (!(obj instanceof com.google.cloud.discoveryengine.v1.UpdateControlRequest)) {
      return super.equals(obj);
    }
    com.google.cloud.discoveryengine.v1.UpdateControlRequest other =
        (com.google.cloud.discoveryengine.v1.UpdateControlRequest) obj;

    if (hasControl() != other.hasControl()) return false;
    if (hasControl()) {
      if (!getControl().equals(other.getControl())) return false;
    }
    if (hasUpdateMask() != other.hasUpdateMask()) return false;
    if (hasUpdateMask()) {
      if (!getUpdateMask().equals(other.getUpdateMask())) return false;
    }
    if (!getUnknownFields().equals(other.getUnknownFields())) return false;
    return true;
  }

  @java.lang.Override
  public int hashCode() {
    if (memoizedHashCode != 0) {
      return memoizedHashCode;
    }
    int hash = 41;
    hash = (19 * hash) + getDescriptor().hashCode();
    if (hasControl()) {
      hash = (37 * hash) + CONTROL_FIELD_NUMBER;
      hash = (53 * hash) + getControl().hashCode();
    }
    if (hasUpdateMask()) {
      hash = (37 * hash) + UPDATE_MASK_FIELD_NUMBER;
      hash = (53 * hash) + getUpdateMask().hashCode();
    }
    hash = (29 * hash) + getUnknownFields().hashCode();
    memoizedHashCode = hash;
    return hash;
  }

  public static com.google.cloud.discoveryengine.v1.UpdateControlRequest parseFrom(
      java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.cloud.discoveryengine.v1.UpdateControlRequest parseFrom(
      java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.cloud.discoveryengine.v1.UpdateControlRequest parseFrom(
      com.google.protobuf.ByteString data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.cloud.discoveryengine.v1.UpdateControlRequest parseFrom(
      com.google.protobuf.ByteString data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.cloud.discoveryengine.v1.UpdateControlRequest parseFrom(byte[] data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.cloud.discoveryengine.v1.UpdateControlRequest parseFrom(
      byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.cloud.discoveryengine.v1.UpdateControlRequest parseFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }

  public static com.google.cloud.discoveryengine.v1.UpdateControlRequest parseFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }

  public static com.google.cloud.discoveryengine.v1.UpdateControlRequest parseDelimitedFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
  }

  public static com.google.cloud.discoveryengine.v1.UpdateControlRequest parseDelimitedFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
        PARSER, input, extensionRegistry);
  }

  public static com.google.cloud.discoveryengine.v1.UpdateControlRequest parseFrom(
      com.google.protobuf.CodedInputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }

  public static com.google.cloud.discoveryengine.v1.UpdateControlRequest parseFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }

  @java.lang.Override
  public Builder newBuilderForType() {
    return newBuilder();
  }

  public static Builder newBuilder() {
    return DEFAULT_INSTANCE.toBuilder();
  }

  public static Builder newBuilder(
      com.google.cloud.discoveryengine.v1.UpdateControlRequest prototype) {
    return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
  }

  @java.lang.Override
  public Builder toBuilder() {
    return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
  }

  @java.lang.Override
  protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
    Builder builder = new Builder(parent);
    return builder;
  }

  /**
   *
   *
   * <pre>
   * Request for UpdateControl method.
   * </pre>
   *
   * Protobuf type {@code google.cloud.discoveryengine.v1.UpdateControlRequest}
   */
  public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
      implements
      // @@protoc_insertion_point(builder_implements:google.cloud.discoveryengine.v1.UpdateControlRequest)
      com.google.cloud.discoveryengine.v1.UpdateControlRequestOrBuilder {
    public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
      return com.google.cloud.discoveryengine.v1.ControlServiceProto
          .internal_static_google_cloud_discoveryengine_v1_UpdateControlRequest_descriptor;
    }

    @java.lang.Override
    protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return com.google.cloud.discoveryengine.v1.ControlServiceProto
          .internal_static_google_cloud_discoveryengine_v1_UpdateControlRequest_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              com.google.cloud.discoveryengine.v1.UpdateControlRequest.class,
              com.google.cloud.discoveryengine.v1.UpdateControlRequest.Builder.class);
    }

    // Construct using com.google.cloud.discoveryengine.v1.UpdateControlRequest.newBuilder()
    private Builder() {
      maybeForceBuilderInitialization();
    }

    private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
      super(parent);
      maybeForceBuilderInitialization();
    }

    private void maybeForceBuilderInitialization() {
      if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) {
        getControlFieldBuilder();
        getUpdateMaskFieldBuilder();
      }
    }

    @java.lang.Override
    public Builder clear() {
      super.clear();
      bitField0_ = 0;
      control_ = null;
      if (controlBuilder_ != null) {
        controlBuilder_.dispose();
        controlBuilder_ = null;
      }
      updateMask_ = null;
      if (updateMaskBuilder_ != null) {
        updateMaskBuilder_.dispose();
        updateMaskBuilder_ = null;
      }
      return this;
    }

    @java.lang.Override
    public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
      return com.google.cloud.discoveryengine.v1.ControlServiceProto
          .internal_static_google_cloud_discoveryengine_v1_UpdateControlRequest_descriptor;
    }

    @java.lang.Override
    public com.google.cloud.discoveryengine.v1.UpdateControlRequest getDefaultInstanceForType() {
      return com.google.cloud.discoveryengine.v1.UpdateControlRequest.getDefaultInstance();
    }

    @java.lang.Override
    public com.google.cloud.discoveryengine.v1.UpdateControlRequest build() {
      com.google.cloud.discoveryengine.v1.UpdateControlRequest result = buildPartial();
      if (!result.isInitialized()) {
        throw newUninitializedMessageException(result);
      }
      return result;
    }

    @java.lang.Override
    public com.google.cloud.discoveryengine.v1.UpdateControlRequest buildPartial() {
      com.google.cloud.discoveryengine.v1.UpdateControlRequest result =
          new com.google.cloud.discoveryengine.v1.UpdateControlRequest(this);
      if (bitField0_ != 0) {
        buildPartial0(result);
      }
      onBuilt();
      return result;
    }

    // Copies each set field from the builder into the result, preferring the nested
    // field builder (if one was materialized) over the plain field value.
    private void buildPartial0(com.google.cloud.discoveryengine.v1.UpdateControlRequest result) {
      int from_bitField0_ = bitField0_;
      int to_bitField0_ = 0;
      if (((from_bitField0_ & 0x00000001) != 0)) {
        result.control_ = controlBuilder_ == null ? control_ : controlBuilder_.build();
        to_bitField0_ |= 0x00000001;
      }
      if (((from_bitField0_ & 0x00000002) != 0)) {
        result.updateMask_ = updateMaskBuilder_ == null ? updateMask_ : updateMaskBuilder_.build();
        to_bitField0_ |= 0x00000002;
      }
      result.bitField0_ |= to_bitField0_;
    }

    @java.lang.Override
    public Builder clone() {
      return super.clone();
    }

    @java.lang.Override
    public Builder setField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.setField(field, value);
    }

    @java.lang.Override
    public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
      return super.clearField(field);
    }

    @java.lang.Override
    public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
      return super.clearOneof(oneof);
    }

    @java.lang.Override
    public Builder setRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
      return super.setRepeatedField(field, index, value);
    }

    @java.lang.Override
    public Builder addRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.addRepeatedField(field, value);
    }

    @java.lang.Override
    public Builder mergeFrom(com.google.protobuf.Message other) {
      if (other instanceof com.google.cloud.discoveryengine.v1.UpdateControlRequest) {
        return mergeFrom((com.google.cloud.discoveryengine.v1.UpdateControlRequest) other);
      } else {
        super.mergeFrom(other);
        return this;
      }
    }

    public Builder mergeFrom(com.google.cloud.discoveryengine.v1.UpdateControlRequest other) {
      if (other == com.google.cloud.discoveryengine.v1.UpdateControlRequest.getDefaultInstance())
        return this;
      if (other.hasControl()) {
        mergeControl(other.getControl());
      }
      if (other.hasUpdateMask()) {
        mergeUpdateMask(other.getUpdateMask());
      }
      this.mergeUnknownFields(other.getUnknownFields());
      onChanged();
      return this;
    }

    @java.lang.Override
    public final boolean isInitialized() {
      return true;
    }

    @java.lang.Override
    public Builder mergeFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      if (extensionRegistry == null) {
        throw new java.lang.NullPointerException();
      }
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          // Wire tags: 10 = field 1 (control), 18 = field 2 (update_mask),
          // both length-delimited; 0 marks end of input.
          switch (tag) {
            case 0:
              done = true;
              break;
            case 10:
              {
                input.readMessage(getControlFieldBuilder().getBuilder(), extensionRegistry);
                bitField0_ |= 0x00000001;
                break;
              } // case 10
            case 18:
              {
                input.readMessage(getUpdateMaskFieldBuilder().getBuilder(), extensionRegistry);
                bitField0_ |= 0x00000002;
                break;
              } // case 18
            default:
              {
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
          } // switch (tag)
        } // while (!done)
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.unwrapIOException();
      } finally {
        onChanged();
      } // finally
      return this;
    }

    private int bitField0_;

    private com.google.cloud.discoveryengine.v1.Control control_;
    private com.google.protobuf.SingleFieldBuilderV3<
            com.google.cloud.discoveryengine.v1.Control,
            com.google.cloud.discoveryengine.v1.Control.Builder,
            com.google.cloud.discoveryengine.v1.ControlOrBuilder>
        controlBuilder_;

    /**
     *
     *
     * <pre>
     * Required. The Control to update.
     * </pre>
     *
     * <code>
     * .google.cloud.discoveryengine.v1.Control control = 1 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     *
     * @return Whether the control field is set.
     */
    public boolean hasControl() {
      return ((bitField0_ & 0x00000001) != 0);
    }

    /**
     *
     *
     * <pre>
     * Required. The Control to update.
     * </pre>
     *
     * <code>
     * .google.cloud.discoveryengine.v1.Control control = 1 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     *
     * @return The control.
     */
    public com.google.cloud.discoveryengine.v1.Control getControl() {
      if (controlBuilder_ == null) {
        return control_ == null
            ? com.google.cloud.discoveryengine.v1.Control.getDefaultInstance()
            : control_;
      } else {
        return controlBuilder_.getMessage();
      }
    }

    /**
     *
     *
     * <pre>
     * Required. The Control to update.
     * </pre>
     *
     * <code>
     * .google.cloud.discoveryengine.v1.Control control = 1 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     */
    public Builder setControl(com.google.cloud.discoveryengine.v1.Control value) {
      if (controlBuilder_ == null) {
        if (value == null) {
          throw new NullPointerException();
        }
        control_ = value;
      } else {
        controlBuilder_.setMessage(value);
      }
      bitField0_ |= 0x00000001;
      onChanged();
      return this;
    }

    /**
     *
     *
     * <pre>
     * Required. The Control to update.
     * </pre>
     *
     * <code>
     * .google.cloud.discoveryengine.v1.Control control = 1 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     */
    public Builder setControl(com.google.cloud.discoveryengine.v1.Control.Builder builderForValue) {
      if (controlBuilder_ == null) {
        control_ = builderForValue.build();
      } else {
        controlBuilder_.setMessage(builderForValue.build());
      }
      bitField0_ |= 0x00000001;
      onChanged();
      return this;
    }

    /**
     *
     *
     * <pre>
     * Required. The Control to update.
     * </pre>
     *
     * <code>
     * .google.cloud.discoveryengine.v1.Control control = 1 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     */
    public Builder mergeControl(com.google.cloud.discoveryengine.v1.Control value) {
      if (controlBuilder_ == null) {
        if (((bitField0_ & 0x00000001) != 0)
            && control_ != null
            && control_ != com.google.cloud.discoveryengine.v1.Control.getDefaultInstance()) {
          getControlBuilder().mergeFrom(value);
        } else {
          control_ = value;
        }
      } else {
        controlBuilder_.mergeFrom(value);
      }
      if (control_ != null) {
        bitField0_ |= 0x00000001;
        onChanged();
      }
      return this;
    }

    /**
     *
     *
     * <pre>
     * Required. The Control to update.
     * </pre>
     *
     * <code>
     * .google.cloud.discoveryengine.v1.Control control = 1 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     */
    public Builder clearControl() {
      bitField0_ = (bitField0_ & ~0x00000001);
      control_ = null;
      if (controlBuilder_ != null) {
        controlBuilder_.dispose();
        controlBuilder_ = null;
      }
      onChanged();
      return this;
    }

    /**
     *
     *
     * <pre>
     * Required. The Control to update.
     * </pre>
     *
     * <code>
     * .google.cloud.discoveryengine.v1.Control control = 1 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     */
    public com.google.cloud.discoveryengine.v1.Control.Builder getControlBuilder() {
      bitField0_ |= 0x00000001;
      onChanged();
      return getControlFieldBuilder().getBuilder();
    }

    /**
     *
     *
     * <pre>
     * Required. The Control to update.
     * </pre>
     *
     * <code>
     * .google.cloud.discoveryengine.v1.Control control = 1 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     */
    public com.google.cloud.discoveryengine.v1.ControlOrBuilder getControlOrBuilder() {
      if (controlBuilder_ != null) {
        return controlBuilder_.getMessageOrBuilder();
      } else {
        return control_ == null
            ? com.google.cloud.discoveryengine.v1.Control.getDefaultInstance()
            : control_;
      }
    }

    /**
     *
     *
     * <pre>
     * Required. The Control to update.
     * </pre>
     *
     * <code>
     * .google.cloud.discoveryengine.v1.Control control = 1 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     */
    // Lazily creates the nested field builder; once created, the plain field is nulled
    // and the builder becomes the single source of truth for the control field.
    private com.google.protobuf.SingleFieldBuilderV3<
            com.google.cloud.discoveryengine.v1.Control,
            com.google.cloud.discoveryengine.v1.Control.Builder,
            com.google.cloud.discoveryengine.v1.ControlOrBuilder>
        getControlFieldBuilder() {
      if (controlBuilder_ == null) {
        controlBuilder_ =
            new com.google.protobuf.SingleFieldBuilderV3<
                com.google.cloud.discoveryengine.v1.Control,
                com.google.cloud.discoveryengine.v1.Control.Builder,
                com.google.cloud.discoveryengine.v1.ControlOrBuilder>(
                getControl(), getParentForChildren(), isClean());
        control_ = null;
      }
      return controlBuilder_;
    }

    private com.google.protobuf.FieldMask updateMask_;
    private com.google.protobuf.SingleFieldBuilderV3<
            com.google.protobuf.FieldMask,
            com.google.protobuf.FieldMask.Builder,
            com.google.protobuf.FieldMaskOrBuilder>
        updateMaskBuilder_;

    /**
     *
     *
     * <pre>
     * Optional. Indicates which fields in the provided
     * [Control][google.cloud.discoveryengine.v1.Control] to update. The following
     * are NOT supported:
     *
     * * [Control.name][google.cloud.discoveryengine.v1.Control.name]
     * * [Control.solution_type][google.cloud.discoveryengine.v1.Control.solution_type]
     *
     * If not set or empty, all supported fields are updated.
     * </pre>
     *
     * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = OPTIONAL];
     * </code>
     *
     * @return Whether the updateMask field is set.
     */
    public boolean hasUpdateMask() {
      return ((bitField0_ & 0x00000002) != 0);
    }

    /**
     *
     *
     * <pre>
     * Optional. Indicates which fields in the provided
     * [Control][google.cloud.discoveryengine.v1.Control] to update. The following
     * are NOT supported:
     *
     * * [Control.name][google.cloud.discoveryengine.v1.Control.name]
     * * [Control.solution_type][google.cloud.discoveryengine.v1.Control.solution_type]
     *
     * If not set or empty, all supported fields are updated.
     * </pre>
     *
     * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = OPTIONAL];
     * </code>
     *
     * @return The updateMask.
     */
    public com.google.protobuf.FieldMask getUpdateMask() {
      if (updateMaskBuilder_ == null) {
        return updateMask_ == null
            ? com.google.protobuf.FieldMask.getDefaultInstance()
            : updateMask_;
      } else {
        return updateMaskBuilder_.getMessage();
      }
    }

    /**
     *
     *
     * <pre>
     * Optional. Indicates which fields in the provided
     * [Control][google.cloud.discoveryengine.v1.Control] to update. The following
     * are NOT supported:
     *
     * * [Control.name][google.cloud.discoveryengine.v1.Control.name]
     * * [Control.solution_type][google.cloud.discoveryengine.v1.Control.solution_type]
     *
     * If not set or empty, all supported fields are updated.
     * </pre>
     *
     * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = OPTIONAL];
     * </code>
     */
    public Builder setUpdateMask(com.google.protobuf.FieldMask value) {
      if (updateMaskBuilder_ == null) {
        if (value == null) {
          throw new NullPointerException();
        }
        updateMask_ = value;
      } else {
        updateMaskBuilder_.setMessage(value);
      }
      bitField0_ |= 0x00000002;
      onChanged();
      return this;
    }

    /**
     *
     *
     * <pre>
     * Optional. Indicates which fields in the provided
     * [Control][google.cloud.discoveryengine.v1.Control] to update. The following
     * are NOT supported:
     *
     * * [Control.name][google.cloud.discoveryengine.v1.Control.name]
     * * [Control.solution_type][google.cloud.discoveryengine.v1.Control.solution_type]
     *
     * If not set or empty, all supported fields are updated.
     * </pre>
     *
     * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = OPTIONAL];
     * </code>
     */
    public Builder setUpdateMask(com.google.protobuf.FieldMask.Builder builderForValue) {
      if (updateMaskBuilder_ == null) {
        updateMask_ = builderForValue.build();
      } else {
        updateMaskBuilder_.setMessage(builderForValue.build());
      }
      bitField0_ |= 0x00000002;
      onChanged();
      return this;
    }

    /**
     *
     *
     * <pre>
     * Optional. Indicates which fields in the provided
     * [Control][google.cloud.discoveryengine.v1.Control] to update. The following
     * are NOT supported:
     *
     * * [Control.name][google.cloud.discoveryengine.v1.Control.name]
     * * [Control.solution_type][google.cloud.discoveryengine.v1.Control.solution_type]
     *
     * If not set or empty, all supported fields are updated.
     * </pre>
     *
     * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = OPTIONAL];
     * </code>
     */
    public Builder mergeUpdateMask(com.google.protobuf.FieldMask value) {
      if (updateMaskBuilder_ == null) {
        if (((bitField0_ & 0x00000002) != 0)
            && updateMask_ != null
            && updateMask_ != com.google.protobuf.FieldMask.getDefaultInstance()) {
          getUpdateMaskBuilder().mergeFrom(value);
        } else {
          updateMask_ = value;
        }
      } else {
        updateMaskBuilder_.mergeFrom(value);
      }
      if (updateMask_ != null) {
        bitField0_ |= 0x00000002;
        onChanged();
      }
      return this;
    }

    /**
     *
     *
     * <pre>
     * Optional. Indicates which fields in the provided
     * [Control][google.cloud.discoveryengine.v1.Control] to update. The following
     * are NOT supported:
     *
     * * [Control.name][google.cloud.discoveryengine.v1.Control.name]
     * * [Control.solution_type][google.cloud.discoveryengine.v1.Control.solution_type]
     *
     * If not set or empty, all supported fields are updated.
     * </pre>
     *
     * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = OPTIONAL];
     * </code>
     */
    public Builder clearUpdateMask() {
      bitField0_ = (bitField0_ & ~0x00000002);
      updateMask_ = null;
      if (updateMaskBuilder_ != null) {
        updateMaskBuilder_.dispose();
        updateMaskBuilder_ = null;
      }
      onChanged();
      return this;
    }

    /**
     *
     *
     * <pre>
     * Optional. Indicates which fields in the provided
     * [Control][google.cloud.discoveryengine.v1.Control] to update. The following
     * are NOT supported:
     *
     * * [Control.name][google.cloud.discoveryengine.v1.Control.name]
     * * [Control.solution_type][google.cloud.discoveryengine.v1.Control.solution_type]
     *
     * If not set or empty, all supported fields are updated.
     * </pre>
     *
     * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = OPTIONAL];
     * </code>
     */
    public com.google.protobuf.FieldMask.Builder getUpdateMaskBuilder() {
      bitField0_ |= 0x00000002;
      onChanged();
      return getUpdateMaskFieldBuilder().getBuilder();
    }

    /**
     *
     *
     * <pre>
     * Optional. Indicates which fields in the provided
     * [Control][google.cloud.discoveryengine.v1.Control] to update. The following
     * are NOT supported:
     *
     * * [Control.name][google.cloud.discoveryengine.v1.Control.name]
     * * [Control.solution_type][google.cloud.discoveryengine.v1.Control.solution_type]
     *
     * If not set or empty, all supported fields are updated.
     * </pre>
     *
     * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = OPTIONAL];
     * </code>
     */
    public com.google.protobuf.FieldMaskOrBuilder getUpdateMaskOrBuilder() {
      if (updateMaskBuilder_ != null) {
        return updateMaskBuilder_.getMessageOrBuilder();
      } else {
        return updateMask_ == null
            ? com.google.protobuf.FieldMask.getDefaultInstance()
            : updateMask_;
      }
    }

    /**
     *
     *
     * <pre>
     * Optional. Indicates which fields in the provided
     * [Control][google.cloud.discoveryengine.v1.Control] to update. The following
     * are NOT supported:
     *
     * * [Control.name][google.cloud.discoveryengine.v1.Control.name]
     * * [Control.solution_type][google.cloud.discoveryengine.v1.Control.solution_type]
     *
     * If not set or empty, all supported fields are updated.
     * </pre>
     *
     * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = OPTIONAL];
     * </code>
     */
    // Lazily creates the nested field builder; once created, the plain field is nulled
    // and the builder becomes the single source of truth for the update_mask field.
    private com.google.protobuf.SingleFieldBuilderV3<
            com.google.protobuf.FieldMask,
            com.google.protobuf.FieldMask.Builder,
            com.google.protobuf.FieldMaskOrBuilder>
        getUpdateMaskFieldBuilder() {
      if (updateMaskBuilder_ == null) {
        updateMaskBuilder_ =
            new com.google.protobuf.SingleFieldBuilderV3<
                com.google.protobuf.FieldMask,
                com.google.protobuf.FieldMask.Builder,
                com.google.protobuf.FieldMaskOrBuilder>(
                getUpdateMask(), getParentForChildren(), isClean());
        updateMask_ = null;
      }
      return updateMaskBuilder_;
    }

    @java.lang.Override
    public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.setUnknownFields(unknownFields);
    }

    @java.lang.Override
    public final Builder mergeUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.mergeUnknownFields(unknownFields);
    }

    // @@protoc_insertion_point(builder_scope:google.cloud.discoveryengine.v1.UpdateControlRequest)
  }

  // @@protoc_insertion_point(class_scope:google.cloud.discoveryengine.v1.UpdateControlRequest)
  // Shared immutable default instance, eagerly created during class initialization.
  private static final com.google.cloud.discoveryengine.v1.UpdateControlRequest DEFAULT_INSTANCE;

  static {
    DEFAULT_INSTANCE = new com.google.cloud.discoveryengine.v1.UpdateControlRequest();
  }

  public static com.google.cloud.discoveryengine.v1.UpdateControlRequest getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }

  // Stateless parser shared by all parse entry points. On failure it attaches the
  // partially decoded message to the thrown InvalidProtocolBufferException.
  private static final com.google.protobuf.Parser<UpdateControlRequest> PARSER =
      new com.google.protobuf.AbstractParser<UpdateControlRequest>() {
        @java.lang.Override
        public UpdateControlRequest parsePartialFrom(
            com.google.protobuf.CodedInputStream input,
            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
            throws com.google.protobuf.InvalidProtocolBufferException {
          Builder builder = newBuilder();
          try {
            builder.mergeFrom(input, extensionRegistry);
          } catch (com.google.protobuf.InvalidProtocolBufferException e) {
            throw e.setUnfinishedMessage(builder.buildPartial());
          } catch (com.google.protobuf.UninitializedMessageException e) {
            throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
          } catch (java.io.IOException e) {
            throw new com.google.protobuf.InvalidProtocolBufferException(e)
                .setUnfinishedMessage(builder.buildPartial());
          }
          return builder.buildPartial();
        }
      };

  public static com.google.protobuf.Parser<UpdateControlRequest> parser() {
    return PARSER;
  }

  @java.lang.Override
  public com.google.protobuf.Parser<UpdateControlRequest> getParserForType() {
    return PARSER;
  }

  @java.lang.Override
  public com.google.cloud.discoveryengine.v1.UpdateControlRequest getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }
}
|
googleapis/google-cloud-java | 36,907 | java-asset/proto-google-cloud-asset-v1p7beta1/src/main/java/com/google/cloud/asset/v1p7beta1/RelationshipAttributes.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/asset/v1p7beta1/assets.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.asset.v1p7beta1;
/**
*
*
* <pre>
* The relationship attributes which include `type`, `source_resource_type`,
* `target_resource_type` and `action`.
* </pre>
*
* Protobuf type {@code google.cloud.asset.v1p7beta1.RelationshipAttributes}
*/
public final class RelationshipAttributes extends com.google.protobuf.GeneratedMessageV3
    implements
    // @@protoc_insertion_point(message_implements:google.cloud.asset.v1p7beta1.RelationshipAttributes)
    RelationshipAttributesOrBuilder {
  // NOTE(review): protoc-generated message class (see "DO NOT EDIT" header). Comments below are
  // reading aids only; to change behavior, edit assets.proto and regenerate.
  private static final long serialVersionUID = 0L;
  // Use RelationshipAttributes.newBuilder() to construct.
  private RelationshipAttributes(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
    super(builder);
  }
  private RelationshipAttributes() {
    type_ = "";
    sourceResourceType_ = "";
    targetResourceType_ = "";
    action_ = "";
  }
  @java.lang.Override
  @SuppressWarnings({"unused"})
  protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
    return new RelationshipAttributes();
  }
  public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
    return com.google.cloud.asset.v1p7beta1.AssetProto
        .internal_static_google_cloud_asset_v1p7beta1_RelationshipAttributes_descriptor;
  }
  @java.lang.Override
  protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internalGetFieldAccessorTable() {
    return com.google.cloud.asset.v1p7beta1.AssetProto
        .internal_static_google_cloud_asset_v1p7beta1_RelationshipAttributes_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            com.google.cloud.asset.v1p7beta1.RelationshipAttributes.class,
            com.google.cloud.asset.v1p7beta1.RelationshipAttributes.Builder.class);
  }
  public static final int TYPE_FIELD_NUMBER = 4;
  // String fields are stored as Object so the slot can lazily hold either a String or the
  // UTF-8 ByteString form; accessors below cache whichever representation was requested.
  @SuppressWarnings("serial")
  private volatile java.lang.Object type_ = "";
  /**
   *
   *
   * <pre>
   * The unique identifier of the relationship type. Example:
   * `INSTANCE_TO_INSTANCEGROUP`
   * </pre>
   *
   * <code>string type = 4;</code>
   *
   * @return The type.
   */
  @java.lang.Override
  public java.lang.String getType() {
    java.lang.Object ref = type_;
    if (ref instanceof java.lang.String) {
      return (java.lang.String) ref;
    } else {
      com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
      java.lang.String s = bs.toStringUtf8();
      // Cache the decoded String so subsequent calls skip the UTF-8 conversion.
      type_ = s;
      return s;
    }
  }
  /**
   *
   *
   * <pre>
   * The unique identifier of the relationship type. Example:
   * `INSTANCE_TO_INSTANCEGROUP`
   * </pre>
   *
   * <code>string type = 4;</code>
   *
   * @return The bytes for type.
   */
  @java.lang.Override
  public com.google.protobuf.ByteString getTypeBytes() {
    java.lang.Object ref = type_;
    if (ref instanceof java.lang.String) {
      com.google.protobuf.ByteString b =
          com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
      type_ = b;
      return b;
    } else {
      return (com.google.protobuf.ByteString) ref;
    }
  }
  public static final int SOURCE_RESOURCE_TYPE_FIELD_NUMBER = 1;
  @SuppressWarnings("serial")
  private volatile java.lang.Object sourceResourceType_ = "";
  /**
   *
   *
   * <pre>
   * The source asset type. Example: `compute.googleapis.com/Instance`
   * </pre>
   *
   * <code>string source_resource_type = 1;</code>
   *
   * @return The sourceResourceType.
   */
  @java.lang.Override
  public java.lang.String getSourceResourceType() {
    java.lang.Object ref = sourceResourceType_;
    if (ref instanceof java.lang.String) {
      return (java.lang.String) ref;
    } else {
      com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
      java.lang.String s = bs.toStringUtf8();
      sourceResourceType_ = s;
      return s;
    }
  }
  /**
   *
   *
   * <pre>
   * The source asset type. Example: `compute.googleapis.com/Instance`
   * </pre>
   *
   * <code>string source_resource_type = 1;</code>
   *
   * @return The bytes for sourceResourceType.
   */
  @java.lang.Override
  public com.google.protobuf.ByteString getSourceResourceTypeBytes() {
    java.lang.Object ref = sourceResourceType_;
    if (ref instanceof java.lang.String) {
      com.google.protobuf.ByteString b =
          com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
      sourceResourceType_ = b;
      return b;
    } else {
      return (com.google.protobuf.ByteString) ref;
    }
  }
  public static final int TARGET_RESOURCE_TYPE_FIELD_NUMBER = 2;
  @SuppressWarnings("serial")
  private volatile java.lang.Object targetResourceType_ = "";
  /**
   *
   *
   * <pre>
   * The target asset type. Example: `compute.googleapis.com/Disk`
   * </pre>
   *
   * <code>string target_resource_type = 2;</code>
   *
   * @return The targetResourceType.
   */
  @java.lang.Override
  public java.lang.String getTargetResourceType() {
    java.lang.Object ref = targetResourceType_;
    if (ref instanceof java.lang.String) {
      return (java.lang.String) ref;
    } else {
      com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
      java.lang.String s = bs.toStringUtf8();
      targetResourceType_ = s;
      return s;
    }
  }
  /**
   *
   *
   * <pre>
   * The target asset type. Example: `compute.googleapis.com/Disk`
   * </pre>
   *
   * <code>string target_resource_type = 2;</code>
   *
   * @return The bytes for targetResourceType.
   */
  @java.lang.Override
  public com.google.protobuf.ByteString getTargetResourceTypeBytes() {
    java.lang.Object ref = targetResourceType_;
    if (ref instanceof java.lang.String) {
      com.google.protobuf.ByteString b =
          com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
      targetResourceType_ = b;
      return b;
    } else {
      return (com.google.protobuf.ByteString) ref;
    }
  }
  public static final int ACTION_FIELD_NUMBER = 3;
  @SuppressWarnings("serial")
  private volatile java.lang.Object action_ = "";
  /**
   *
   *
   * <pre>
   * The detail of the relationship, e.g. `contains`, `attaches`
   * </pre>
   *
   * <code>string action = 3;</code>
   *
   * @return The action.
   */
  @java.lang.Override
  public java.lang.String getAction() {
    java.lang.Object ref = action_;
    if (ref instanceof java.lang.String) {
      return (java.lang.String) ref;
    } else {
      com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
      java.lang.String s = bs.toStringUtf8();
      action_ = s;
      return s;
    }
  }
  /**
   *
   *
   * <pre>
   * The detail of the relationship, e.g. `contains`, `attaches`
   * </pre>
   *
   * <code>string action = 3;</code>
   *
   * @return The bytes for action.
   */
  @java.lang.Override
  public com.google.protobuf.ByteString getActionBytes() {
    java.lang.Object ref = action_;
    if (ref instanceof java.lang.String) {
      com.google.protobuf.ByteString b =
          com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
      action_ = b;
      return b;
    } else {
      return (com.google.protobuf.ByteString) ref;
    }
  }
  // Memoized initialization check: -1 = not computed, 0 = false, 1 = true.
  private byte memoizedIsInitialized = -1;
  @java.lang.Override
  public final boolean isInitialized() {
    byte isInitialized = memoizedIsInitialized;
    if (isInitialized == 1) return true;
    if (isInitialized == 0) return false;
    // No required fields in this message, so it is always initialized.
    memoizedIsInitialized = 1;
    return true;
  }
  @java.lang.Override
  public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
    // Fields are serialized in field-number order (1..4), not declaration order; empty
    // strings are omitted per proto3 default-value semantics.
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(sourceResourceType_)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 1, sourceResourceType_);
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(targetResourceType_)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 2, targetResourceType_);
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(action_)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 3, action_);
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(type_)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 4, type_);
    }
    getUnknownFields().writeTo(output);
  }
  @java.lang.Override
  public int getSerializedSize() {
    // memoizedSize (-1 when unset) caches the result; the message is immutable so this is safe.
    int size = memoizedSize;
    if (size != -1) return size;
    size = 0;
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(sourceResourceType_)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, sourceResourceType_);
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(targetResourceType_)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, targetResourceType_);
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(action_)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(3, action_);
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(type_)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(4, type_);
    }
    size += getUnknownFields().getSerializedSize();
    memoizedSize = size;
    return size;
  }
  @java.lang.Override
  public boolean equals(final java.lang.Object obj) {
    if (obj == this) {
      return true;
    }
    if (!(obj instanceof com.google.cloud.asset.v1p7beta1.RelationshipAttributes)) {
      return super.equals(obj);
    }
    com.google.cloud.asset.v1p7beta1.RelationshipAttributes other =
        (com.google.cloud.asset.v1p7beta1.RelationshipAttributes) obj;
    if (!getType().equals(other.getType())) return false;
    if (!getSourceResourceType().equals(other.getSourceResourceType())) return false;
    if (!getTargetResourceType().equals(other.getTargetResourceType())) return false;
    if (!getAction().equals(other.getAction())) return false;
    if (!getUnknownFields().equals(other.getUnknownFields())) return false;
    return true;
  }
  @java.lang.Override
  public int hashCode() {
    // memoizedHashCode (0 when unset) caches the hash; consistent with equals() above.
    if (memoizedHashCode != 0) {
      return memoizedHashCode;
    }
    int hash = 41;
    hash = (19 * hash) + getDescriptor().hashCode();
    hash = (37 * hash) + TYPE_FIELD_NUMBER;
    hash = (53 * hash) + getType().hashCode();
    hash = (37 * hash) + SOURCE_RESOURCE_TYPE_FIELD_NUMBER;
    hash = (53 * hash) + getSourceResourceType().hashCode();
    hash = (37 * hash) + TARGET_RESOURCE_TYPE_FIELD_NUMBER;
    hash = (53 * hash) + getTargetResourceType().hashCode();
    hash = (37 * hash) + ACTION_FIELD_NUMBER;
    hash = (53 * hash) + getAction().hashCode();
    hash = (29 * hash) + getUnknownFields().hashCode();
    memoizedHashCode = hash;
    return hash;
  }
  public static com.google.cloud.asset.v1p7beta1.RelationshipAttributes parseFrom(
      java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.cloud.asset.v1p7beta1.RelationshipAttributes parseFrom(
      java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.cloud.asset.v1p7beta1.RelationshipAttributes parseFrom(
      com.google.protobuf.ByteString data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.cloud.asset.v1p7beta1.RelationshipAttributes parseFrom(
      com.google.protobuf.ByteString data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.cloud.asset.v1p7beta1.RelationshipAttributes parseFrom(byte[] data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.cloud.asset.v1p7beta1.RelationshipAttributes parseFrom(
      byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.cloud.asset.v1p7beta1.RelationshipAttributes parseFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }
  public static com.google.cloud.asset.v1p7beta1.RelationshipAttributes parseFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }
  public static com.google.cloud.asset.v1p7beta1.RelationshipAttributes parseDelimitedFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
  }
  public static com.google.cloud.asset.v1p7beta1.RelationshipAttributes parseDelimitedFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
        PARSER, input, extensionRegistry);
  }
  public static com.google.cloud.asset.v1p7beta1.RelationshipAttributes parseFrom(
      com.google.protobuf.CodedInputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }
  public static com.google.cloud.asset.v1p7beta1.RelationshipAttributes parseFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }
  @java.lang.Override
  public Builder newBuilderForType() {
    return newBuilder();
  }
  public static Builder newBuilder() {
    return DEFAULT_INSTANCE.toBuilder();
  }
  public static Builder newBuilder(
      com.google.cloud.asset.v1p7beta1.RelationshipAttributes prototype) {
    return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
  }
  @java.lang.Override
  public Builder toBuilder() {
    return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
  }
  @java.lang.Override
  protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
    Builder builder = new Builder(parent);
    return builder;
  }
  /**
   *
   *
   * <pre>
   * The relationship attributes which include `type`, `source_resource_type`,
   * `target_resource_type` and `action`.
   * </pre>
   *
   * Protobuf type {@code google.cloud.asset.v1p7beta1.RelationshipAttributes}
   */
  public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
      implements
      // @@protoc_insertion_point(builder_implements:google.cloud.asset.v1p7beta1.RelationshipAttributes)
      com.google.cloud.asset.v1p7beta1.RelationshipAttributesOrBuilder {
    public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
      return com.google.cloud.asset.v1p7beta1.AssetProto
          .internal_static_google_cloud_asset_v1p7beta1_RelationshipAttributes_descriptor;
    }
    @java.lang.Override
    protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return com.google.cloud.asset.v1p7beta1.AssetProto
          .internal_static_google_cloud_asset_v1p7beta1_RelationshipAttributes_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              com.google.cloud.asset.v1p7beta1.RelationshipAttributes.class,
              com.google.cloud.asset.v1p7beta1.RelationshipAttributes.Builder.class);
    }
    // Construct using com.google.cloud.asset.v1p7beta1.RelationshipAttributes.newBuilder()
    private Builder() {}
    private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
      super(parent);
    }
    @java.lang.Override
    public Builder clear() {
      super.clear();
      bitField0_ = 0;
      type_ = "";
      sourceResourceType_ = "";
      targetResourceType_ = "";
      action_ = "";
      return this;
    }
    @java.lang.Override
    public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
      return com.google.cloud.asset.v1p7beta1.AssetProto
          .internal_static_google_cloud_asset_v1p7beta1_RelationshipAttributes_descriptor;
    }
    @java.lang.Override
    public com.google.cloud.asset.v1p7beta1.RelationshipAttributes getDefaultInstanceForType() {
      return com.google.cloud.asset.v1p7beta1.RelationshipAttributes.getDefaultInstance();
    }
    @java.lang.Override
    public com.google.cloud.asset.v1p7beta1.RelationshipAttributes build() {
      com.google.cloud.asset.v1p7beta1.RelationshipAttributes result = buildPartial();
      if (!result.isInitialized()) {
        throw newUninitializedMessageException(result);
      }
      return result;
    }
    @java.lang.Override
    public com.google.cloud.asset.v1p7beta1.RelationshipAttributes buildPartial() {
      com.google.cloud.asset.v1p7beta1.RelationshipAttributes result =
          new com.google.cloud.asset.v1p7beta1.RelationshipAttributes(this);
      if (bitField0_ != 0) {
        buildPartial0(result);
      }
      onBuilt();
      return result;
    }
    // Copies only the fields whose has-bits are set in bitField0_ into the result message.
    private void buildPartial0(com.google.cloud.asset.v1p7beta1.RelationshipAttributes result) {
      int from_bitField0_ = bitField0_;
      if (((from_bitField0_ & 0x00000001) != 0)) {
        result.type_ = type_;
      }
      if (((from_bitField0_ & 0x00000002) != 0)) {
        result.sourceResourceType_ = sourceResourceType_;
      }
      if (((from_bitField0_ & 0x00000004) != 0)) {
        result.targetResourceType_ = targetResourceType_;
      }
      if (((from_bitField0_ & 0x00000008) != 0)) {
        result.action_ = action_;
      }
    }
    @java.lang.Override
    public Builder clone() {
      return super.clone();
    }
    @java.lang.Override
    public Builder setField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.setField(field, value);
    }
    @java.lang.Override
    public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
      return super.clearField(field);
    }
    @java.lang.Override
    public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
      return super.clearOneof(oneof);
    }
    @java.lang.Override
    public Builder setRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
      return super.setRepeatedField(field, index, value);
    }
    @java.lang.Override
    public Builder addRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.addRepeatedField(field, value);
    }
    @java.lang.Override
    public Builder mergeFrom(com.google.protobuf.Message other) {
      if (other instanceof com.google.cloud.asset.v1p7beta1.RelationshipAttributes) {
        return mergeFrom((com.google.cloud.asset.v1p7beta1.RelationshipAttributes) other);
      } else {
        super.mergeFrom(other);
        return this;
      }
    }
    public Builder mergeFrom(com.google.cloud.asset.v1p7beta1.RelationshipAttributes other) {
      if (other == com.google.cloud.asset.v1p7beta1.RelationshipAttributes.getDefaultInstance())
        return this;
      // Proto3 merge semantics: only non-empty fields from `other` overwrite this builder.
      if (!other.getType().isEmpty()) {
        type_ = other.type_;
        bitField0_ |= 0x00000001;
        onChanged();
      }
      if (!other.getSourceResourceType().isEmpty()) {
        sourceResourceType_ = other.sourceResourceType_;
        bitField0_ |= 0x00000002;
        onChanged();
      }
      if (!other.getTargetResourceType().isEmpty()) {
        targetResourceType_ = other.targetResourceType_;
        bitField0_ |= 0x00000004;
        onChanged();
      }
      if (!other.getAction().isEmpty()) {
        action_ = other.action_;
        bitField0_ |= 0x00000008;
        onChanged();
      }
      this.mergeUnknownFields(other.getUnknownFields());
      onChanged();
      return this;
    }
    @java.lang.Override
    public final boolean isInitialized() {
      return true;
    }
    @java.lang.Override
    public Builder mergeFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      if (extensionRegistry == null) {
        throw new java.lang.NullPointerException();
      }
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          // Wire tags are (field_number << 3) | wire_type; wire type 2 = length-delimited,
          // so e.g. tag 10 = field 1, tag 34 = field 4.
          switch (tag) {
            case 0:
              done = true;
              break;
            case 10:
              {
                sourceResourceType_ = input.readStringRequireUtf8();
                bitField0_ |= 0x00000002;
                break;
              } // case 10
            case 18:
              {
                targetResourceType_ = input.readStringRequireUtf8();
                bitField0_ |= 0x00000004;
                break;
              } // case 18
            case 26:
              {
                action_ = input.readStringRequireUtf8();
                bitField0_ |= 0x00000008;
                break;
              } // case 26
            case 34:
              {
                type_ = input.readStringRequireUtf8();
                bitField0_ |= 0x00000001;
                break;
              } // case 34
            default:
              {
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
          } // switch (tag)
        } // while (!done)
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.unwrapIOException();
      } finally {
        onChanged();
      } // finally
      return this;
    }
    // Has-bits for the four string fields; see buildPartial0 for the bit-to-field mapping.
    private int bitField0_;
    private java.lang.Object type_ = "";
    /**
     *
     *
     * <pre>
     * The unique identifier of the relationship type. Example:
     * `INSTANCE_TO_INSTANCEGROUP`
     * </pre>
     *
     * <code>string type = 4;</code>
     *
     * @return The type.
     */
    public java.lang.String getType() {
      java.lang.Object ref = type_;
      if (!(ref instanceof java.lang.String)) {
        com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        type_ = s;
        return s;
      } else {
        return (java.lang.String) ref;
      }
    }
    /**
     *
     *
     * <pre>
     * The unique identifier of the relationship type. Example:
     * `INSTANCE_TO_INSTANCEGROUP`
     * </pre>
     *
     * <code>string type = 4;</code>
     *
     * @return The bytes for type.
     */
    public com.google.protobuf.ByteString getTypeBytes() {
      java.lang.Object ref = type_;
      if (ref instanceof String) {
        com.google.protobuf.ByteString b =
            com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
        type_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }
    /**
     *
     *
     * <pre>
     * The unique identifier of the relationship type. Example:
     * `INSTANCE_TO_INSTANCEGROUP`
     * </pre>
     *
     * <code>string type = 4;</code>
     *
     * @param value The type to set.
     * @return This builder for chaining.
     */
    public Builder setType(java.lang.String value) {
      if (value == null) {
        throw new NullPointerException();
      }
      type_ = value;
      bitField0_ |= 0x00000001;
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * The unique identifier of the relationship type. Example:
     * `INSTANCE_TO_INSTANCEGROUP`
     * </pre>
     *
     * <code>string type = 4;</code>
     *
     * @return This builder for chaining.
     */
    public Builder clearType() {
      type_ = getDefaultInstance().getType();
      bitField0_ = (bitField0_ & ~0x00000001);
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * The unique identifier of the relationship type. Example:
     * `INSTANCE_TO_INSTANCEGROUP`
     * </pre>
     *
     * <code>string type = 4;</code>
     *
     * @param value The bytes for type to set.
     * @return This builder for chaining.
     */
    public Builder setTypeBytes(com.google.protobuf.ByteString value) {
      if (value == null) {
        throw new NullPointerException();
      }
      checkByteStringIsUtf8(value);
      type_ = value;
      bitField0_ |= 0x00000001;
      onChanged();
      return this;
    }
    private java.lang.Object sourceResourceType_ = "";
    /**
     *
     *
     * <pre>
     * The source asset type. Example: `compute.googleapis.com/Instance`
     * </pre>
     *
     * <code>string source_resource_type = 1;</code>
     *
     * @return The sourceResourceType.
     */
    public java.lang.String getSourceResourceType() {
      java.lang.Object ref = sourceResourceType_;
      if (!(ref instanceof java.lang.String)) {
        com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        sourceResourceType_ = s;
        return s;
      } else {
        return (java.lang.String) ref;
      }
    }
    /**
     *
     *
     * <pre>
     * The source asset type. Example: `compute.googleapis.com/Instance`
     * </pre>
     *
     * <code>string source_resource_type = 1;</code>
     *
     * @return The bytes for sourceResourceType.
     */
    public com.google.protobuf.ByteString getSourceResourceTypeBytes() {
      java.lang.Object ref = sourceResourceType_;
      if (ref instanceof String) {
        com.google.protobuf.ByteString b =
            com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
        sourceResourceType_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }
    /**
     *
     *
     * <pre>
     * The source asset type. Example: `compute.googleapis.com/Instance`
     * </pre>
     *
     * <code>string source_resource_type = 1;</code>
     *
     * @param value The sourceResourceType to set.
     * @return This builder for chaining.
     */
    public Builder setSourceResourceType(java.lang.String value) {
      if (value == null) {
        throw new NullPointerException();
      }
      sourceResourceType_ = value;
      bitField0_ |= 0x00000002;
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * The source asset type. Example: `compute.googleapis.com/Instance`
     * </pre>
     *
     * <code>string source_resource_type = 1;</code>
     *
     * @return This builder for chaining.
     */
    public Builder clearSourceResourceType() {
      sourceResourceType_ = getDefaultInstance().getSourceResourceType();
      bitField0_ = (bitField0_ & ~0x00000002);
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * The source asset type. Example: `compute.googleapis.com/Instance`
     * </pre>
     *
     * <code>string source_resource_type = 1;</code>
     *
     * @param value The bytes for sourceResourceType to set.
     * @return This builder for chaining.
     */
    public Builder setSourceResourceTypeBytes(com.google.protobuf.ByteString value) {
      if (value == null) {
        throw new NullPointerException();
      }
      checkByteStringIsUtf8(value);
      sourceResourceType_ = value;
      bitField0_ |= 0x00000002;
      onChanged();
      return this;
    }
    private java.lang.Object targetResourceType_ = "";
    /**
     *
     *
     * <pre>
     * The target asset type. Example: `compute.googleapis.com/Disk`
     * </pre>
     *
     * <code>string target_resource_type = 2;</code>
     *
     * @return The targetResourceType.
     */
    public java.lang.String getTargetResourceType() {
      java.lang.Object ref = targetResourceType_;
      if (!(ref instanceof java.lang.String)) {
        com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        targetResourceType_ = s;
        return s;
      } else {
        return (java.lang.String) ref;
      }
    }
    /**
     *
     *
     * <pre>
     * The target asset type. Example: `compute.googleapis.com/Disk`
     * </pre>
     *
     * <code>string target_resource_type = 2;</code>
     *
     * @return The bytes for targetResourceType.
     */
    public com.google.protobuf.ByteString getTargetResourceTypeBytes() {
      java.lang.Object ref = targetResourceType_;
      if (ref instanceof String) {
        com.google.protobuf.ByteString b =
            com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
        targetResourceType_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }
    /**
     *
     *
     * <pre>
     * The target asset type. Example: `compute.googleapis.com/Disk`
     * </pre>
     *
     * <code>string target_resource_type = 2;</code>
     *
     * @param value The targetResourceType to set.
     * @return This builder for chaining.
     */
    public Builder setTargetResourceType(java.lang.String value) {
      if (value == null) {
        throw new NullPointerException();
      }
      targetResourceType_ = value;
      bitField0_ |= 0x00000004;
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * The target asset type. Example: `compute.googleapis.com/Disk`
     * </pre>
     *
     * <code>string target_resource_type = 2;</code>
     *
     * @return This builder for chaining.
     */
    public Builder clearTargetResourceType() {
      targetResourceType_ = getDefaultInstance().getTargetResourceType();
      bitField0_ = (bitField0_ & ~0x00000004);
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * The target asset type. Example: `compute.googleapis.com/Disk`
     * </pre>
     *
     * <code>string target_resource_type = 2;</code>
     *
     * @param value The bytes for targetResourceType to set.
     * @return This builder for chaining.
     */
    public Builder setTargetResourceTypeBytes(com.google.protobuf.ByteString value) {
      if (value == null) {
        throw new NullPointerException();
      }
      checkByteStringIsUtf8(value);
      targetResourceType_ = value;
      bitField0_ |= 0x00000004;
      onChanged();
      return this;
    }
    private java.lang.Object action_ = "";
    /**
     *
     *
     * <pre>
     * The detail of the relationship, e.g. `contains`, `attaches`
     * </pre>
     *
     * <code>string action = 3;</code>
     *
     * @return The action.
     */
    public java.lang.String getAction() {
      java.lang.Object ref = action_;
      if (!(ref instanceof java.lang.String)) {
        com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        action_ = s;
        return s;
      } else {
        return (java.lang.String) ref;
      }
    }
    /**
     *
     *
     * <pre>
     * The detail of the relationship, e.g. `contains`, `attaches`
     * </pre>
     *
     * <code>string action = 3;</code>
     *
     * @return The bytes for action.
     */
    public com.google.protobuf.ByteString getActionBytes() {
      java.lang.Object ref = action_;
      if (ref instanceof String) {
        com.google.protobuf.ByteString b =
            com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
        action_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }
    /**
     *
     *
     * <pre>
     * The detail of the relationship, e.g. `contains`, `attaches`
     * </pre>
     *
     * <code>string action = 3;</code>
     *
     * @param value The action to set.
     * @return This builder for chaining.
     */
    public Builder setAction(java.lang.String value) {
      if (value == null) {
        throw new NullPointerException();
      }
      action_ = value;
      bitField0_ |= 0x00000008;
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * The detail of the relationship, e.g. `contains`, `attaches`
     * </pre>
     *
     * <code>string action = 3;</code>
     *
     * @return This builder for chaining.
     */
    public Builder clearAction() {
      action_ = getDefaultInstance().getAction();
      bitField0_ = (bitField0_ & ~0x00000008);
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * The detail of the relationship, e.g. `contains`, `attaches`
     * </pre>
     *
     * <code>string action = 3;</code>
     *
     * @param value The bytes for action to set.
     * @return This builder for chaining.
     */
    public Builder setActionBytes(com.google.protobuf.ByteString value) {
      if (value == null) {
        throw new NullPointerException();
      }
      checkByteStringIsUtf8(value);
      action_ = value;
      bitField0_ |= 0x00000008;
      onChanged();
      return this;
    }
    @java.lang.Override
    public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.setUnknownFields(unknownFields);
    }
    @java.lang.Override
    public final Builder mergeUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.mergeUnknownFields(unknownFields);
    }
    // @@protoc_insertion_point(builder_scope:google.cloud.asset.v1p7beta1.RelationshipAttributes)
  }
  // @@protoc_insertion_point(class_scope:google.cloud.asset.v1p7beta1.RelationshipAttributes)
  private static final com.google.cloud.asset.v1p7beta1.RelationshipAttributes DEFAULT_INSTANCE;
  static {
    DEFAULT_INSTANCE = new com.google.cloud.asset.v1p7beta1.RelationshipAttributes();
  }
  public static com.google.cloud.asset.v1p7beta1.RelationshipAttributes getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }
  // Parser attaches the partially-built message to any exception so callers can inspect
  // whatever was successfully parsed before the failure.
  private static final com.google.protobuf.Parser<RelationshipAttributes> PARSER =
      new com.google.protobuf.AbstractParser<RelationshipAttributes>() {
        @java.lang.Override
        public RelationshipAttributes parsePartialFrom(
            com.google.protobuf.CodedInputStream input,
            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
            throws com.google.protobuf.InvalidProtocolBufferException {
          Builder builder = newBuilder();
          try {
            builder.mergeFrom(input, extensionRegistry);
          } catch (com.google.protobuf.InvalidProtocolBufferException e) {
            throw e.setUnfinishedMessage(builder.buildPartial());
          } catch (com.google.protobuf.UninitializedMessageException e) {
            throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
          } catch (java.io.IOException e) {
            throw new com.google.protobuf.InvalidProtocolBufferException(e)
                .setUnfinishedMessage(builder.buildPartial());
          }
          return builder.buildPartial();
        }
      };
  public static com.google.protobuf.Parser<RelationshipAttributes> parser() {
    return PARSER;
  }
  @java.lang.Override
  public com.google.protobuf.Parser<RelationshipAttributes> getParserForType() {
    return PARSER;
  }
  @java.lang.Override
  public com.google.cloud.asset.v1p7beta1.RelationshipAttributes getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }
}
|
googleapis/google-cloud-java | 36,971 | java-shopping-merchant-accounts/proto-google-shopping-merchant-accounts-v1/src/main/java/com/google/shopping/merchant/accounts/v1/UpdateBusinessInfoRequest.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/shopping/merchant/accounts/v1/businessinfo.proto
// Protobuf Java Version: 3.25.8
package com.google.shopping.merchant.accounts.v1;
/**
*
*
* <pre>
* Request message for the `UpdateBusinessInfo` method.
* </pre>
*
* Protobuf type {@code google.shopping.merchant.accounts.v1.UpdateBusinessInfoRequest}
*/
public final class UpdateBusinessInfoRequest extends com.google.protobuf.GeneratedMessageV3
    implements
    // @@protoc_insertion_point(message_implements:google.shopping.merchant.accounts.v1.UpdateBusinessInfoRequest)
    UpdateBusinessInfoRequestOrBuilder {
  private static final long serialVersionUID = 0L;

  // Use UpdateBusinessInfoRequest.newBuilder() to construct.
  private UpdateBusinessInfoRequest(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
    super(builder);
  }

  private UpdateBusinessInfoRequest() {}

  @java.lang.Override
  @SuppressWarnings({"unused"})
  protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
    return new UpdateBusinessInfoRequest();
  }

  public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
    return com.google.shopping.merchant.accounts.v1.BusinessInfoProto
        .internal_static_google_shopping_merchant_accounts_v1_UpdateBusinessInfoRequest_descriptor;
  }

  @java.lang.Override
  protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internalGetFieldAccessorTable() {
    return com.google.shopping.merchant.accounts.v1.BusinessInfoProto
        .internal_static_google_shopping_merchant_accounts_v1_UpdateBusinessInfoRequest_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            com.google.shopping.merchant.accounts.v1.UpdateBusinessInfoRequest.class,
            com.google.shopping.merchant.accounts.v1.UpdateBusinessInfoRequest.Builder.class);
  }

  // Bit field tracking which of the two singular message fields
  // (business_info = bit 0x1, update_mask = bit 0x2) have been set.
  private int bitField0_;
  public static final int BUSINESS_INFO_FIELD_NUMBER = 1;
  private com.google.shopping.merchant.accounts.v1.BusinessInfo businessInfo_;
  /**
   *
   *
   * <pre>
   * Required. The new version of the business info.
   * </pre>
   *
   * <code>
   * .google.shopping.merchant.accounts.v1.BusinessInfo business_info = 1 [(.google.api.field_behavior) = REQUIRED];
   * </code>
   *
   * @return Whether the businessInfo field is set.
   */
  @java.lang.Override
  public boolean hasBusinessInfo() {
    return ((bitField0_ & 0x00000001) != 0);
  }
  /**
   *
   *
   * <pre>
   * Required. The new version of the business info.
   * </pre>
   *
   * <code>
   * .google.shopping.merchant.accounts.v1.BusinessInfo business_info = 1 [(.google.api.field_behavior) = REQUIRED];
   * </code>
   *
   * @return The businessInfo.
   */
  @java.lang.Override
  public com.google.shopping.merchant.accounts.v1.BusinessInfo getBusinessInfo() {
    return businessInfo_ == null
        ? com.google.shopping.merchant.accounts.v1.BusinessInfo.getDefaultInstance()
        : businessInfo_;
  }
  /**
   *
   *
   * <pre>
   * Required. The new version of the business info.
   * </pre>
   *
   * <code>
   * .google.shopping.merchant.accounts.v1.BusinessInfo business_info = 1 [(.google.api.field_behavior) = REQUIRED];
   * </code>
   */
  @java.lang.Override
  public com.google.shopping.merchant.accounts.v1.BusinessInfoOrBuilder getBusinessInfoOrBuilder() {
    return businessInfo_ == null
        ? com.google.shopping.merchant.accounts.v1.BusinessInfo.getDefaultInstance()
        : businessInfo_;
  }

  public static final int UPDATE_MASK_FIELD_NUMBER = 2;
  private com.google.protobuf.FieldMask updateMask_;
  /**
   *
   *
   * <pre>
   * Optional. List of fields being updated.
   *
   * The following fields are supported (in both `snake_case` and
   * `lowerCamelCase`):
   *
   * - `address`
   * - `customer_service`
   * - `korean_business_registration_number`
   * </pre>
   *
   * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = OPTIONAL];
   * </code>
   *
   * @return Whether the updateMask field is set.
   */
  @java.lang.Override
  public boolean hasUpdateMask() {
    return ((bitField0_ & 0x00000002) != 0);
  }
  /**
   *
   *
   * <pre>
   * Optional. List of fields being updated.
   *
   * The following fields are supported (in both `snake_case` and
   * `lowerCamelCase`):
   *
   * - `address`
   * - `customer_service`
   * - `korean_business_registration_number`
   * </pre>
   *
   * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = OPTIONAL];
   * </code>
   *
   * @return The updateMask.
   */
  @java.lang.Override
  public com.google.protobuf.FieldMask getUpdateMask() {
    return updateMask_ == null ? com.google.protobuf.FieldMask.getDefaultInstance() : updateMask_;
  }
  /**
   *
   *
   * <pre>
   * Optional. List of fields being updated.
   *
   * The following fields are supported (in both `snake_case` and
   * `lowerCamelCase`):
   *
   * - `address`
   * - `customer_service`
   * - `korean_business_registration_number`
   * </pre>
   *
   * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = OPTIONAL];
   * </code>
   */
  @java.lang.Override
  public com.google.protobuf.FieldMaskOrBuilder getUpdateMaskOrBuilder() {
    return updateMask_ == null ? com.google.protobuf.FieldMask.getDefaultInstance() : updateMask_;
  }

  // Cached result of isInitialized(): -1 = not yet computed, 0 = false, 1 = true.
  private byte memoizedIsInitialized = -1;

  @java.lang.Override
  public final boolean isInitialized() {
    byte isInitialized = memoizedIsInitialized;
    if (isInitialized == 1) return true;
    if (isInitialized == 0) return false;
    memoizedIsInitialized = 1;
    return true;
  }

  @java.lang.Override
  public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
    if (((bitField0_ & 0x00000001) != 0)) {
      output.writeMessage(1, getBusinessInfo());
    }
    if (((bitField0_ & 0x00000002) != 0)) {
      output.writeMessage(2, getUpdateMask());
    }
    getUnknownFields().writeTo(output);
  }

  @java.lang.Override
  public int getSerializedSize() {
    int size = memoizedSize;
    if (size != -1) return size;

    size = 0;
    if (((bitField0_ & 0x00000001) != 0)) {
      size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, getBusinessInfo());
    }
    if (((bitField0_ & 0x00000002) != 0)) {
      size += com.google.protobuf.CodedOutputStream.computeMessageSize(2, getUpdateMask());
    }
    size += getUnknownFields().getSerializedSize();
    memoizedSize = size;
    return size;
  }

  @java.lang.Override
  public boolean equals(final java.lang.Object obj) {
    if (obj == this) {
      return true;
    }
    if (!(obj instanceof com.google.shopping.merchant.accounts.v1.UpdateBusinessInfoRequest)) {
      return super.equals(obj);
    }
    com.google.shopping.merchant.accounts.v1.UpdateBusinessInfoRequest other =
        (com.google.shopping.merchant.accounts.v1.UpdateBusinessInfoRequest) obj;

    if (hasBusinessInfo() != other.hasBusinessInfo()) return false;
    if (hasBusinessInfo()) {
      if (!getBusinessInfo().equals(other.getBusinessInfo())) return false;
    }
    if (hasUpdateMask() != other.hasUpdateMask()) return false;
    if (hasUpdateMask()) {
      if (!getUpdateMask().equals(other.getUpdateMask())) return false;
    }
    if (!getUnknownFields().equals(other.getUnknownFields())) return false;
    return true;
  }

  @java.lang.Override
  public int hashCode() {
    if (memoizedHashCode != 0) {
      return memoizedHashCode;
    }
    int hash = 41;
    hash = (19 * hash) + getDescriptor().hashCode();
    if (hasBusinessInfo()) {
      hash = (37 * hash) + BUSINESS_INFO_FIELD_NUMBER;
      hash = (53 * hash) + getBusinessInfo().hashCode();
    }
    if (hasUpdateMask()) {
      hash = (37 * hash) + UPDATE_MASK_FIELD_NUMBER;
      hash = (53 * hash) + getUpdateMask().hashCode();
    }
    hash = (29 * hash) + getUnknownFields().hashCode();
    memoizedHashCode = hash;
    return hash;
  }

  public static com.google.shopping.merchant.accounts.v1.UpdateBusinessInfoRequest parseFrom(
      java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.shopping.merchant.accounts.v1.UpdateBusinessInfoRequest parseFrom(
      java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.shopping.merchant.accounts.v1.UpdateBusinessInfoRequest parseFrom(
      com.google.protobuf.ByteString data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.shopping.merchant.accounts.v1.UpdateBusinessInfoRequest parseFrom(
      com.google.protobuf.ByteString data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.shopping.merchant.accounts.v1.UpdateBusinessInfoRequest parseFrom(
      byte[] data) throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.shopping.merchant.accounts.v1.UpdateBusinessInfoRequest parseFrom(
      byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.shopping.merchant.accounts.v1.UpdateBusinessInfoRequest parseFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }

  public static com.google.shopping.merchant.accounts.v1.UpdateBusinessInfoRequest parseFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }

  public static com.google.shopping.merchant.accounts.v1.UpdateBusinessInfoRequest
      parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
  }

  public static com.google.shopping.merchant.accounts.v1.UpdateBusinessInfoRequest
      parseDelimitedFrom(
          java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
        PARSER, input, extensionRegistry);
  }

  public static com.google.shopping.merchant.accounts.v1.UpdateBusinessInfoRequest parseFrom(
      com.google.protobuf.CodedInputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }

  public static com.google.shopping.merchant.accounts.v1.UpdateBusinessInfoRequest parseFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }

  @java.lang.Override
  public Builder newBuilderForType() {
    return newBuilder();
  }

  public static Builder newBuilder() {
    return DEFAULT_INSTANCE.toBuilder();
  }

  public static Builder newBuilder(
      com.google.shopping.merchant.accounts.v1.UpdateBusinessInfoRequest prototype) {
    return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
  }

  @java.lang.Override
  public Builder toBuilder() {
    return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
  }

  @java.lang.Override
  protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
    Builder builder = new Builder(parent);
    return builder;
  }
  /**
   *
   *
   * <pre>
   * Request message for the `UpdateBusinessInfo` method.
   * </pre>
   *
   * Protobuf type {@code google.shopping.merchant.accounts.v1.UpdateBusinessInfoRequest}
   */
  public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
      implements
      // @@protoc_insertion_point(builder_implements:google.shopping.merchant.accounts.v1.UpdateBusinessInfoRequest)
      com.google.shopping.merchant.accounts.v1.UpdateBusinessInfoRequestOrBuilder {
    public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
      return com.google.shopping.merchant.accounts.v1.BusinessInfoProto
          .internal_static_google_shopping_merchant_accounts_v1_UpdateBusinessInfoRequest_descriptor;
    }

    @java.lang.Override
    protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return com.google.shopping.merchant.accounts.v1.BusinessInfoProto
          .internal_static_google_shopping_merchant_accounts_v1_UpdateBusinessInfoRequest_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              com.google.shopping.merchant.accounts.v1.UpdateBusinessInfoRequest.class,
              com.google.shopping.merchant.accounts.v1.UpdateBusinessInfoRequest.Builder.class);
    }

    // Construct using
    // com.google.shopping.merchant.accounts.v1.UpdateBusinessInfoRequest.newBuilder()
    private Builder() {
      maybeForceBuilderInitialization();
    }

    private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
      super(parent);
      maybeForceBuilderInitialization();
    }

    private void maybeForceBuilderInitialization() {
      if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) {
        getBusinessInfoFieldBuilder();
        getUpdateMaskFieldBuilder();
      }
    }

    @java.lang.Override
    public Builder clear() {
      super.clear();
      bitField0_ = 0;
      businessInfo_ = null;
      if (businessInfoBuilder_ != null) {
        businessInfoBuilder_.dispose();
        businessInfoBuilder_ = null;
      }
      updateMask_ = null;
      if (updateMaskBuilder_ != null) {
        updateMaskBuilder_.dispose();
        updateMaskBuilder_ = null;
      }
      return this;
    }

    @java.lang.Override
    public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
      return com.google.shopping.merchant.accounts.v1.BusinessInfoProto
          .internal_static_google_shopping_merchant_accounts_v1_UpdateBusinessInfoRequest_descriptor;
    }

    @java.lang.Override
    public com.google.shopping.merchant.accounts.v1.UpdateBusinessInfoRequest
        getDefaultInstanceForType() {
      return com.google.shopping.merchant.accounts.v1.UpdateBusinessInfoRequest
          .getDefaultInstance();
    }

    @java.lang.Override
    public com.google.shopping.merchant.accounts.v1.UpdateBusinessInfoRequest build() {
      com.google.shopping.merchant.accounts.v1.UpdateBusinessInfoRequest result = buildPartial();
      if (!result.isInitialized()) {
        throw newUninitializedMessageException(result);
      }
      return result;
    }

    @java.lang.Override
    public com.google.shopping.merchant.accounts.v1.UpdateBusinessInfoRequest buildPartial() {
      com.google.shopping.merchant.accounts.v1.UpdateBusinessInfoRequest result =
          new com.google.shopping.merchant.accounts.v1.UpdateBusinessInfoRequest(this);
      if (bitField0_ != 0) {
        buildPartial0(result);
      }
      onBuilt();
      return result;
    }

    // Copies fields whose has-bits are set on this builder into the result message.
    private void buildPartial0(
        com.google.shopping.merchant.accounts.v1.UpdateBusinessInfoRequest result) {
      int from_bitField0_ = bitField0_;
      int to_bitField0_ = 0;
      if (((from_bitField0_ & 0x00000001) != 0)) {
        result.businessInfo_ =
            businessInfoBuilder_ == null ? businessInfo_ : businessInfoBuilder_.build();
        to_bitField0_ |= 0x00000001;
      }
      if (((from_bitField0_ & 0x00000002) != 0)) {
        result.updateMask_ = updateMaskBuilder_ == null ? updateMask_ : updateMaskBuilder_.build();
        to_bitField0_ |= 0x00000002;
      }
      result.bitField0_ |= to_bitField0_;
    }

    @java.lang.Override
    public Builder clone() {
      return super.clone();
    }

    @java.lang.Override
    public Builder setField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.setField(field, value);
    }

    @java.lang.Override
    public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
      return super.clearField(field);
    }

    @java.lang.Override
    public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
      return super.clearOneof(oneof);
    }

    @java.lang.Override
    public Builder setRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
      return super.setRepeatedField(field, index, value);
    }

    @java.lang.Override
    public Builder addRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.addRepeatedField(field, value);
    }

    @java.lang.Override
    public Builder mergeFrom(com.google.protobuf.Message other) {
      if (other instanceof com.google.shopping.merchant.accounts.v1.UpdateBusinessInfoRequest) {
        return mergeFrom(
            (com.google.shopping.merchant.accounts.v1.UpdateBusinessInfoRequest) other);
      } else {
        super.mergeFrom(other);
        return this;
      }
    }

    public Builder mergeFrom(
        com.google.shopping.merchant.accounts.v1.UpdateBusinessInfoRequest other) {
      if (other
          == com.google.shopping.merchant.accounts.v1.UpdateBusinessInfoRequest
              .getDefaultInstance()) return this;
      if (other.hasBusinessInfo()) {
        mergeBusinessInfo(other.getBusinessInfo());
      }
      if (other.hasUpdateMask()) {
        mergeUpdateMask(other.getUpdateMask());
      }
      this.mergeUnknownFields(other.getUnknownFields());
      onChanged();
      return this;
    }

    @java.lang.Override
    public final boolean isInitialized() {
      return true;
    }

    @java.lang.Override
    public Builder mergeFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      if (extensionRegistry == null) {
        throw new java.lang.NullPointerException();
      }
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              done = true;
              break;
            case 10:
              {
                input.readMessage(getBusinessInfoFieldBuilder().getBuilder(), extensionRegistry);
                bitField0_ |= 0x00000001;
                break;
              } // case 10
            case 18:
              {
                input.readMessage(getUpdateMaskFieldBuilder().getBuilder(), extensionRegistry);
                bitField0_ |= 0x00000002;
                break;
              } // case 18
            default:
              {
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
          } // switch (tag)
        } // while (!done)
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.unwrapIOException();
      } finally {
        onChanged();
      } // finally
      return this;
    }

    // Tracks which fields have been explicitly set on this builder
    // (business_info = bit 0x1, update_mask = bit 0x2).
    private int bitField0_;

    private com.google.shopping.merchant.accounts.v1.BusinessInfo businessInfo_;
    private com.google.protobuf.SingleFieldBuilderV3<
            com.google.shopping.merchant.accounts.v1.BusinessInfo,
            com.google.shopping.merchant.accounts.v1.BusinessInfo.Builder,
            com.google.shopping.merchant.accounts.v1.BusinessInfoOrBuilder>
        businessInfoBuilder_;
    /**
     *
     *
     * <pre>
     * Required. The new version of the business info.
     * </pre>
     *
     * <code>
     * .google.shopping.merchant.accounts.v1.BusinessInfo business_info = 1 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     *
     * @return Whether the businessInfo field is set.
     */
    public boolean hasBusinessInfo() {
      return ((bitField0_ & 0x00000001) != 0);
    }
    /**
     *
     *
     * <pre>
     * Required. The new version of the business info.
     * </pre>
     *
     * <code>
     * .google.shopping.merchant.accounts.v1.BusinessInfo business_info = 1 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     *
     * @return The businessInfo.
     */
    public com.google.shopping.merchant.accounts.v1.BusinessInfo getBusinessInfo() {
      if (businessInfoBuilder_ == null) {
        return businessInfo_ == null
            ? com.google.shopping.merchant.accounts.v1.BusinessInfo.getDefaultInstance()
            : businessInfo_;
      } else {
        return businessInfoBuilder_.getMessage();
      }
    }
    /**
     *
     *
     * <pre>
     * Required. The new version of the business info.
     * </pre>
     *
     * <code>
     * .google.shopping.merchant.accounts.v1.BusinessInfo business_info = 1 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     */
    public Builder setBusinessInfo(com.google.shopping.merchant.accounts.v1.BusinessInfo value) {
      if (businessInfoBuilder_ == null) {
        if (value == null) {
          throw new NullPointerException();
        }
        businessInfo_ = value;
      } else {
        businessInfoBuilder_.setMessage(value);
      }
      bitField0_ |= 0x00000001;
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Required. The new version of the business info.
     * </pre>
     *
     * <code>
     * .google.shopping.merchant.accounts.v1.BusinessInfo business_info = 1 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     */
    public Builder setBusinessInfo(
        com.google.shopping.merchant.accounts.v1.BusinessInfo.Builder builderForValue) {
      if (businessInfoBuilder_ == null) {
        businessInfo_ = builderForValue.build();
      } else {
        businessInfoBuilder_.setMessage(builderForValue.build());
      }
      bitField0_ |= 0x00000001;
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Required. The new version of the business info.
     * </pre>
     *
     * <code>
     * .google.shopping.merchant.accounts.v1.BusinessInfo business_info = 1 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     */
    public Builder mergeBusinessInfo(com.google.shopping.merchant.accounts.v1.BusinessInfo value) {
      if (businessInfoBuilder_ == null) {
        if (((bitField0_ & 0x00000001) != 0)
            && businessInfo_ != null
            && businessInfo_
                != com.google.shopping.merchant.accounts.v1.BusinessInfo.getDefaultInstance()) {
          getBusinessInfoBuilder().mergeFrom(value);
        } else {
          businessInfo_ = value;
        }
      } else {
        businessInfoBuilder_.mergeFrom(value);
      }
      if (businessInfo_ != null) {
        bitField0_ |= 0x00000001;
        onChanged();
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * Required. The new version of the business info.
     * </pre>
     *
     * <code>
     * .google.shopping.merchant.accounts.v1.BusinessInfo business_info = 1 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     */
    public Builder clearBusinessInfo() {
      bitField0_ = (bitField0_ & ~0x00000001);
      businessInfo_ = null;
      if (businessInfoBuilder_ != null) {
        businessInfoBuilder_.dispose();
        businessInfoBuilder_ = null;
      }
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Required. The new version of the business info.
     * </pre>
     *
     * <code>
     * .google.shopping.merchant.accounts.v1.BusinessInfo business_info = 1 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     */
    public com.google.shopping.merchant.accounts.v1.BusinessInfo.Builder getBusinessInfoBuilder() {
      bitField0_ |= 0x00000001;
      onChanged();
      return getBusinessInfoFieldBuilder().getBuilder();
    }
    /**
     *
     *
     * <pre>
     * Required. The new version of the business info.
     * </pre>
     *
     * <code>
     * .google.shopping.merchant.accounts.v1.BusinessInfo business_info = 1 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     */
    public com.google.shopping.merchant.accounts.v1.BusinessInfoOrBuilder
        getBusinessInfoOrBuilder() {
      if (businessInfoBuilder_ != null) {
        return businessInfoBuilder_.getMessageOrBuilder();
      } else {
        return businessInfo_ == null
            ? com.google.shopping.merchant.accounts.v1.BusinessInfo.getDefaultInstance()
            : businessInfo_;
      }
    }
    /**
     *
     *
     * <pre>
     * Required. The new version of the business info.
     * </pre>
     *
     * <code>
     * .google.shopping.merchant.accounts.v1.BusinessInfo business_info = 1 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     */
    private com.google.protobuf.SingleFieldBuilderV3<
            com.google.shopping.merchant.accounts.v1.BusinessInfo,
            com.google.shopping.merchant.accounts.v1.BusinessInfo.Builder,
            com.google.shopping.merchant.accounts.v1.BusinessInfoOrBuilder>
        getBusinessInfoFieldBuilder() {
      if (businessInfoBuilder_ == null) {
        businessInfoBuilder_ =
            new com.google.protobuf.SingleFieldBuilderV3<
                com.google.shopping.merchant.accounts.v1.BusinessInfo,
                com.google.shopping.merchant.accounts.v1.BusinessInfo.Builder,
                com.google.shopping.merchant.accounts.v1.BusinessInfoOrBuilder>(
                getBusinessInfo(), getParentForChildren(), isClean());
        businessInfo_ = null;
      }
      return businessInfoBuilder_;
    }

    private com.google.protobuf.FieldMask updateMask_;
    private com.google.protobuf.SingleFieldBuilderV3<
            com.google.protobuf.FieldMask,
            com.google.protobuf.FieldMask.Builder,
            com.google.protobuf.FieldMaskOrBuilder>
        updateMaskBuilder_;
    /**
     *
     *
     * <pre>
     * Optional. List of fields being updated.
     *
     * The following fields are supported (in both `snake_case` and
     * `lowerCamelCase`):
     *
     * - `address`
     * - `customer_service`
     * - `korean_business_registration_number`
     * </pre>
     *
     * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = OPTIONAL];
     * </code>
     *
     * @return Whether the updateMask field is set.
     */
    public boolean hasUpdateMask() {
      return ((bitField0_ & 0x00000002) != 0);
    }
    /**
     *
     *
     * <pre>
     * Optional. List of fields being updated.
     *
     * The following fields are supported (in both `snake_case` and
     * `lowerCamelCase`):
     *
     * - `address`
     * - `customer_service`
     * - `korean_business_registration_number`
     * </pre>
     *
     * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = OPTIONAL];
     * </code>
     *
     * @return The updateMask.
     */
    public com.google.protobuf.FieldMask getUpdateMask() {
      if (updateMaskBuilder_ == null) {
        return updateMask_ == null
            ? com.google.protobuf.FieldMask.getDefaultInstance()
            : updateMask_;
      } else {
        return updateMaskBuilder_.getMessage();
      }
    }
    /**
     *
     *
     * <pre>
     * Optional. List of fields being updated.
     *
     * The following fields are supported (in both `snake_case` and
     * `lowerCamelCase`):
     *
     * - `address`
     * - `customer_service`
     * - `korean_business_registration_number`
     * </pre>
     *
     * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = OPTIONAL];
     * </code>
     */
    public Builder setUpdateMask(com.google.protobuf.FieldMask value) {
      if (updateMaskBuilder_ == null) {
        if (value == null) {
          throw new NullPointerException();
        }
        updateMask_ = value;
      } else {
        updateMaskBuilder_.setMessage(value);
      }
      bitField0_ |= 0x00000002;
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Optional. List of fields being updated.
     *
     * The following fields are supported (in both `snake_case` and
     * `lowerCamelCase`):
     *
     * - `address`
     * - `customer_service`
     * - `korean_business_registration_number`
     * </pre>
     *
     * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = OPTIONAL];
     * </code>
     */
    public Builder setUpdateMask(com.google.protobuf.FieldMask.Builder builderForValue) {
      if (updateMaskBuilder_ == null) {
        updateMask_ = builderForValue.build();
      } else {
        updateMaskBuilder_.setMessage(builderForValue.build());
      }
      bitField0_ |= 0x00000002;
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Optional. List of fields being updated.
     *
     * The following fields are supported (in both `snake_case` and
     * `lowerCamelCase`):
     *
     * - `address`
     * - `customer_service`
     * - `korean_business_registration_number`
     * </pre>
     *
     * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = OPTIONAL];
     * </code>
     */
    public Builder mergeUpdateMask(com.google.protobuf.FieldMask value) {
      if (updateMaskBuilder_ == null) {
        if (((bitField0_ & 0x00000002) != 0)
            && updateMask_ != null
            && updateMask_ != com.google.protobuf.FieldMask.getDefaultInstance()) {
          getUpdateMaskBuilder().mergeFrom(value);
        } else {
          updateMask_ = value;
        }
      } else {
        updateMaskBuilder_.mergeFrom(value);
      }
      if (updateMask_ != null) {
        bitField0_ |= 0x00000002;
        onChanged();
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * Optional. List of fields being updated.
     *
     * The following fields are supported (in both `snake_case` and
     * `lowerCamelCase`):
     *
     * - `address`
     * - `customer_service`
     * - `korean_business_registration_number`
     * </pre>
     *
     * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = OPTIONAL];
     * </code>
     */
    public Builder clearUpdateMask() {
      bitField0_ = (bitField0_ & ~0x00000002);
      updateMask_ = null;
      if (updateMaskBuilder_ != null) {
        updateMaskBuilder_.dispose();
        updateMaskBuilder_ = null;
      }
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Optional. List of fields being updated.
     *
     * The following fields are supported (in both `snake_case` and
     * `lowerCamelCase`):
     *
     * - `address`
     * - `customer_service`
     * - `korean_business_registration_number`
     * </pre>
     *
     * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = OPTIONAL];
     * </code>
     */
    public com.google.protobuf.FieldMask.Builder getUpdateMaskBuilder() {
      bitField0_ |= 0x00000002;
      onChanged();
      return getUpdateMaskFieldBuilder().getBuilder();
    }
    /**
     *
     *
     * <pre>
     * Optional. List of fields being updated.
     *
     * The following fields are supported (in both `snake_case` and
     * `lowerCamelCase`):
     *
     * - `address`
     * - `customer_service`
     * - `korean_business_registration_number`
     * </pre>
     *
     * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = OPTIONAL];
     * </code>
     */
    public com.google.protobuf.FieldMaskOrBuilder getUpdateMaskOrBuilder() {
      if (updateMaskBuilder_ != null) {
        return updateMaskBuilder_.getMessageOrBuilder();
      } else {
        return updateMask_ == null
            ? com.google.protobuf.FieldMask.getDefaultInstance()
            : updateMask_;
      }
    }
    /**
     *
     *
     * <pre>
     * Optional. List of fields being updated.
     *
     * The following fields are supported (in both `snake_case` and
     * `lowerCamelCase`):
     *
     * - `address`
     * - `customer_service`
     * - `korean_business_registration_number`
     * </pre>
     *
     * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = OPTIONAL];
     * </code>
     */
    private com.google.protobuf.SingleFieldBuilderV3<
            com.google.protobuf.FieldMask,
            com.google.protobuf.FieldMask.Builder,
            com.google.protobuf.FieldMaskOrBuilder>
        getUpdateMaskFieldBuilder() {
      if (updateMaskBuilder_ == null) {
        updateMaskBuilder_ =
            new com.google.protobuf.SingleFieldBuilderV3<
                com.google.protobuf.FieldMask,
                com.google.protobuf.FieldMask.Builder,
                com.google.protobuf.FieldMaskOrBuilder>(
                getUpdateMask(), getParentForChildren(), isClean());
        updateMask_ = null;
      }
      return updateMaskBuilder_;
    }

    @java.lang.Override
    public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.setUnknownFields(unknownFields);
    }

    @java.lang.Override
    public final Builder mergeUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.mergeUnknownFields(unknownFields);
    }

    // @@protoc_insertion_point(builder_scope:google.shopping.merchant.accounts.v1.UpdateBusinessInfoRequest)
  }

  // @@protoc_insertion_point(class_scope:google.shopping.merchant.accounts.v1.UpdateBusinessInfoRequest)
  // Shared immutable default (empty) instance, created eagerly at class load.
  private static final com.google.shopping.merchant.accounts.v1.UpdateBusinessInfoRequest
      DEFAULT_INSTANCE;

  static {
    DEFAULT_INSTANCE = new com.google.shopping.merchant.accounts.v1.UpdateBusinessInfoRequest();
  }

  public static com.google.shopping.merchant.accounts.v1.UpdateBusinessInfoRequest
      getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }

  // Singleton parser; preserves any partially-parsed message on failure.
  private static final com.google.protobuf.Parser<UpdateBusinessInfoRequest> PARSER =
      new com.google.protobuf.AbstractParser<UpdateBusinessInfoRequest>() {
        @java.lang.Override
        public UpdateBusinessInfoRequest parsePartialFrom(
            com.google.protobuf.CodedInputStream input,
            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
            throws com.google.protobuf.InvalidProtocolBufferException {
          Builder builder = newBuilder();
          try {
            builder.mergeFrom(input, extensionRegistry);
          } catch (com.google.protobuf.InvalidProtocolBufferException e) {
            throw e.setUnfinishedMessage(builder.buildPartial());
          } catch (com.google.protobuf.UninitializedMessageException e) {
            throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
          } catch (java.io.IOException e) {
            throw new com.google.protobuf.InvalidProtocolBufferException(e)
                .setUnfinishedMessage(builder.buildPartial());
          }
          return builder.buildPartial();
        }
      };

  public static com.google.protobuf.Parser<UpdateBusinessInfoRequest> parser() {
    return PARSER;
  }

  @java.lang.Override
  public com.google.protobuf.Parser<UpdateBusinessInfoRequest> getParserForType() {
    return PARSER;
  }

  @java.lang.Override
  public com.google.shopping.merchant.accounts.v1.UpdateBusinessInfoRequest
      getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }
}
|
apache/hive | 37,102 | standalone-metastore/metastore-common/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/InsertEventRequestData.java | /**
* Autogenerated by Thrift Compiler (0.16.0)
*
* DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
* @generated
*/
package org.apache.hadoop.hive.metastore.api;
@SuppressWarnings({"cast", "rawtypes", "serial", "unchecked", "unused"})
@javax.annotation.Generated(value = "Autogenerated by Thrift Compiler (0.16.0)")
// NOTE(review): Thrift-generated struct — do not hand-edit logic; regenerate from
// the metastore .thrift IDL instead. Comments below are reviewer annotations only.
//
// Payload describing an "insert" metastore event: the list of files added by the
// insert (required), plus optional per-file checksums, Acid subdirectory names,
// partition values, and a flag saying whether the insert replaced existing data.
// Field semantics visible from the code:
//   1 replace            (optional bool)         — insert-overwrite indicator
//   2 filesAdded         (required list<string>) — validate() rejects unset
//   3 filesAddedChecksum (optional list<string>)
//   4 subDirectoryList   (optional list<string>)
//   5 partitionVal       (optional list<string>)
@org.apache.hadoop.classification.InterfaceAudience.Public @org.apache.hadoop.classification.InterfaceStability.Stable public class InsertEventRequestData implements org.apache.thrift.TBase<InsertEventRequestData, InsertEventRequestData._Fields>, java.io.Serializable, Cloneable, Comparable<InsertEventRequestData> {
  // Wire-format descriptors: struct name plus one TField per Thrift field id/type.
  private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("InsertEventRequestData");

  private static final org.apache.thrift.protocol.TField REPLACE_FIELD_DESC = new org.apache.thrift.protocol.TField("replace", org.apache.thrift.protocol.TType.BOOL, (short)1);
  private static final org.apache.thrift.protocol.TField FILES_ADDED_FIELD_DESC = new org.apache.thrift.protocol.TField("filesAdded", org.apache.thrift.protocol.TType.LIST, (short)2);
  private static final org.apache.thrift.protocol.TField FILES_ADDED_CHECKSUM_FIELD_DESC = new org.apache.thrift.protocol.TField("filesAddedChecksum", org.apache.thrift.protocol.TType.LIST, (short)3);
  private static final org.apache.thrift.protocol.TField SUB_DIRECTORY_LIST_FIELD_DESC = new org.apache.thrift.protocol.TField("subDirectoryList", org.apache.thrift.protocol.TType.LIST, (short)4);
  private static final org.apache.thrift.protocol.TField PARTITION_VAL_FIELD_DESC = new org.apache.thrift.protocol.TField("partitionVal", org.apache.thrift.protocol.TType.LIST, (short)5);

  // Two interchangeable (de)serialization strategies; scheme() below picks one
  // based on the protocol in use (StandardScheme for field-tagged protocols,
  // TupleScheme for the compact positional TTupleProtocol).
  private static final org.apache.thrift.scheme.SchemeFactory STANDARD_SCHEME_FACTORY = new InsertEventRequestDataStandardSchemeFactory();
  private static final org.apache.thrift.scheme.SchemeFactory TUPLE_SCHEME_FACTORY = new InsertEventRequestDataTupleSchemeFactory();

  // Backing fields. For the primitive 'replace', presence is tracked separately
  // in __isset_bitfield; for the lists, null means "unset".
  private boolean replace; // optional
  private @org.apache.thrift.annotation.Nullable java.util.List<java.lang.String> filesAdded; // required
  private @org.apache.thrift.annotation.Nullable java.util.List<java.lang.String> filesAddedChecksum; // optional
  private @org.apache.thrift.annotation.Nullable java.util.List<java.lang.String> subDirectoryList; // optional
  private @org.apache.thrift.annotation.Nullable java.util.List<java.lang.String> partitionVal; // optional

  /** The set of fields this struct contains, along with convenience methods for finding and manipulating them. */
  public enum _Fields implements org.apache.thrift.TFieldIdEnum {
    REPLACE((short)1, "replace"),
    FILES_ADDED((short)2, "filesAdded"),
    FILES_ADDED_CHECKSUM((short)3, "filesAddedChecksum"),
    SUB_DIRECTORY_LIST((short)4, "subDirectoryList"),
    PARTITION_VAL((short)5, "partitionVal");

    // Reverse index: field name -> enum constant, built once at class-load time.
    private static final java.util.Map<java.lang.String, _Fields> byName = new java.util.HashMap<java.lang.String, _Fields>();

    static {
      for (_Fields field : java.util.EnumSet.allOf(_Fields.class)) {
        byName.put(field.getFieldName(), field);
      }
    }

    /**
     * Find the _Fields constant that matches fieldId, or null if its not found.
     */
    @org.apache.thrift.annotation.Nullable
    public static _Fields findByThriftId(int fieldId) {
      switch(fieldId) {
        case 1: // REPLACE
          return REPLACE;
        case 2: // FILES_ADDED
          return FILES_ADDED;
        case 3: // FILES_ADDED_CHECKSUM
          return FILES_ADDED_CHECKSUM;
        case 4: // SUB_DIRECTORY_LIST
          return SUB_DIRECTORY_LIST;
        case 5: // PARTITION_VAL
          return PARTITION_VAL;
        default:
          return null;
      }
    }

    /**
     * Find the _Fields constant that matches fieldId, throwing an exception
     * if it is not found.
     */
    public static _Fields findByThriftIdOrThrow(int fieldId) {
      _Fields fields = findByThriftId(fieldId);
      if (fields == null) throw new java.lang.IllegalArgumentException("Field " + fieldId + " doesn't exist!");
      return fields;
    }

    /**
     * Find the _Fields constant that matches name, or null if its not found.
     */
    @org.apache.thrift.annotation.Nullable
    public static _Fields findByName(java.lang.String name) {
      return byName.get(name);
    }

    private final short _thriftId;
    private final java.lang.String _fieldName;

    _Fields(short thriftId, java.lang.String fieldName) {
      _thriftId = thriftId;
      _fieldName = fieldName;
    }

    public short getThriftFieldId() {
      return _thriftId;
    }

    public java.lang.String getFieldName() {
      return _fieldName;
    }
  }

  // isset id assignments
  // Presence tracking for the one primitive field: bit 0 of __isset_bitfield
  // records whether 'replace' has been assigned (null can't signal it).
  private static final int __REPLACE_ISSET_ID = 0;
  private byte __isset_bitfield = 0;
  // Optional fields, in the order the TupleScheme writes its presence BitSet.
  private static final _Fields optionals[] = {_Fields.REPLACE,_Fields.FILES_ADDED_CHECKSUM,_Fields.SUB_DIRECTORY_LIST,_Fields.PARTITION_VAL};
  public static final java.util.Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> metaDataMap;

  // Runtime field metadata (name, requiredness, value type) registered with the
  // Thrift global metadata map; exposed immutably via metaDataMap.
  static {
    java.util.Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> tmpMap = new java.util.EnumMap<_Fields, org.apache.thrift.meta_data.FieldMetaData>(_Fields.class);
    tmpMap.put(_Fields.REPLACE, new org.apache.thrift.meta_data.FieldMetaData("replace", org.apache.thrift.TFieldRequirementType.OPTIONAL,
        new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.BOOL)));
    tmpMap.put(_Fields.FILES_ADDED, new org.apache.thrift.meta_data.FieldMetaData("filesAdded", org.apache.thrift.TFieldRequirementType.REQUIRED,
        new org.apache.thrift.meta_data.ListMetaData(org.apache.thrift.protocol.TType.LIST,
            new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.STRING))));
    tmpMap.put(_Fields.FILES_ADDED_CHECKSUM, new org.apache.thrift.meta_data.FieldMetaData("filesAddedChecksum", org.apache.thrift.TFieldRequirementType.OPTIONAL,
        new org.apache.thrift.meta_data.ListMetaData(org.apache.thrift.protocol.TType.LIST,
            new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.STRING))));
    tmpMap.put(_Fields.SUB_DIRECTORY_LIST, new org.apache.thrift.meta_data.FieldMetaData("subDirectoryList", org.apache.thrift.TFieldRequirementType.OPTIONAL,
        new org.apache.thrift.meta_data.ListMetaData(org.apache.thrift.protocol.TType.LIST,
            new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.STRING))));
    tmpMap.put(_Fields.PARTITION_VAL, new org.apache.thrift.meta_data.FieldMetaData("partitionVal", org.apache.thrift.TFieldRequirementType.OPTIONAL,
        new org.apache.thrift.meta_data.ListMetaData(org.apache.thrift.protocol.TType.LIST,
            new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.STRING))));
    metaDataMap = java.util.Collections.unmodifiableMap(tmpMap);
    org.apache.thrift.meta_data.FieldMetaData.addStructMetaDataMap(InsertEventRequestData.class, metaDataMap);
  }

  public InsertEventRequestData() {
  }

  // Convenience constructor taking only the required field.
  public InsertEventRequestData(
    java.util.List<java.lang.String> filesAdded)
  {
    this();
    this.filesAdded = filesAdded;
  }

  /**
   * Performs a deep copy on <i>other</i>.
   */
  // "Deep" here means the lists themselves are copied; the String elements are
  // shared (immutable, so safe).
  public InsertEventRequestData(InsertEventRequestData other) {
    __isset_bitfield = other.__isset_bitfield;
    this.replace = other.replace;
    if (other.isSetFilesAdded()) {
      java.util.List<java.lang.String> __this__filesAdded = new java.util.ArrayList<java.lang.String>(other.filesAdded);
      this.filesAdded = __this__filesAdded;
    }
    if (other.isSetFilesAddedChecksum()) {
      java.util.List<java.lang.String> __this__filesAddedChecksum = new java.util.ArrayList<java.lang.String>(other.filesAddedChecksum);
      this.filesAddedChecksum = __this__filesAddedChecksum;
    }
    if (other.isSetSubDirectoryList()) {
      java.util.List<java.lang.String> __this__subDirectoryList = new java.util.ArrayList<java.lang.String>(other.subDirectoryList);
      this.subDirectoryList = __this__subDirectoryList;
    }
    if (other.isSetPartitionVal()) {
      java.util.List<java.lang.String> __this__partitionVal = new java.util.ArrayList<java.lang.String>(other.partitionVal);
      this.partitionVal = __this__partitionVal;
    }
  }

  public InsertEventRequestData deepCopy() {
    return new InsertEventRequestData(this);
  }

  // Resets every field to its unset state (false/null + cleared isset bit).
  @Override
  public void clear() {
    setReplaceIsSet(false);
    this.replace = false;
    this.filesAdded = null;
    this.filesAddedChecksum = null;
    this.subDirectoryList = null;
    this.partitionVal = null;
  }

  // --- accessors for 'replace' (optional bool; presence via __isset_bitfield) ---

  public boolean isReplace() {
    return this.replace;
  }

  public void setReplace(boolean replace) {
    this.replace = replace;
    setReplaceIsSet(true);
  }

  public void unsetReplace() {
    __isset_bitfield = org.apache.thrift.EncodingUtils.clearBit(__isset_bitfield, __REPLACE_ISSET_ID);
  }

  /** Returns true if field replace is set (has been assigned a value) and false otherwise */
  public boolean isSetReplace() {
    return org.apache.thrift.EncodingUtils.testBit(__isset_bitfield, __REPLACE_ISSET_ID);
  }

  public void setReplaceIsSet(boolean value) {
    __isset_bitfield = org.apache.thrift.EncodingUtils.setBit(__isset_bitfield, __REPLACE_ISSET_ID, value);
  }

  // --- accessors for 'filesAdded' (required list<string>; null == unset) ---

  public int getFilesAddedSize() {
    return (this.filesAdded == null) ? 0 : this.filesAdded.size();
  }

  @org.apache.thrift.annotation.Nullable
  public java.util.Iterator<java.lang.String> getFilesAddedIterator() {
    return (this.filesAdded == null) ? null : this.filesAdded.iterator();
  }

  // Lazily creates the backing list on first add.
  public void addToFilesAdded(java.lang.String elem) {
    if (this.filesAdded == null) {
      this.filesAdded = new java.util.ArrayList<java.lang.String>();
    }
    this.filesAdded.add(elem);
  }

  @org.apache.thrift.annotation.Nullable
  public java.util.List<java.lang.String> getFilesAdded() {
    return this.filesAdded;
  }

  public void setFilesAdded(@org.apache.thrift.annotation.Nullable java.util.List<java.lang.String> filesAdded) {
    this.filesAdded = filesAdded;
  }

  public void unsetFilesAdded() {
    this.filesAdded = null;
  }

  /** Returns true if field filesAdded is set (has been assigned a value) and false otherwise */
  public boolean isSetFilesAdded() {
    return this.filesAdded != null;
  }

  public void setFilesAddedIsSet(boolean value) {
    if (!value) {
      this.filesAdded = null;
    }
  }

  // --- accessors for 'filesAddedChecksum' (optional list<string>) ---

  public int getFilesAddedChecksumSize() {
    return (this.filesAddedChecksum == null) ? 0 : this.filesAddedChecksum.size();
  }

  @org.apache.thrift.annotation.Nullable
  public java.util.Iterator<java.lang.String> getFilesAddedChecksumIterator() {
    return (this.filesAddedChecksum == null) ? null : this.filesAddedChecksum.iterator();
  }

  public void addToFilesAddedChecksum(java.lang.String elem) {
    if (this.filesAddedChecksum == null) {
      this.filesAddedChecksum = new java.util.ArrayList<java.lang.String>();
    }
    this.filesAddedChecksum.add(elem);
  }

  @org.apache.thrift.annotation.Nullable
  public java.util.List<java.lang.String> getFilesAddedChecksum() {
    return this.filesAddedChecksum;
  }

  public void setFilesAddedChecksum(@org.apache.thrift.annotation.Nullable java.util.List<java.lang.String> filesAddedChecksum) {
    this.filesAddedChecksum = filesAddedChecksum;
  }

  public void unsetFilesAddedChecksum() {
    this.filesAddedChecksum = null;
  }

  /** Returns true if field filesAddedChecksum is set (has been assigned a value) and false otherwise */
  public boolean isSetFilesAddedChecksum() {
    return this.filesAddedChecksum != null;
  }

  public void setFilesAddedChecksumIsSet(boolean value) {
    if (!value) {
      this.filesAddedChecksum = null;
    }
  }

  // --- accessors for 'subDirectoryList' (optional list<string>) ---

  public int getSubDirectoryListSize() {
    return (this.subDirectoryList == null) ? 0 : this.subDirectoryList.size();
  }

  @org.apache.thrift.annotation.Nullable
  public java.util.Iterator<java.lang.String> getSubDirectoryListIterator() {
    return (this.subDirectoryList == null) ? null : this.subDirectoryList.iterator();
  }

  public void addToSubDirectoryList(java.lang.String elem) {
    if (this.subDirectoryList == null) {
      this.subDirectoryList = new java.util.ArrayList<java.lang.String>();
    }
    this.subDirectoryList.add(elem);
  }

  @org.apache.thrift.annotation.Nullable
  public java.util.List<java.lang.String> getSubDirectoryList() {
    return this.subDirectoryList;
  }

  public void setSubDirectoryList(@org.apache.thrift.annotation.Nullable java.util.List<java.lang.String> subDirectoryList) {
    this.subDirectoryList = subDirectoryList;
  }

  public void unsetSubDirectoryList() {
    this.subDirectoryList = null;
  }

  /** Returns true if field subDirectoryList is set (has been assigned a value) and false otherwise */
  public boolean isSetSubDirectoryList() {
    return this.subDirectoryList != null;
  }

  public void setSubDirectoryListIsSet(boolean value) {
    if (!value) {
      this.subDirectoryList = null;
    }
  }

  // --- accessors for 'partitionVal' (optional list<string>) ---

  public int getPartitionValSize() {
    return (this.partitionVal == null) ? 0 : this.partitionVal.size();
  }

  @org.apache.thrift.annotation.Nullable
  public java.util.Iterator<java.lang.String> getPartitionValIterator() {
    return (this.partitionVal == null) ? null : this.partitionVal.iterator();
  }

  public void addToPartitionVal(java.lang.String elem) {
    if (this.partitionVal == null) {
      this.partitionVal = new java.util.ArrayList<java.lang.String>();
    }
    this.partitionVal.add(elem);
  }

  @org.apache.thrift.annotation.Nullable
  public java.util.List<java.lang.String> getPartitionVal() {
    return this.partitionVal;
  }

  public void setPartitionVal(@org.apache.thrift.annotation.Nullable java.util.List<java.lang.String> partitionVal) {
    this.partitionVal = partitionVal;
  }

  public void unsetPartitionVal() {
    this.partitionVal = null;
  }

  /** Returns true if field partitionVal is set (has been assigned a value) and false otherwise */
  public boolean isSetPartitionVal() {
    return this.partitionVal != null;
  }

  public void setPartitionValIsSet(boolean value) {
    if (!value) {
      this.partitionVal = null;
    }
  }

  // Generic reflective setter: null value unsets the field, otherwise the value
  // is cast to the field's declared type (ClassCastException on mismatch).
  public void setFieldValue(_Fields field, @org.apache.thrift.annotation.Nullable java.lang.Object value) {
    switch (field) {
    case REPLACE:
      if (value == null) {
        unsetReplace();
      } else {
        setReplace((java.lang.Boolean)value);
      }
      break;

    case FILES_ADDED:
      if (value == null) {
        unsetFilesAdded();
      } else {
        setFilesAdded((java.util.List<java.lang.String>)value);
      }
      break;

    case FILES_ADDED_CHECKSUM:
      if (value == null) {
        unsetFilesAddedChecksum();
      } else {
        setFilesAddedChecksum((java.util.List<java.lang.String>)value);
      }
      break;

    case SUB_DIRECTORY_LIST:
      if (value == null) {
        unsetSubDirectoryList();
      } else {
        setSubDirectoryList((java.util.List<java.lang.String>)value);
      }
      break;

    case PARTITION_VAL:
      if (value == null) {
        unsetPartitionVal();
      } else {
        setPartitionVal((java.util.List<java.lang.String>)value);
      }
      break;

    }
  }

  // Generic reflective getter; primitives come back boxed.
  @org.apache.thrift.annotation.Nullable
  public java.lang.Object getFieldValue(_Fields field) {
    switch (field) {
    case REPLACE:
      return isReplace();

    case FILES_ADDED:
      return getFilesAdded();

    case FILES_ADDED_CHECKSUM:
      return getFilesAddedChecksum();

    case SUB_DIRECTORY_LIST:
      return getSubDirectoryList();

    case PARTITION_VAL:
      return getPartitionVal();

    }
    throw new java.lang.IllegalStateException();
  }

  /** Returns true if field corresponding to fieldID is set (has been assigned a value) and false otherwise */
  public boolean isSet(_Fields field) {
    if (field == null) {
      throw new java.lang.IllegalArgumentException();
    }

    switch (field) {
    case REPLACE:
      return isSetReplace();
    case FILES_ADDED:
      return isSetFilesAdded();
    case FILES_ADDED_CHECKSUM:
      return isSetFilesAddedChecksum();
    case SUB_DIRECTORY_LIST:
      return isSetSubDirectoryList();
    case PARTITION_VAL:
      return isSetPartitionVal();
    }
    throw new java.lang.IllegalStateException();
  }

  @Override
  public boolean equals(java.lang.Object that) {
    if (that instanceof InsertEventRequestData)
      return this.equals((InsertEventRequestData)that);
    return false;
  }

  // Field-by-field equality: for each field, both sides must agree on presence,
  // and if present, on value.
  public boolean equals(InsertEventRequestData that) {
    if (that == null)
      return false;
    if (this == that)
      return true;

    boolean this_present_replace = true && this.isSetReplace();
    boolean that_present_replace = true && that.isSetReplace();
    if (this_present_replace || that_present_replace) {
      if (!(this_present_replace && that_present_replace))
        return false;
      if (this.replace != that.replace)
        return false;
    }

    boolean this_present_filesAdded = true && this.isSetFilesAdded();
    boolean that_present_filesAdded = true && that.isSetFilesAdded();
    if (this_present_filesAdded || that_present_filesAdded) {
      if (!(this_present_filesAdded && that_present_filesAdded))
        return false;
      if (!this.filesAdded.equals(that.filesAdded))
        return false;
    }

    boolean this_present_filesAddedChecksum = true && this.isSetFilesAddedChecksum();
    boolean that_present_filesAddedChecksum = true && that.isSetFilesAddedChecksum();
    if (this_present_filesAddedChecksum || that_present_filesAddedChecksum) {
      if (!(this_present_filesAddedChecksum && that_present_filesAddedChecksum))
        return false;
      if (!this.filesAddedChecksum.equals(that.filesAddedChecksum))
        return false;
    }

    boolean this_present_subDirectoryList = true && this.isSetSubDirectoryList();
    boolean that_present_subDirectoryList = true && that.isSetSubDirectoryList();
    if (this_present_subDirectoryList || that_present_subDirectoryList) {
      if (!(this_present_subDirectoryList && that_present_subDirectoryList))
        return false;
      if (!this.subDirectoryList.equals(that.subDirectoryList))
        return false;
    }

    boolean this_present_partitionVal = true && this.isSetPartitionVal();
    boolean that_present_partitionVal = true && that.isSetPartitionVal();
    if (this_present_partitionVal || that_present_partitionVal) {
      if (!(this_present_partitionVal && that_present_partitionVal))
        return false;
      if (!this.partitionVal.equals(that.partitionVal))
        return false;
    }

    return true;
  }

  // Hash folds in both presence and value of each field, consistent with equals.
  @Override
  public int hashCode() {
    int hashCode = 1;

    hashCode = hashCode * 8191 + ((isSetReplace()) ? 131071 : 524287);
    if (isSetReplace())
      hashCode = hashCode * 8191 + ((replace) ? 131071 : 524287);

    hashCode = hashCode * 8191 + ((isSetFilesAdded()) ? 131071 : 524287);
    if (isSetFilesAdded())
      hashCode = hashCode * 8191 + filesAdded.hashCode();

    hashCode = hashCode * 8191 + ((isSetFilesAddedChecksum()) ? 131071 : 524287);
    if (isSetFilesAddedChecksum())
      hashCode = hashCode * 8191 + filesAddedChecksum.hashCode();

    hashCode = hashCode * 8191 + ((isSetSubDirectoryList()) ? 131071 : 524287);
    if (isSetSubDirectoryList())
      hashCode = hashCode * 8191 + subDirectoryList.hashCode();

    hashCode = hashCode * 8191 + ((isSetPartitionVal()) ? 131071 : 524287);
    if (isSetPartitionVal())
      hashCode = hashCode * 8191 + partitionVal.hashCode();

    return hashCode;
  }

  // Ordering: unset sorts before set for each field; otherwise compare values,
  // walking fields in id order and returning the first nonzero comparison.
  @Override
  public int compareTo(InsertEventRequestData other) {
    if (!getClass().equals(other.getClass())) {
      return getClass().getName().compareTo(other.getClass().getName());
    }

    int lastComparison = 0;

    lastComparison = java.lang.Boolean.compare(isSetReplace(), other.isSetReplace());
    if (lastComparison != 0) {
      return lastComparison;
    }
    if (isSetReplace()) {
      lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.replace, other.replace);
      if (lastComparison != 0) {
        return lastComparison;
      }
    }
    lastComparison = java.lang.Boolean.compare(isSetFilesAdded(), other.isSetFilesAdded());
    if (lastComparison != 0) {
      return lastComparison;
    }
    if (isSetFilesAdded()) {
      lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.filesAdded, other.filesAdded);
      if (lastComparison != 0) {
        return lastComparison;
      }
    }
    lastComparison = java.lang.Boolean.compare(isSetFilesAddedChecksum(), other.isSetFilesAddedChecksum());
    if (lastComparison != 0) {
      return lastComparison;
    }
    if (isSetFilesAddedChecksum()) {
      lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.filesAddedChecksum, other.filesAddedChecksum);
      if (lastComparison != 0) {
        return lastComparison;
      }
    }
    lastComparison = java.lang.Boolean.compare(isSetSubDirectoryList(), other.isSetSubDirectoryList());
    if (lastComparison != 0) {
      return lastComparison;
    }
    if (isSetSubDirectoryList()) {
      lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.subDirectoryList, other.subDirectoryList);
      if (lastComparison != 0) {
        return lastComparison;
      }
    }
    lastComparison = java.lang.Boolean.compare(isSetPartitionVal(), other.isSetPartitionVal());
    if (lastComparison != 0) {
      return lastComparison;
    }
    if (isSetPartitionVal()) {
      lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.partitionVal, other.partitionVal);
      if (lastComparison != 0) {
        return lastComparison;
      }
    }
    return 0;
  }

  @org.apache.thrift.annotation.Nullable
  public _Fields fieldForId(int fieldId) {
    return _Fields.findByThriftId(fieldId);
  }

  // Protocol entry points: delegate to whichever scheme matches the protocol.
  public void read(org.apache.thrift.protocol.TProtocol iprot) throws org.apache.thrift.TException {
    scheme(iprot).read(iprot, this);
  }

  public void write(org.apache.thrift.protocol.TProtocol oprot) throws org.apache.thrift.TException {
    scheme(oprot).write(oprot, this);
  }

  // Human-readable dump; optional fields are printed only when set.
  @Override
  public java.lang.String toString() {
    java.lang.StringBuilder sb = new java.lang.StringBuilder("InsertEventRequestData(");
    boolean first = true;

    if (isSetReplace()) {
      sb.append("replace:");
      sb.append(this.replace);
      first = false;
    }
    if (!first) sb.append(", ");
    sb.append("filesAdded:");
    if (this.filesAdded == null) {
      sb.append("null");
    } else {
      sb.append(this.filesAdded);
    }
    first = false;
    if (isSetFilesAddedChecksum()) {
      if (!first) sb.append(", ");
      sb.append("filesAddedChecksum:");
      if (this.filesAddedChecksum == null) {
        sb.append("null");
      } else {
        sb.append(this.filesAddedChecksum);
      }
      first = false;
    }
    if (isSetSubDirectoryList()) {
      if (!first) sb.append(", ");
      sb.append("subDirectoryList:");
      if (this.subDirectoryList == null) {
        sb.append("null");
      } else {
        sb.append(this.subDirectoryList);
      }
      first = false;
    }
    if (isSetPartitionVal()) {
      if (!first) sb.append(", ");
      sb.append("partitionVal:");
      if (this.partitionVal == null) {
        sb.append("null");
      } else {
        sb.append(this.partitionVal);
      }
      first = false;
    }
    sb.append(")");
    return sb.toString();
  }

  // Enforces the one REQUIRED field; called on write and after standard read.
  public void validate() throws org.apache.thrift.TException {
    // check for required fields
    if (!isSetFilesAdded()) {
      throw new org.apache.thrift.protocol.TProtocolException("Required field 'filesAdded' is unset! Struct:" + toString());
    }
    // check for sub-struct validity
  }

  // Java serialization is bridged through the Thrift compact protocol rather
  // than default field serialization.
  private void writeObject(java.io.ObjectOutputStream out) throws java.io.IOException {
    try {
      write(new org.apache.thrift.protocol.TCompactProtocol(new org.apache.thrift.transport.TIOStreamTransport(out)));
    } catch (org.apache.thrift.TException te) {
      throw new java.io.IOException(te);
    }
  }

  private void readObject(java.io.ObjectInputStream in) throws java.io.IOException, java.lang.ClassNotFoundException {
    try {
      // it doesn't seem like you should have to do this, but java serialization is wacky, and doesn't call the default constructor.
      __isset_bitfield = 0;
      read(new org.apache.thrift.protocol.TCompactProtocol(new org.apache.thrift.transport.TIOStreamTransport(in)));
    } catch (org.apache.thrift.TException te) {
      throw new java.io.IOException(te);
    }
  }

  private static class InsertEventRequestDataStandardSchemeFactory implements org.apache.thrift.scheme.SchemeFactory {
    public InsertEventRequestDataStandardScheme getScheme() {
      return new InsertEventRequestDataStandardScheme();
    }
  }

  // Field-tagged wire format: each field is preceded by its id/type header, so
  // fields may arrive in any order and unknown/mistyped fields are skipped.
  private static class InsertEventRequestDataStandardScheme extends org.apache.thrift.scheme.StandardScheme<InsertEventRequestData> {

    public void read(org.apache.thrift.protocol.TProtocol iprot, InsertEventRequestData struct) throws org.apache.thrift.TException {
      org.apache.thrift.protocol.TField schemeField;
      iprot.readStructBegin();
      while (true)
      {
        schemeField = iprot.readFieldBegin();
        if (schemeField.type == org.apache.thrift.protocol.TType.STOP) { 
          break;
        }
        switch (schemeField.id) {
          case 1: // REPLACE
            if (schemeField.type == org.apache.thrift.protocol.TType.BOOL) {
              struct.replace = iprot.readBool();
              struct.setReplaceIsSet(true);
            } else { 
              org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
            }
            break;
          case 2: // FILES_ADDED
            if (schemeField.type == org.apache.thrift.protocol.TType.LIST) {
              {
                org.apache.thrift.protocol.TList _list1024 = iprot.readListBegin();
                struct.filesAdded = new java.util.ArrayList<java.lang.String>(_list1024.size);
                @org.apache.thrift.annotation.Nullable java.lang.String _elem1025;
                for (int _i1026 = 0; _i1026 < _list1024.size; ++_i1026)
                {
                  _elem1025 = iprot.readString();
                  struct.filesAdded.add(_elem1025);
                }
                iprot.readListEnd();
              }
              struct.setFilesAddedIsSet(true);
            } else { 
              org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
            }
            break;
          case 3: // FILES_ADDED_CHECKSUM
            if (schemeField.type == org.apache.thrift.protocol.TType.LIST) {
              {
                org.apache.thrift.protocol.TList _list1027 = iprot.readListBegin();
                struct.filesAddedChecksum = new java.util.ArrayList<java.lang.String>(_list1027.size);
                @org.apache.thrift.annotation.Nullable java.lang.String _elem1028;
                for (int _i1029 = 0; _i1029 < _list1027.size; ++_i1029)
                {
                  _elem1028 = iprot.readString();
                  struct.filesAddedChecksum.add(_elem1028);
                }
                iprot.readListEnd();
              }
              struct.setFilesAddedChecksumIsSet(true);
            } else { 
              org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
            }
            break;
          case 4: // SUB_DIRECTORY_LIST
            if (schemeField.type == org.apache.thrift.protocol.TType.LIST) {
              {
                org.apache.thrift.protocol.TList _list1030 = iprot.readListBegin();
                struct.subDirectoryList = new java.util.ArrayList<java.lang.String>(_list1030.size);
                @org.apache.thrift.annotation.Nullable java.lang.String _elem1031;
                for (int _i1032 = 0; _i1032 < _list1030.size; ++_i1032)
                {
                  _elem1031 = iprot.readString();
                  struct.subDirectoryList.add(_elem1031);
                }
                iprot.readListEnd();
              }
              struct.setSubDirectoryListIsSet(true);
            } else { 
              org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
            }
            break;
          case 5: // PARTITION_VAL
            if (schemeField.type == org.apache.thrift.protocol.TType.LIST) {
              {
                org.apache.thrift.protocol.TList _list1033 = iprot.readListBegin();
                struct.partitionVal = new java.util.ArrayList<java.lang.String>(_list1033.size);
                @org.apache.thrift.annotation.Nullable java.lang.String _elem1034;
                for (int _i1035 = 0; _i1035 < _list1033.size; ++_i1035)
                {
                  _elem1034 = iprot.readString();
                  struct.partitionVal.add(_elem1034);
                }
                iprot.readListEnd();
              }
              struct.setPartitionValIsSet(true);
            } else { 
              org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
            }
            break;
          default:
            org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
        }
        iprot.readFieldEnd();
      }
      iprot.readStructEnd();
      struct.validate();
    }

    public void write(org.apache.thrift.protocol.TProtocol oprot, InsertEventRequestData struct) throws org.apache.thrift.TException {
      struct.validate();

      oprot.writeStructBegin(STRUCT_DESC);
      if (struct.isSetReplace()) {
        oprot.writeFieldBegin(REPLACE_FIELD_DESC);
        oprot.writeBool(struct.replace);
        oprot.writeFieldEnd();
      }
      if (struct.filesAdded != null) {
        oprot.writeFieldBegin(FILES_ADDED_FIELD_DESC);
        {
          oprot.writeListBegin(new org.apache.thrift.protocol.TList(org.apache.thrift.protocol.TType.STRING, struct.filesAdded.size()));
          for (java.lang.String _iter1036 : struct.filesAdded)
          {
            oprot.writeString(_iter1036);
          }
          oprot.writeListEnd();
        }
        oprot.writeFieldEnd();
      }
      if (struct.filesAddedChecksum != null) {
        if (struct.isSetFilesAddedChecksum()) {
          oprot.writeFieldBegin(FILES_ADDED_CHECKSUM_FIELD_DESC);
          {
            oprot.writeListBegin(new org.apache.thrift.protocol.TList(org.apache.thrift.protocol.TType.STRING, struct.filesAddedChecksum.size()));
            for (java.lang.String _iter1037 : struct.filesAddedChecksum)
            {
              oprot.writeString(_iter1037);
            }
            oprot.writeListEnd();
          }
          oprot.writeFieldEnd();
        }
      }
      if (struct.subDirectoryList != null) {
        if (struct.isSetSubDirectoryList()) {
          oprot.writeFieldBegin(SUB_DIRECTORY_LIST_FIELD_DESC);
          {
            oprot.writeListBegin(new org.apache.thrift.protocol.TList(org.apache.thrift.protocol.TType.STRING, struct.subDirectoryList.size()));
            for (java.lang.String _iter1038 : struct.subDirectoryList)
            {
              oprot.writeString(_iter1038);
            }
            oprot.writeListEnd();
          }
          oprot.writeFieldEnd();
        }
      }
      if (struct.partitionVal != null) {
        if (struct.isSetPartitionVal()) {
          oprot.writeFieldBegin(PARTITION_VAL_FIELD_DESC);
          {
            oprot.writeListBegin(new org.apache.thrift.protocol.TList(org.apache.thrift.protocol.TType.STRING, struct.partitionVal.size()));
            for (java.lang.String _iter1039 : struct.partitionVal)
            {
              oprot.writeString(_iter1039);
            }
            oprot.writeListEnd();
          }
          oprot.writeFieldEnd();
        }
      }
      oprot.writeFieldStop();
      oprot.writeStructEnd();
    }

  }

  private static class InsertEventRequestDataTupleSchemeFactory implements org.apache.thrift.scheme.SchemeFactory {
    public InsertEventRequestDataTupleScheme getScheme() {
      return new InsertEventRequestDataTupleScheme();
    }
  }

  // Compact positional wire format: the required filesAdded list is written
  // first (unconditionally), then a 4-bit presence BitSet for the optionals
  // (replace, filesAddedChecksum, subDirectoryList, partitionVal — same order
  // as the 'optionals' array), then each present optional in that order.
  // read() must mirror write() exactly; the two sides are order-coupled.
  private static class InsertEventRequestDataTupleScheme extends org.apache.thrift.scheme.TupleScheme<InsertEventRequestData> {

    @Override
    public void write(org.apache.thrift.protocol.TProtocol prot, InsertEventRequestData struct) throws org.apache.thrift.TException {
      org.apache.thrift.protocol.TTupleProtocol oprot = (org.apache.thrift.protocol.TTupleProtocol) prot;
      {
        oprot.writeI32(struct.filesAdded.size());
        for (java.lang.String _iter1040 : struct.filesAdded)
        {
          oprot.writeString(_iter1040);
        }
      }
      java.util.BitSet optionals = new java.util.BitSet();
      if (struct.isSetReplace()) {
        optionals.set(0);
      }
      if (struct.isSetFilesAddedChecksum()) {
        optionals.set(1);
      }
      if (struct.isSetSubDirectoryList()) {
        optionals.set(2);
      }
      if (struct.isSetPartitionVal()) {
        optionals.set(3);
      }
      oprot.writeBitSet(optionals, 4);
      if (struct.isSetReplace()) {
        oprot.writeBool(struct.replace);
      }
      if (struct.isSetFilesAddedChecksum()) {
        {
          oprot.writeI32(struct.filesAddedChecksum.size());
          for (java.lang.String _iter1041 : struct.filesAddedChecksum)
          {
            oprot.writeString(_iter1041);
          }
        }
      }
      if (struct.isSetSubDirectoryList()) {
        {
          oprot.writeI32(struct.subDirectoryList.size());
          for (java.lang.String _iter1042 : struct.subDirectoryList)
          {
            oprot.writeString(_iter1042);
          }
        }
      }
      if (struct.isSetPartitionVal()) {
        {
          oprot.writeI32(struct.partitionVal.size());
          for (java.lang.String _iter1043 : struct.partitionVal)
          {
            oprot.writeString(_iter1043);
          }
        }
      }
    }

    @Override
    public void read(org.apache.thrift.protocol.TProtocol prot, InsertEventRequestData struct) throws org.apache.thrift.TException {
      org.apache.thrift.protocol.TTupleProtocol iprot = (org.apache.thrift.protocol.TTupleProtocol) prot;
      {
        org.apache.thrift.protocol.TList _list1044 = iprot.readListBegin(org.apache.thrift.protocol.TType.STRING);
        struct.filesAdded = new java.util.ArrayList<java.lang.String>(_list1044.size);
        @org.apache.thrift.annotation.Nullable java.lang.String _elem1045;
        for (int _i1046 = 0; _i1046 < _list1044.size; ++_i1046)
        {
          _elem1045 = iprot.readString();
          struct.filesAdded.add(_elem1045);
        }
      }
      struct.setFilesAddedIsSet(true);
      java.util.BitSet incoming = iprot.readBitSet(4);
      if (incoming.get(0)) {
        struct.replace = iprot.readBool();
        struct.setReplaceIsSet(true);
      }
      if (incoming.get(1)) {
        {
          org.apache.thrift.protocol.TList _list1047 = iprot.readListBegin(org.apache.thrift.protocol.TType.STRING);
          struct.filesAddedChecksum = new java.util.ArrayList<java.lang.String>(_list1047.size);
          @org.apache.thrift.annotation.Nullable java.lang.String _elem1048;
          for (int _i1049 = 0; _i1049 < _list1047.size; ++_i1049)
          {
            _elem1048 = iprot.readString();
            struct.filesAddedChecksum.add(_elem1048);
          }
        }
        struct.setFilesAddedChecksumIsSet(true);
      }
      if (incoming.get(2)) {
        {
          org.apache.thrift.protocol.TList _list1050 = iprot.readListBegin(org.apache.thrift.protocol.TType.STRING);
          struct.subDirectoryList = new java.util.ArrayList<java.lang.String>(_list1050.size);
          @org.apache.thrift.annotation.Nullable java.lang.String _elem1051;
          for (int _i1052 = 0; _i1052 < _list1050.size; ++_i1052)
          {
            _elem1051 = iprot.readString();
            struct.subDirectoryList.add(_elem1051);
          }
        }
        struct.setSubDirectoryListIsSet(true);
      }
      if (incoming.get(3)) {
        {
          org.apache.thrift.protocol.TList _list1053 = iprot.readListBegin(org.apache.thrift.protocol.TType.STRING);
          struct.partitionVal = new java.util.ArrayList<java.lang.String>(_list1053.size);
          @org.apache.thrift.annotation.Nullable java.lang.String _elem1054;
          for (int _i1055 = 0; _i1055 < _list1053.size; ++_i1055)
          {
            _elem1054 = iprot.readString();
            struct.partitionVal.add(_elem1054);
          }
        }
        struct.setPartitionValIsSet(true);
      }
    }
  }

  // Selects the serialization strategy matching the protocol's declared scheme.
  private static <S extends org.apache.thrift.scheme.IScheme> S scheme(org.apache.thrift.protocol.TProtocol proto) {
    return (org.apache.thrift.scheme.StandardScheme.class.equals(proto.getScheme()) ? STANDARD_SCHEME_FACTORY : TUPLE_SCHEME_FACTORY).getScheme();
  }
}
|
apache/rya | 37,111 | extras/indexing/src/main/java/org/apache/rya/indexing/accumulo/freetext/query/ASTSimpleNode.java | /* Generated By:JJTree: Do not edit this line. ASTSimpleNode.java Version 4.3 */
/* JavaCCOptions:MULTI=true,NODE_USES_PARSER=false,VISITOR=false,TRACK_TOKENS=false,NODE_PREFIX=AST,NODE_EXTENDS=,NODE_FACTORY=,SUPPORT_CLASS_VISIBILITY_PUBLIC=true */
package org.apache.rya.indexing.accumulo.freetext.query;
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
/**
 * Concrete AST node type emitted by the JJTree-generated free-text
 * {@code QueryParser}. Adds no state or behavior of its own; both
 * constructors delegate directly to {@link SimpleNode}.
 */
public class ASTSimpleNode extends SimpleNode {

  /** Creates a node with the given JJTree node id. */
  public ASTSimpleNode(int id) {
    super(id);
  }

  /** Creates a node with the given JJTree node id, bound to its originating parser. */
  public ASTSimpleNode(QueryParser p, int id) {
    super(p, id);
  }
}
/* JavaCC - OriginalChecksum=8a57fc385ee56c7039cbbc4132eb8e0c (do not edit this line) */
|
googleads/google-ads-java | 37,109 | google-ads-stubs-v19/src/main/java/com/google/ads/googleads/v19/services/ListInvoicesRequest.java | // Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/ads/googleads/v19/services/invoice_service.proto
// Protobuf Java Version: 3.25.7
package com.google.ads.googleads.v19.services;
/**
* <pre>
* Request message for fetching the invoices of a given billing setup that were
* issued during a given month.
* </pre>
*
* Protobuf type {@code google.ads.googleads.v19.services.ListInvoicesRequest}
*/
public final class ListInvoicesRequest extends
    com.google.protobuf.GeneratedMessageV3 implements
    // @@protoc_insertion_point(message_implements:google.ads.googleads.v19.services.ListInvoicesRequest)
    ListInvoicesRequestOrBuilder {
  private static final long serialVersionUID = 0L;
  // Use ListInvoicesRequest.newBuilder() to construct.
  private ListInvoicesRequest(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
    super(builder);
  }
  // No-arg constructor used for the default instance: proto3 scalar defaults
  // (empty strings, enum wire value 0).
  private ListInvoicesRequest() {
    customerId_ = "";
    billingSetup_ = "";
    issueYear_ = "";
    issueMonth_ = 0;
  }

  @java.lang.Override
  @SuppressWarnings({"unused"})
  protected java.lang.Object newInstance(
      UnusedPrivateParameter unused) {
    return new ListInvoicesRequest();
  }

  // Message descriptor, resolved from the generated InvoiceServiceProto holder.
  public static final com.google.protobuf.Descriptors.Descriptor
      getDescriptor() {
    return com.google.ads.googleads.v19.services.InvoiceServiceProto.internal_static_google_ads_googleads_v19_services_ListInvoicesRequest_descriptor;
  }

  @java.lang.Override
  protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internalGetFieldAccessorTable() {
    return com.google.ads.googleads.v19.services.InvoiceServiceProto.internal_static_google_ads_googleads_v19_services_ListInvoicesRequest_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            com.google.ads.googleads.v19.services.ListInvoicesRequest.class, com.google.ads.googleads.v19.services.ListInvoicesRequest.Builder.class);
  }

  public static final int CUSTOMER_ID_FIELD_NUMBER = 1;
  // Holds either a String or a ByteString; the accessors below convert lazily
  // and cache the converted form back into this field.
  @SuppressWarnings("serial")
  private volatile java.lang.Object customerId_ = "";
  /**
   * <pre>
   * Required. The ID of the customer to fetch invoices for.
   * </pre>
   *
   * <code>string customer_id = 1 [(.google.api.field_behavior) = REQUIRED];</code>
   * @return The customerId.
   */
  @java.lang.Override
  public java.lang.String getCustomerId() {
    java.lang.Object ref = customerId_;
    if (ref instanceof java.lang.String) {
      return (java.lang.String) ref;
    } else {
      com.google.protobuf.ByteString bs =
          (com.google.protobuf.ByteString) ref;
      java.lang.String s = bs.toStringUtf8();
      customerId_ = s;
      return s;
    }
  }
  /**
   * <pre>
   * Required. The ID of the customer to fetch invoices for.
   * </pre>
   *
   * <code>string customer_id = 1 [(.google.api.field_behavior) = REQUIRED];</code>
   * @return The bytes for customerId.
   */
  @java.lang.Override
  public com.google.protobuf.ByteString
      getCustomerIdBytes() {
    java.lang.Object ref = customerId_;
    if (ref instanceof java.lang.String) {
      com.google.protobuf.ByteString b =
          com.google.protobuf.ByteString.copyFromUtf8(
              (java.lang.String) ref);
      customerId_ = b;
      return b;
    } else {
      return (com.google.protobuf.ByteString) ref;
    }
  }

  public static final int BILLING_SETUP_FIELD_NUMBER = 2;
  // Same lazy String/ByteString caching scheme as customerId_.
  @SuppressWarnings("serial")
  private volatile java.lang.Object billingSetup_ = "";
  /**
   * <pre>
   * Required. The billing setup resource name of the requested invoices.
   *
   * `customers/{customer_id}/billingSetups/{billing_setup_id}`
   * </pre>
   *
   * <code>string billing_setup = 2 [(.google.api.field_behavior) = REQUIRED];</code>
   * @return The billingSetup.
   */
  @java.lang.Override
  public java.lang.String getBillingSetup() {
    java.lang.Object ref = billingSetup_;
    if (ref instanceof java.lang.String) {
      return (java.lang.String) ref;
    } else {
      com.google.protobuf.ByteString bs =
          (com.google.protobuf.ByteString) ref;
      java.lang.String s = bs.toStringUtf8();
      billingSetup_ = s;
      return s;
    }
  }
  /**
   * <pre>
   * Required. The billing setup resource name of the requested invoices.
   *
   * `customers/{customer_id}/billingSetups/{billing_setup_id}`
   * </pre>
   *
   * <code>string billing_setup = 2 [(.google.api.field_behavior) = REQUIRED];</code>
   * @return The bytes for billingSetup.
   */
  @java.lang.Override
  public com.google.protobuf.ByteString
      getBillingSetupBytes() {
    java.lang.Object ref = billingSetup_;
    if (ref instanceof java.lang.String) {
      com.google.protobuf.ByteString b =
          com.google.protobuf.ByteString.copyFromUtf8(
              (java.lang.String) ref);
      billingSetup_ = b;
      return b;
    } else {
      return (com.google.protobuf.ByteString) ref;
    }
  }

  public static final int ISSUE_YEAR_FIELD_NUMBER = 3;
  // Same lazy String/ByteString caching scheme as customerId_.
  @SuppressWarnings("serial")
  private volatile java.lang.Object issueYear_ = "";
  /**
   * <pre>
   * Required. The issue year to retrieve invoices, in yyyy format. Only
   * invoices issued in 2019 or later can be retrieved.
   * </pre>
   *
   * <code>string issue_year = 3 [(.google.api.field_behavior) = REQUIRED];</code>
   * @return The issueYear.
   */
  @java.lang.Override
  public java.lang.String getIssueYear() {
    java.lang.Object ref = issueYear_;
    if (ref instanceof java.lang.String) {
      return (java.lang.String) ref;
    } else {
      com.google.protobuf.ByteString bs =
          (com.google.protobuf.ByteString) ref;
      java.lang.String s = bs.toStringUtf8();
      issueYear_ = s;
      return s;
    }
  }
  /**
   * <pre>
   * Required. The issue year to retrieve invoices, in yyyy format. Only
   * invoices issued in 2019 or later can be retrieved.
   * </pre>
   *
   * <code>string issue_year = 3 [(.google.api.field_behavior) = REQUIRED];</code>
   * @return The bytes for issueYear.
   */
  @java.lang.Override
  public com.google.protobuf.ByteString
      getIssueYearBytes() {
    java.lang.Object ref = issueYear_;
    if (ref instanceof java.lang.String) {
      com.google.protobuf.ByteString b =
          com.google.protobuf.ByteString.copyFromUtf8(
              (java.lang.String) ref);
      issueYear_ = b;
      return b;
    } else {
      return (com.google.protobuf.ByteString) ref;
    }
  }

  public static final int ISSUE_MONTH_FIELD_NUMBER = 4;
  // Enum stored as its raw wire value so unknown values survive round-trips.
  private int issueMonth_ = 0;
  /**
   * <pre>
   * Required. The issue month to retrieve invoices.
   * </pre>
   *
   * <code>.google.ads.googleads.v19.enums.MonthOfYearEnum.MonthOfYear issue_month = 4 [(.google.api.field_behavior) = REQUIRED];</code>
   * @return The enum numeric value on the wire for issueMonth.
   */
  @java.lang.Override public int getIssueMonthValue() {
    return issueMonth_;
  }
  /**
   * <pre>
   * Required. The issue month to retrieve invoices.
   * </pre>
   *
   * <code>.google.ads.googleads.v19.enums.MonthOfYearEnum.MonthOfYear issue_month = 4 [(.google.api.field_behavior) = REQUIRED];</code>
   * @return The issueMonth.
   */
  @java.lang.Override public com.google.ads.googleads.v19.enums.MonthOfYearEnum.MonthOfYear getIssueMonth() {
    // Wire values with no enum constant map to UNRECOGNIZED rather than null.
    com.google.ads.googleads.v19.enums.MonthOfYearEnum.MonthOfYear result = com.google.ads.googleads.v19.enums.MonthOfYearEnum.MonthOfYear.forNumber(issueMonth_);
    return result == null ? com.google.ads.googleads.v19.enums.MonthOfYearEnum.MonthOfYear.UNRECOGNIZED : result;
  }

  // Memoized initialization state: -1 unknown, 0 false, 1 true.
  private byte memoizedIsInitialized = -1;
  @java.lang.Override
  public final boolean isInitialized() {
    // proto3 message with no required submessages: always initialized.
    byte isInitialized = memoizedIsInitialized;
    if (isInitialized == 1) return true;
    if (isInitialized == 0) return false;
    memoizedIsInitialized = 1;
    return true;
  }

  // Serializes only fields whose value differs from the proto3 default.
  @java.lang.Override
  public void writeTo(com.google.protobuf.CodedOutputStream output)
                      throws java.io.IOException {
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(customerId_)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 1, customerId_);
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(billingSetup_)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 2, billingSetup_);
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(issueYear_)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 3, issueYear_);
    }
    if (issueMonth_ != com.google.ads.googleads.v19.enums.MonthOfYearEnum.MonthOfYear.UNSPECIFIED.getNumber()) {
      output.writeEnum(4, issueMonth_);
    }
    getUnknownFields().writeTo(output);
  }

  // Computes the serialized byte size once and memoizes it in memoizedSize;
  // mirrors the default-skipping logic of writeTo.
  @java.lang.Override
  public int getSerializedSize() {
    int size = memoizedSize;
    if (size != -1) return size;

    size = 0;
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(customerId_)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, customerId_);
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(billingSetup_)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, billingSetup_);
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(issueYear_)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(3, issueYear_);
    }
    if (issueMonth_ != com.google.ads.googleads.v19.enums.MonthOfYearEnum.MonthOfYear.UNSPECIFIED.getNumber()) {
      size += com.google.protobuf.CodedOutputStream
        .computeEnumSize(4, issueMonth_);
    }
    size += getUnknownFields().getSerializedSize();
    memoizedSize = size;
    return size;
  }

  // Value equality over all four fields plus unknown fields.
  @java.lang.Override
  public boolean equals(final java.lang.Object obj) {
    if (obj == this) {
     return true;
    }
    if (!(obj instanceof com.google.ads.googleads.v19.services.ListInvoicesRequest)) {
      return super.equals(obj);
    }
    com.google.ads.googleads.v19.services.ListInvoicesRequest other = (com.google.ads.googleads.v19.services.ListInvoicesRequest) obj;

    if (!getCustomerId()
        .equals(other.getCustomerId())) return false;
    if (!getBillingSetup()
        .equals(other.getBillingSetup())) return false;
    if (!getIssueYear()
        .equals(other.getIssueYear())) return false;
    if (issueMonth_ != other.issueMonth_) return false;
    if (!getUnknownFields().equals(other.getUnknownFields())) return false;
    return true;
  }

  // Hash mixes the descriptor, each field number/value pair, and unknown
  // fields; result is memoized (0 means "not yet computed").
  @java.lang.Override
  public int hashCode() {
    if (memoizedHashCode != 0) {
      return memoizedHashCode;
    }
    int hash = 41;
    hash = (19 * hash) + getDescriptor().hashCode();
    hash = (37 * hash) + CUSTOMER_ID_FIELD_NUMBER;
    hash = (53 * hash) + getCustomerId().hashCode();
    hash = (37 * hash) + BILLING_SETUP_FIELD_NUMBER;
    hash = (53 * hash) + getBillingSetup().hashCode();
    hash = (37 * hash) + ISSUE_YEAR_FIELD_NUMBER;
    hash = (53 * hash) + getIssueYear().hashCode();
    hash = (37 * hash) + ISSUE_MONTH_FIELD_NUMBER;
    hash = (53 * hash) + issueMonth_;
    hash = (29 * hash) + getUnknownFields().hashCode();
    memoizedHashCode = hash;
    return hash;
  }

  // Standard generated parseFrom overloads; all delegate to PARSER (or the
  // GeneratedMessageV3 stream helpers, which wrap IOExceptions).
  public static com.google.ads.googleads.v19.services.ListInvoicesRequest parseFrom(
      java.nio.ByteBuffer data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.ads.googleads.v19.services.ListInvoicesRequest parseFrom(
      java.nio.ByteBuffer data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.ads.googleads.v19.services.ListInvoicesRequest parseFrom(
      com.google.protobuf.ByteString data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.ads.googleads.v19.services.ListInvoicesRequest parseFrom(
      com.google.protobuf.ByteString data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.ads.googleads.v19.services.ListInvoicesRequest parseFrom(byte[] data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.ads.googleads.v19.services.ListInvoicesRequest parseFrom(
      byte[] data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.ads.googleads.v19.services.ListInvoicesRequest parseFrom(java.io.InputStream input)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3
        .parseWithIOException(PARSER, input);
  }
  public static com.google.ads.googleads.v19.services.ListInvoicesRequest parseFrom(
      java.io.InputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3
        .parseWithIOException(PARSER, input, extensionRegistry);
  }
  public static com.google.ads.googleads.v19.services.ListInvoicesRequest parseDelimitedFrom(java.io.InputStream input)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3
        .parseDelimitedWithIOException(PARSER, input);
  }
  public static com.google.ads.googleads.v19.services.ListInvoicesRequest parseDelimitedFrom(
      java.io.InputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3
        .parseDelimitedWithIOException(PARSER, input, extensionRegistry);
  }
  public static com.google.ads.googleads.v19.services.ListInvoicesRequest parseFrom(
      com.google.protobuf.CodedInputStream input)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3
        .parseWithIOException(PARSER, input);
  }
  public static com.google.ads.googleads.v19.services.ListInvoicesRequest parseFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3
        .parseWithIOException(PARSER, input, extensionRegistry);
  }

  // Builder factory methods; toBuilder avoids a merge for the default instance.
  @java.lang.Override
  public Builder newBuilderForType() { return newBuilder(); }
  public static Builder newBuilder() {
    return DEFAULT_INSTANCE.toBuilder();
  }
  public static Builder newBuilder(com.google.ads.googleads.v19.services.ListInvoicesRequest prototype) {
    return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
  }
  @java.lang.Override
  public Builder toBuilder() {
    return this == DEFAULT_INSTANCE
        ? new Builder() : new Builder().mergeFrom(this);
  }

  @java.lang.Override
  protected Builder newBuilderForType(
      com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
    Builder builder = new Builder(parent);
    return builder;
  }
  /**
   * <pre>
   * Request message for fetching the invoices of a given billing setup that were
   * issued during a given month.
   * </pre>
   *
   * Protobuf type {@code google.ads.googleads.v19.services.ListInvoicesRequest}
   */
  public static final class Builder extends
      com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements
      // @@protoc_insertion_point(builder_implements:google.ads.googleads.v19.services.ListInvoicesRequest)
      com.google.ads.googleads.v19.services.ListInvoicesRequestOrBuilder {
    public static final com.google.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return com.google.ads.googleads.v19.services.InvoiceServiceProto.internal_static_google_ads_googleads_v19_services_ListInvoicesRequest_descriptor;
    }

    @java.lang.Override
    protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return com.google.ads.googleads.v19.services.InvoiceServiceProto.internal_static_google_ads_googleads_v19_services_ListInvoicesRequest_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              com.google.ads.googleads.v19.services.ListInvoicesRequest.class, com.google.ads.googleads.v19.services.ListInvoicesRequest.Builder.class);
    }

    // Construct using com.google.ads.googleads.v19.services.ListInvoicesRequest.newBuilder()
    private Builder() {

    }

    private Builder(
        com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
      super(parent);

    }
    // Resets every field to its proto3 default and clears all has-bits.
    @java.lang.Override
    public Builder clear() {
      super.clear();
      bitField0_ = 0;
      customerId_ = "";
      billingSetup_ = "";
      issueYear_ = "";
      issueMonth_ = 0;
      return this;
    }

    @java.lang.Override
    public com.google.protobuf.Descriptors.Descriptor
        getDescriptorForType() {
      return com.google.ads.googleads.v19.services.InvoiceServiceProto.internal_static_google_ads_googleads_v19_services_ListInvoicesRequest_descriptor;
    }

    @java.lang.Override
    public com.google.ads.googleads.v19.services.ListInvoicesRequest getDefaultInstanceForType() {
      return com.google.ads.googleads.v19.services.ListInvoicesRequest.getDefaultInstance();
    }

    @java.lang.Override
    public com.google.ads.googleads.v19.services.ListInvoicesRequest build() {
      com.google.ads.googleads.v19.services.ListInvoicesRequest result = buildPartial();
      if (!result.isInitialized()) {
        throw newUninitializedMessageException(result);
      }
      return result;
    }

    @java.lang.Override
    public com.google.ads.googleads.v19.services.ListInvoicesRequest buildPartial() {
      com.google.ads.googleads.v19.services.ListInvoicesRequest result = new com.google.ads.googleads.v19.services.ListInvoicesRequest(this);
      if (bitField0_ != 0) { buildPartial0(result); }
      onBuilt();
      return result;
    }

    // Copies into the message only the fields whose has-bit is set.
    private void buildPartial0(com.google.ads.googleads.v19.services.ListInvoicesRequest result) {
      int from_bitField0_ = bitField0_;
      if (((from_bitField0_ & 0x00000001) != 0)) {
        result.customerId_ = customerId_;
      }
      if (((from_bitField0_ & 0x00000002) != 0)) {
        result.billingSetup_ = billingSetup_;
      }
      if (((from_bitField0_ & 0x00000004) != 0)) {
        result.issueYear_ = issueYear_;
      }
      if (((from_bitField0_ & 0x00000008) != 0)) {
        result.issueMonth_ = issueMonth_;
      }
    }

    @java.lang.Override
    public Builder clone() {
      return super.clone();
    }
    @java.lang.Override
    public Builder setField(
        com.google.protobuf.Descriptors.FieldDescriptor field,
        java.lang.Object value) {
      return super.setField(field, value);
    }
    @java.lang.Override
    public Builder clearField(
        com.google.protobuf.Descriptors.FieldDescriptor field) {
      return super.clearField(field);
    }
    @java.lang.Override
    public Builder clearOneof(
        com.google.protobuf.Descriptors.OneofDescriptor oneof) {
      return super.clearOneof(oneof);
    }
    @java.lang.Override
    public Builder setRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field,
        int index, java.lang.Object value) {
      return super.setRepeatedField(field, index, value);
    }
    @java.lang.Override
    public Builder addRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field,
        java.lang.Object value) {
      return super.addRepeatedField(field, value);
    }
    @java.lang.Override
    public Builder mergeFrom(com.google.protobuf.Message other) {
      if (other instanceof com.google.ads.googleads.v19.services.ListInvoicesRequest) {
        return mergeFrom((com.google.ads.googleads.v19.services.ListInvoicesRequest)other);
      } else {
        super.mergeFrom(other);
        return this;
      }
    }

    // Field-wise merge: non-empty strings and a non-zero enum in `other`
    // overwrite this builder's values; defaults in `other` are ignored.
    public Builder mergeFrom(com.google.ads.googleads.v19.services.ListInvoicesRequest other) {
      if (other == com.google.ads.googleads.v19.services.ListInvoicesRequest.getDefaultInstance()) return this;
      if (!other.getCustomerId().isEmpty()) {
        customerId_ = other.customerId_;
        bitField0_ |= 0x00000001;
        onChanged();
      }
      if (!other.getBillingSetup().isEmpty()) {
        billingSetup_ = other.billingSetup_;
        bitField0_ |= 0x00000002;
        onChanged();
      }
      if (!other.getIssueYear().isEmpty()) {
        issueYear_ = other.issueYear_;
        bitField0_ |= 0x00000004;
        onChanged();
      }
      if (other.issueMonth_ != 0) {
        setIssueMonthValue(other.getIssueMonthValue());
      }
      this.mergeUnknownFields(other.getUnknownFields());
      onChanged();
      return this;
    }

    @java.lang.Override
    public final boolean isInitialized() {
      return true;
    }

    // Wire-format parse loop. Tags 10/18/26 are the length-delimited string
    // fields 1-3; tag 32 is the varint enum field 4. Unknown tags go to the
    // unknown-field set; onChanged() always fires, even on parse failure.
    @java.lang.Override
    public Builder mergeFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      if (extensionRegistry == null) {
        throw new java.lang.NullPointerException();
      }
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              done = true;
              break;
            case 10: {
              customerId_ = input.readStringRequireUtf8();
              bitField0_ |= 0x00000001;
              break;
            } // case 10
            case 18: {
              billingSetup_ = input.readStringRequireUtf8();
              bitField0_ |= 0x00000002;
              break;
            } // case 18
            case 26: {
              issueYear_ = input.readStringRequireUtf8();
              bitField0_ |= 0x00000004;
              break;
            } // case 26
            case 32: {
              issueMonth_ = input.readEnum();
              bitField0_ |= 0x00000008;
              break;
            } // case 32
            default: {
              if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                done = true; // was an endgroup tag
              }
              break;
            } // default:
          } // switch (tag)
        } // while (!done)
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.unwrapIOException();
      } finally {
        onChanged();
      } // finally
      return this;
    }
    // Has-bits for the four fields (0x1 customerId, 0x2 billingSetup,
    // 0x4 issueYear, 0x8 issueMonth).
    private int bitField0_;

    private java.lang.Object customerId_ = "";
    /**
     * <pre>
     * Required. The ID of the customer to fetch invoices for.
     * </pre>
     *
     * <code>string customer_id = 1 [(.google.api.field_behavior) = REQUIRED];</code>
     * @return The customerId.
     */
    public java.lang.String getCustomerId() {
      java.lang.Object ref = customerId_;
      if (!(ref instanceof java.lang.String)) {
        com.google.protobuf.ByteString bs =
            (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        customerId_ = s;
        return s;
      } else {
        return (java.lang.String) ref;
      }
    }
    /**
     * <pre>
     * Required. The ID of the customer to fetch invoices for.
     * </pre>
     *
     * <code>string customer_id = 1 [(.google.api.field_behavior) = REQUIRED];</code>
     * @return The bytes for customerId.
     */
    public com.google.protobuf.ByteString
        getCustomerIdBytes() {
      java.lang.Object ref = customerId_;
      if (ref instanceof String) {
        com.google.protobuf.ByteString b =
            com.google.protobuf.ByteString.copyFromUtf8(
                (java.lang.String) ref);
        customerId_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }
    /**
     * <pre>
     * Required. The ID of the customer to fetch invoices for.
     * </pre>
     *
     * <code>string customer_id = 1 [(.google.api.field_behavior) = REQUIRED];</code>
     * @param value The customerId to set.
     * @return This builder for chaining.
     */
    public Builder setCustomerId(
        java.lang.String value) {
      if (value == null) { throw new NullPointerException(); }
      customerId_ = value;
      bitField0_ |= 0x00000001;
      onChanged();
      return this;
    }
    /**
     * <pre>
     * Required. The ID of the customer to fetch invoices for.
     * </pre>
     *
     * <code>string customer_id = 1 [(.google.api.field_behavior) = REQUIRED];</code>
     * @return This builder for chaining.
     */
    public Builder clearCustomerId() {
      customerId_ = getDefaultInstance().getCustomerId();
      bitField0_ = (bitField0_ & ~0x00000001);
      onChanged();
      return this;
    }
    /**
     * <pre>
     * Required. The ID of the customer to fetch invoices for.
     * </pre>
     *
     * <code>string customer_id = 1 [(.google.api.field_behavior) = REQUIRED];</code>
     * @param value The bytes for customerId to set.
     * @return This builder for chaining.
     */
    public Builder setCustomerIdBytes(
        com.google.protobuf.ByteString value) {
      if (value == null) { throw new NullPointerException(); }
      checkByteStringIsUtf8(value);
      customerId_ = value;
      bitField0_ |= 0x00000001;
      onChanged();
      return this;
    }

    private java.lang.Object billingSetup_ = "";
    /**
     * <pre>
     * Required. The billing setup resource name of the requested invoices.
     *
     * `customers/{customer_id}/billingSetups/{billing_setup_id}`
     * </pre>
     *
     * <code>string billing_setup = 2 [(.google.api.field_behavior) = REQUIRED];</code>
     * @return The billingSetup.
     */
    public java.lang.String getBillingSetup() {
      java.lang.Object ref = billingSetup_;
      if (!(ref instanceof java.lang.String)) {
        com.google.protobuf.ByteString bs =
            (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        billingSetup_ = s;
        return s;
      } else {
        return (java.lang.String) ref;
      }
    }
    /**
     * <pre>
     * Required. The billing setup resource name of the requested invoices.
     *
     * `customers/{customer_id}/billingSetups/{billing_setup_id}`
     * </pre>
     *
     * <code>string billing_setup = 2 [(.google.api.field_behavior) = REQUIRED];</code>
     * @return The bytes for billingSetup.
     */
    public com.google.protobuf.ByteString
        getBillingSetupBytes() {
      java.lang.Object ref = billingSetup_;
      if (ref instanceof String) {
        com.google.protobuf.ByteString b =
            com.google.protobuf.ByteString.copyFromUtf8(
                (java.lang.String) ref);
        billingSetup_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }
    /**
     * <pre>
     * Required. The billing setup resource name of the requested invoices.
     *
     * `customers/{customer_id}/billingSetups/{billing_setup_id}`
     * </pre>
     *
     * <code>string billing_setup = 2 [(.google.api.field_behavior) = REQUIRED];</code>
     * @param value The billingSetup to set.
     * @return This builder for chaining.
     */
    public Builder setBillingSetup(
        java.lang.String value) {
      if (value == null) { throw new NullPointerException(); }
      billingSetup_ = value;
      bitField0_ |= 0x00000002;
      onChanged();
      return this;
    }
    /**
     * <pre>
     * Required. The billing setup resource name of the requested invoices.
     *
     * `customers/{customer_id}/billingSetups/{billing_setup_id}`
     * </pre>
     *
     * <code>string billing_setup = 2 [(.google.api.field_behavior) = REQUIRED];</code>
     * @return This builder for chaining.
     */
    public Builder clearBillingSetup() {
      billingSetup_ = getDefaultInstance().getBillingSetup();
      bitField0_ = (bitField0_ & ~0x00000002);
      onChanged();
      return this;
    }
    /**
     * <pre>
     * Required. The billing setup resource name of the requested invoices.
     *
     * `customers/{customer_id}/billingSetups/{billing_setup_id}`
     * </pre>
     *
     * <code>string billing_setup = 2 [(.google.api.field_behavior) = REQUIRED];</code>
     * @param value The bytes for billingSetup to set.
     * @return This builder for chaining.
     */
    public Builder setBillingSetupBytes(
        com.google.protobuf.ByteString value) {
      if (value == null) { throw new NullPointerException(); }
      checkByteStringIsUtf8(value);
      billingSetup_ = value;
      bitField0_ |= 0x00000002;
      onChanged();
      return this;
    }

    private java.lang.Object issueYear_ = "";
    /**
     * <pre>
     * Required. The issue year to retrieve invoices, in yyyy format. Only
     * invoices issued in 2019 or later can be retrieved.
     * </pre>
     *
     * <code>string issue_year = 3 [(.google.api.field_behavior) = REQUIRED];</code>
     * @return The issueYear.
     */
    public java.lang.String getIssueYear() {
      java.lang.Object ref = issueYear_;
      if (!(ref instanceof java.lang.String)) {
        com.google.protobuf.ByteString bs =
            (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        issueYear_ = s;
        return s;
      } else {
        return (java.lang.String) ref;
      }
    }
    /**
     * <pre>
     * Required. The issue year to retrieve invoices, in yyyy format. Only
     * invoices issued in 2019 or later can be retrieved.
     * </pre>
     *
     * <code>string issue_year = 3 [(.google.api.field_behavior) = REQUIRED];</code>
     * @return The bytes for issueYear.
     */
    public com.google.protobuf.ByteString
        getIssueYearBytes() {
      java.lang.Object ref = issueYear_;
      if (ref instanceof String) {
        com.google.protobuf.ByteString b =
            com.google.protobuf.ByteString.copyFromUtf8(
                (java.lang.String) ref);
        issueYear_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }
    /**
     * <pre>
     * Required. The issue year to retrieve invoices, in yyyy format. Only
     * invoices issued in 2019 or later can be retrieved.
     * </pre>
     *
     * <code>string issue_year = 3 [(.google.api.field_behavior) = REQUIRED];</code>
     * @param value The issueYear to set.
     * @return This builder for chaining.
     */
    public Builder setIssueYear(
        java.lang.String value) {
      if (value == null) { throw new NullPointerException(); }
      issueYear_ = value;
      bitField0_ |= 0x00000004;
      onChanged();
      return this;
    }
    /**
     * <pre>
     * Required. The issue year to retrieve invoices, in yyyy format. Only
     * invoices issued in 2019 or later can be retrieved.
     * </pre>
     *
     * <code>string issue_year = 3 [(.google.api.field_behavior) = REQUIRED];</code>
     * @return This builder for chaining.
     */
    public Builder clearIssueYear() {
      issueYear_ = getDefaultInstance().getIssueYear();
      bitField0_ = (bitField0_ & ~0x00000004);
      onChanged();
      return this;
    }
    /**
     * <pre>
     * Required. The issue year to retrieve invoices, in yyyy format. Only
     * invoices issued in 2019 or later can be retrieved.
     * </pre>
     *
     * <code>string issue_year = 3 [(.google.api.field_behavior) = REQUIRED];</code>
     * @param value The bytes for issueYear to set.
     * @return This builder for chaining.
     */
    public Builder setIssueYearBytes(
        com.google.protobuf.ByteString value) {
      if (value == null) { throw new NullPointerException(); }
      checkByteStringIsUtf8(value);
      issueYear_ = value;
      bitField0_ |= 0x00000004;
      onChanged();
      return this;
    }

    // Raw wire value for field 4; see the message-level accessors for the
    // UNRECOGNIZED mapping.
    private int issueMonth_ = 0;
    /**
     * <pre>
     * Required. The issue month to retrieve invoices.
     * </pre>
     *
     * <code>.google.ads.googleads.v19.enums.MonthOfYearEnum.MonthOfYear issue_month = 4 [(.google.api.field_behavior) = REQUIRED];</code>
     * @return The enum numeric value on the wire for issueMonth.
     */
    @java.lang.Override public int getIssueMonthValue() {
      return issueMonth_;
    }
    /**
     * <pre>
     * Required. The issue month to retrieve invoices.
     * </pre>
     *
     * <code>.google.ads.googleads.v19.enums.MonthOfYearEnum.MonthOfYear issue_month = 4 [(.google.api.field_behavior) = REQUIRED];</code>
     * @param value The enum numeric value on the wire for issueMonth to set.
     * @return This builder for chaining.
     */
    public Builder setIssueMonthValue(int value) {
      issueMonth_ = value;
      bitField0_ |= 0x00000008;
      onChanged();
      return this;
    }
    /**
     * <pre>
     * Required. The issue month to retrieve invoices.
     * </pre>
     *
     * <code>.google.ads.googleads.v19.enums.MonthOfYearEnum.MonthOfYear issue_month = 4 [(.google.api.field_behavior) = REQUIRED];</code>
     * @return The issueMonth.
     */
    @java.lang.Override
    public com.google.ads.googleads.v19.enums.MonthOfYearEnum.MonthOfYear getIssueMonth() {
      com.google.ads.googleads.v19.enums.MonthOfYearEnum.MonthOfYear result = com.google.ads.googleads.v19.enums.MonthOfYearEnum.MonthOfYear.forNumber(issueMonth_);
      return result == null ? com.google.ads.googleads.v19.enums.MonthOfYearEnum.MonthOfYear.UNRECOGNIZED : result;
    }
    /**
     * <pre>
     * Required. The issue month to retrieve invoices.
     * </pre>
     *
     * <code>.google.ads.googleads.v19.enums.MonthOfYearEnum.MonthOfYear issue_month = 4 [(.google.api.field_behavior) = REQUIRED];</code>
     * @param value The issueMonth to set.
     * @return This builder for chaining.
     */
    public Builder setIssueMonth(com.google.ads.googleads.v19.enums.MonthOfYearEnum.MonthOfYear value) {
      if (value == null) {
        throw new NullPointerException();
      }
      bitField0_ |= 0x00000008;
      issueMonth_ = value.getNumber();
      onChanged();
      return this;
    }
    /**
     * <pre>
     * Required. The issue month to retrieve invoices.
     * </pre>
     *
     * <code>.google.ads.googleads.v19.enums.MonthOfYearEnum.MonthOfYear issue_month = 4 [(.google.api.field_behavior) = REQUIRED];</code>
     * @return This builder for chaining.
     */
    public Builder clearIssueMonth() {
      bitField0_ = (bitField0_ & ~0x00000008);
      issueMonth_ = 0;
      onChanged();
      return this;
    }
    @java.lang.Override
    public final Builder setUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.setUnknownFields(unknownFields);
    }

    @java.lang.Override
    public final Builder mergeUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.mergeUnknownFields(unknownFields);
    }


    // @@protoc_insertion_point(builder_scope:google.ads.googleads.v19.services.ListInvoicesRequest)
  }

  // @@protoc_insertion_point(class_scope:google.ads.googleads.v19.services.ListInvoicesRequest)
  // Singleton default instance; all proto3 defaults.
  private static final com.google.ads.googleads.v19.services.ListInvoicesRequest DEFAULT_INSTANCE;
  static {
    DEFAULT_INSTANCE = new com.google.ads.googleads.v19.services.ListInvoicesRequest();
  }

  public static com.google.ads.googleads.v19.services.ListInvoicesRequest getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }

  // Parser used by all parseFrom overloads; on failure it attaches the
  // partially-built message to the thrown exception.
  private static final com.google.protobuf.Parser<ListInvoicesRequest>
      PARSER = new com.google.protobuf.AbstractParser<ListInvoicesRequest>() {
    @java.lang.Override
    public ListInvoicesRequest parsePartialFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      Builder builder = newBuilder();
      try {
        builder.mergeFrom(input, extensionRegistry);
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.setUnfinishedMessage(builder.buildPartial());
      } catch (com.google.protobuf.UninitializedMessageException e) {
        throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
      } catch (java.io.IOException e) {
        throw new com.google.protobuf.InvalidProtocolBufferException(e)
            .setUnfinishedMessage(builder.buildPartial());
      }
      return builder.buildPartial();
    }
  };

  public static com.google.protobuf.Parser<ListInvoicesRequest> parser() {
    return PARSER;
  }

  @java.lang.Override
  public com.google.protobuf.Parser<ListInvoicesRequest> getParserForType() {
    return PARSER;
  }

  @java.lang.Override
  public com.google.ads.googleads.v19.services.ListInvoicesRequest getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }

}
|
googleads/google-ads-java | 37,109 | google-ads-stubs-v20/src/main/java/com/google/ads/googleads/v20/services/ListInvoicesRequest.java | // Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/ads/googleads/v20/services/invoice_service.proto
// Protobuf Java Version: 3.25.7
package com.google.ads.googleads.v20.services;
/**
* <pre>
* Request message for fetching the invoices of a given billing setup that were
* issued during a given month.
* </pre>
*
* Protobuf type {@code google.ads.googleads.v20.services.ListInvoicesRequest}
*/
public final class ListInvoicesRequest extends
com.google.protobuf.GeneratedMessageV3 implements
// @@protoc_insertion_point(message_implements:google.ads.googleads.v20.services.ListInvoicesRequest)
ListInvoicesRequestOrBuilder {
private static final long serialVersionUID = 0L;
// Use ListInvoicesRequest.newBuilder() to construct.
private ListInvoicesRequest(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private ListInvoicesRequest() {
customerId_ = "";
billingSetup_ = "";
issueYear_ = "";
issueMonth_ = 0;
}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(
UnusedPrivateParameter unused) {
return new ListInvoicesRequest();
}
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return com.google.ads.googleads.v20.services.InvoiceServiceProto.internal_static_google_ads_googleads_v20_services_ListInvoicesRequest_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.ads.googleads.v20.services.InvoiceServiceProto.internal_static_google_ads_googleads_v20_services_ListInvoicesRequest_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.ads.googleads.v20.services.ListInvoicesRequest.class, com.google.ads.googleads.v20.services.ListInvoicesRequest.Builder.class);
}
public static final int CUSTOMER_ID_FIELD_NUMBER = 1;
@SuppressWarnings("serial")
private volatile java.lang.Object customerId_ = "";
/**
* <pre>
* Required. The ID of the customer to fetch invoices for.
* </pre>
*
* <code>string customer_id = 1 [(.google.api.field_behavior) = REQUIRED];</code>
* @return The customerId.
*/
@java.lang.Override
public java.lang.String getCustomerId() {
java.lang.Object ref = customerId_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs =
(com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
customerId_ = s;
return s;
}
}
/**
* <pre>
* Required. The ID of the customer to fetch invoices for.
* </pre>
*
* <code>string customer_id = 1 [(.google.api.field_behavior) = REQUIRED];</code>
* @return The bytes for customerId.
*/
@java.lang.Override
public com.google.protobuf.ByteString
getCustomerIdBytes() {
java.lang.Object ref = customerId_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
customerId_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
public static final int BILLING_SETUP_FIELD_NUMBER = 2;
@SuppressWarnings("serial")
private volatile java.lang.Object billingSetup_ = "";
/**
* <pre>
* Required. The billing setup resource name of the requested invoices.
*
* `customers/{customer_id}/billingSetups/{billing_setup_id}`
* </pre>
*
* <code>string billing_setup = 2 [(.google.api.field_behavior) = REQUIRED];</code>
* @return The billingSetup.
*/
@java.lang.Override
public java.lang.String getBillingSetup() {
java.lang.Object ref = billingSetup_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs =
(com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
billingSetup_ = s;
return s;
}
}
/**
* <pre>
* Required. The billing setup resource name of the requested invoices.
*
* `customers/{customer_id}/billingSetups/{billing_setup_id}`
* </pre>
*
* <code>string billing_setup = 2 [(.google.api.field_behavior) = REQUIRED];</code>
* @return The bytes for billingSetup.
*/
@java.lang.Override
public com.google.protobuf.ByteString
getBillingSetupBytes() {
java.lang.Object ref = billingSetup_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
billingSetup_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
public static final int ISSUE_YEAR_FIELD_NUMBER = 3;
@SuppressWarnings("serial")
private volatile java.lang.Object issueYear_ = "";
/**
* <pre>
* Required. The issue year to retrieve invoices, in yyyy format. Only
* invoices issued in 2019 or later can be retrieved.
* </pre>
*
* <code>string issue_year = 3 [(.google.api.field_behavior) = REQUIRED];</code>
* @return The issueYear.
*/
  @java.lang.Override
  public java.lang.String getIssueYear() {
    java.lang.Object ref = issueYear_;
    if (ref instanceof java.lang.String) {
      return (java.lang.String) ref;
    } else {
      // First access after parsing: the field still holds the raw ByteString.
      // Decode it once and cache the String back into the field so subsequent
      // calls hit the fast path above.
      com.google.protobuf.ByteString bs =
          (com.google.protobuf.ByteString) ref;
      java.lang.String s = bs.toStringUtf8();
      issueYear_ = s;
      return s;
    }
  }
/**
* <pre>
* Required. The issue year to retrieve invoices, in yyyy format. Only
* invoices issued in 2019 or later can be retrieved.
* </pre>
*
* <code>string issue_year = 3 [(.google.api.field_behavior) = REQUIRED];</code>
* @return The bytes for issueYear.
*/
@java.lang.Override
public com.google.protobuf.ByteString
getIssueYearBytes() {
java.lang.Object ref = issueYear_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
issueYear_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
public static final int ISSUE_MONTH_FIELD_NUMBER = 4;
private int issueMonth_ = 0;
/**
* <pre>
* Required. The issue month to retrieve invoices.
* </pre>
*
* <code>.google.ads.googleads.v20.enums.MonthOfYearEnum.MonthOfYear issue_month = 4 [(.google.api.field_behavior) = REQUIRED];</code>
* @return The enum numeric value on the wire for issueMonth.
*/
@java.lang.Override public int getIssueMonthValue() {
return issueMonth_;
}
/**
* <pre>
* Required. The issue month to retrieve invoices.
* </pre>
*
* <code>.google.ads.googleads.v20.enums.MonthOfYearEnum.MonthOfYear issue_month = 4 [(.google.api.field_behavior) = REQUIRED];</code>
* @return The issueMonth.
*/
  @java.lang.Override public com.google.ads.googleads.v20.enums.MonthOfYearEnum.MonthOfYear getIssueMonth() {
    // forNumber(...) returns null for wire values this generated code version
    // does not know about; surface those as UNRECOGNIZED rather than null.
    com.google.ads.googleads.v20.enums.MonthOfYearEnum.MonthOfYear result = com.google.ads.googleads.v20.enums.MonthOfYearEnum.MonthOfYear.forNumber(issueMonth_);
    return result == null ? com.google.ads.googleads.v20.enums.MonthOfYearEnum.MonthOfYear.UNRECOGNIZED : result;
  }
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
  // Serializes fields in field-number order, skipping any field still at its
  // proto3 default (empty string / enum value 0), then appends unknown fields
  // preserved from parsing. Must stay in lockstep with getSerializedSize().
  @java.lang.Override
  public void writeTo(com.google.protobuf.CodedOutputStream output)
                      throws java.io.IOException {
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(customerId_)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 1, customerId_);
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(billingSetup_)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 2, billingSetup_);
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(issueYear_)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 3, issueYear_);
    }
    if (issueMonth_ != com.google.ads.googleads.v20.enums.MonthOfYearEnum.MonthOfYear.UNSPECIFIED.getNumber()) {
      output.writeEnum(4, issueMonth_);
    }
    getUnknownFields().writeTo(output);
  }
  // Computes the serialized byte size, mirroring writeTo's default-skipping
  // logic exactly, and memoizes the result (-1 means "not yet computed";
  // memoizedSize is declared in the GeneratedMessageV3 superclass).
  @java.lang.Override
  public int getSerializedSize() {
    int size = memoizedSize;
    if (size != -1) return size;
    size = 0;
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(customerId_)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, customerId_);
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(billingSetup_)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, billingSetup_);
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(issueYear_)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(3, issueYear_);
    }
    if (issueMonth_ != com.google.ads.googleads.v20.enums.MonthOfYearEnum.MonthOfYear.UNSPECIFIED.getNumber()) {
      size += com.google.protobuf.CodedOutputStream
        .computeEnumSize(4, issueMonth_);
    }
    size += getUnknownFields().getSerializedSize();
    memoizedSize = size;
    return size;
  }
  // Value equality over all four declared fields plus the unknown-field set;
  // non-ListInvoicesRequest arguments fall back to the superclass comparison.
  @java.lang.Override
  public boolean equals(final java.lang.Object obj) {
    if (obj == this) {
     return true;
    }
    if (!(obj instanceof com.google.ads.googleads.v20.services.ListInvoicesRequest)) {
      return super.equals(obj);
    }
    com.google.ads.googleads.v20.services.ListInvoicesRequest other = (com.google.ads.googleads.v20.services.ListInvoicesRequest) obj;
    if (!getCustomerId()
        .equals(other.getCustomerId())) return false;
    if (!getBillingSetup()
        .equals(other.getBillingSetup())) return false;
    if (!getIssueYear()
        .equals(other.getIssueYear())) return false;
    if (issueMonth_ != other.issueMonth_) return false;
    if (!getUnknownFields().equals(other.getUnknownFields())) return false;
    return true;
  }
  // Memoized hash consistent with equals(): mixes each field number (37x) and
  // field value (53x) over the same fields equals() compares. 0 doubles as the
  // "not yet computed" sentinel for memoizedHashCode.
  @java.lang.Override
  public int hashCode() {
    if (memoizedHashCode != 0) {
      return memoizedHashCode;
    }
    int hash = 41;
    hash = (19 * hash) + getDescriptor().hashCode();
    hash = (37 * hash) + CUSTOMER_ID_FIELD_NUMBER;
    hash = (53 * hash) + getCustomerId().hashCode();
    hash = (37 * hash) + BILLING_SETUP_FIELD_NUMBER;
    hash = (53 * hash) + getBillingSetup().hashCode();
    hash = (37 * hash) + ISSUE_YEAR_FIELD_NUMBER;
    hash = (53 * hash) + getIssueYear().hashCode();
    hash = (37 * hash) + ISSUE_MONTH_FIELD_NUMBER;
    hash = (53 * hash) + issueMonth_;
    hash = (29 * hash) + getUnknownFields().hashCode();
    memoizedHashCode = hash;
    return hash;
  }
public static com.google.ads.googleads.v20.services.ListInvoicesRequest parseFrom(
java.nio.ByteBuffer data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.ads.googleads.v20.services.ListInvoicesRequest parseFrom(
java.nio.ByteBuffer data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.ads.googleads.v20.services.ListInvoicesRequest parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.ads.googleads.v20.services.ListInvoicesRequest parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.ads.googleads.v20.services.ListInvoicesRequest parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.ads.googleads.v20.services.ListInvoicesRequest parseFrom(
byte[] data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.ads.googleads.v20.services.ListInvoicesRequest parseFrom(java.io.InputStream input)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static com.google.ads.googleads.v20.services.ListInvoicesRequest parseFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
public static com.google.ads.googleads.v20.services.ListInvoicesRequest parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input);
}
public static com.google.ads.googleads.v20.services.ListInvoicesRequest parseDelimitedFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input, extensionRegistry);
}
public static com.google.ads.googleads.v20.services.ListInvoicesRequest parseFrom(
com.google.protobuf.CodedInputStream input)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static com.google.ads.googleads.v20.services.ListInvoicesRequest parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() { return newBuilder(); }
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(com.google.ads.googleads.v20.services.ListInvoicesRequest prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE
? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(
com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
* <pre>
* Request message for fetching the invoices of a given billing setup that were
* issued during a given month.
* </pre>
*
* Protobuf type {@code google.ads.googleads.v20.services.ListInvoicesRequest}
*/
public static final class Builder extends
com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements
// @@protoc_insertion_point(builder_implements:google.ads.googleads.v20.services.ListInvoicesRequest)
com.google.ads.googleads.v20.services.ListInvoicesRequestOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return com.google.ads.googleads.v20.services.InvoiceServiceProto.internal_static_google_ads_googleads_v20_services_ListInvoicesRequest_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.ads.googleads.v20.services.InvoiceServiceProto.internal_static_google_ads_googleads_v20_services_ListInvoicesRequest_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.ads.googleads.v20.services.ListInvoicesRequest.class, com.google.ads.googleads.v20.services.ListInvoicesRequest.Builder.class);
}
// Construct using com.google.ads.googleads.v20.services.ListInvoicesRequest.newBuilder()
private Builder() {
}
private Builder(
com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
}
@java.lang.Override
public Builder clear() {
super.clear();
bitField0_ = 0;
customerId_ = "";
billingSetup_ = "";
issueYear_ = "";
issueMonth_ = 0;
return this;
}
@java.lang.Override
public com.google.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return com.google.ads.googleads.v20.services.InvoiceServiceProto.internal_static_google_ads_googleads_v20_services_ListInvoicesRequest_descriptor;
}
@java.lang.Override
public com.google.ads.googleads.v20.services.ListInvoicesRequest getDefaultInstanceForType() {
return com.google.ads.googleads.v20.services.ListInvoicesRequest.getDefaultInstance();
}
@java.lang.Override
public com.google.ads.googleads.v20.services.ListInvoicesRequest build() {
com.google.ads.googleads.v20.services.ListInvoicesRequest result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public com.google.ads.googleads.v20.services.ListInvoicesRequest buildPartial() {
com.google.ads.googleads.v20.services.ListInvoicesRequest result = new com.google.ads.googleads.v20.services.ListInvoicesRequest(this);
if (bitField0_ != 0) { buildPartial0(result); }
onBuilt();
return result;
}
private void buildPartial0(com.google.ads.googleads.v20.services.ListInvoicesRequest result) {
int from_bitField0_ = bitField0_;
if (((from_bitField0_ & 0x00000001) != 0)) {
result.customerId_ = customerId_;
}
if (((from_bitField0_ & 0x00000002) != 0)) {
result.billingSetup_ = billingSetup_;
}
if (((from_bitField0_ & 0x00000004) != 0)) {
result.issueYear_ = issueYear_;
}
if (((from_bitField0_ & 0x00000008) != 0)) {
result.issueMonth_ = issueMonth_;
}
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
com.google.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(
com.google.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(
com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field,
int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof com.google.ads.googleads.v20.services.ListInvoicesRequest) {
return mergeFrom((com.google.ads.googleads.v20.services.ListInvoicesRequest)other);
} else {
super.mergeFrom(other);
return this;
}
}
    // Field-wise merge: any field of `other` that is not at its proto3 default
    // overwrites this builder's value (setting the matching has-bit); fields at
    // their default in `other` leave this builder untouched. Unknown fields are
    // merged unconditionally.
    public Builder mergeFrom(com.google.ads.googleads.v20.services.ListInvoicesRequest other) {
      if (other == com.google.ads.googleads.v20.services.ListInvoicesRequest.getDefaultInstance()) return this;
      if (!other.getCustomerId().isEmpty()) {
        customerId_ = other.customerId_;
        bitField0_ |= 0x00000001;
        onChanged();
      }
      if (!other.getBillingSetup().isEmpty()) {
        billingSetup_ = other.billingSetup_;
        bitField0_ |= 0x00000002;
        onChanged();
      }
      if (!other.getIssueYear().isEmpty()) {
        issueYear_ = other.issueYear_;
        bitField0_ |= 0x00000004;
        onChanged();
      }
      if (other.issueMonth_ != 0) {
        setIssueMonthValue(other.getIssueMonthValue());
      }
      this.mergeUnknownFields(other.getUnknownFields());
      onChanged();
      return this;
    }
@java.lang.Override
public final boolean isInitialized() {
return true;
}
    // Streaming parse loop: reads tags until end of input (tag 0) or an
    // end-group tag, dispatching each known field tag (wire tag = field
    // number << 3 | wire type) to its field slot and routing everything else
    // into the unknown-field set. onChanged() fires even on failure (finally)
    // because earlier fields may already have been written into the builder.
    @java.lang.Override
    public Builder mergeFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      if (extensionRegistry == null) {
        throw new java.lang.NullPointerException();
      }
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              done = true;
              break;
            case 10: {
              customerId_ = input.readStringRequireUtf8();
              bitField0_ |= 0x00000001;
              break;
            } // case 10
            case 18: {
              billingSetup_ = input.readStringRequireUtf8();
              bitField0_ |= 0x00000002;
              break;
            } // case 18
            case 26: {
              issueYear_ = input.readStringRequireUtf8();
              bitField0_ |= 0x00000004;
              break;
            } // case 26
            case 32: {
              issueMonth_ = input.readEnum();
              bitField0_ |= 0x00000008;
              break;
            } // case 32
            default: {
              if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                done = true; // was an endgroup tag
              }
              break;
            } // default:
          } // switch (tag)
        } // while (!done)
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.unwrapIOException();
      } finally {
        onChanged();
      } // finally
      return this;
    }
private int bitField0_;
private java.lang.Object customerId_ = "";
/**
* <pre>
* Required. The ID of the customer to fetch invoices for.
* </pre>
*
* <code>string customer_id = 1 [(.google.api.field_behavior) = REQUIRED];</code>
* @return The customerId.
*/
public java.lang.String getCustomerId() {
java.lang.Object ref = customerId_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs =
(com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
customerId_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
* <pre>
* Required. The ID of the customer to fetch invoices for.
* </pre>
*
* <code>string customer_id = 1 [(.google.api.field_behavior) = REQUIRED];</code>
* @return The bytes for customerId.
*/
public com.google.protobuf.ByteString
getCustomerIdBytes() {
java.lang.Object ref = customerId_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
customerId_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
* <pre>
* Required. The ID of the customer to fetch invoices for.
* </pre>
*
* <code>string customer_id = 1 [(.google.api.field_behavior) = REQUIRED];</code>
* @param value The customerId to set.
* @return This builder for chaining.
*/
public Builder setCustomerId(
java.lang.String value) {
if (value == null) { throw new NullPointerException(); }
customerId_ = value;
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
* <pre>
* Required. The ID of the customer to fetch invoices for.
* </pre>
*
* <code>string customer_id = 1 [(.google.api.field_behavior) = REQUIRED];</code>
* @return This builder for chaining.
*/
public Builder clearCustomerId() {
customerId_ = getDefaultInstance().getCustomerId();
bitField0_ = (bitField0_ & ~0x00000001);
onChanged();
return this;
}
/**
* <pre>
* Required. The ID of the customer to fetch invoices for.
* </pre>
*
* <code>string customer_id = 1 [(.google.api.field_behavior) = REQUIRED];</code>
* @param value The bytes for customerId to set.
* @return This builder for chaining.
*/
public Builder setCustomerIdBytes(
com.google.protobuf.ByteString value) {
if (value == null) { throw new NullPointerException(); }
checkByteStringIsUtf8(value);
customerId_ = value;
bitField0_ |= 0x00000001;
onChanged();
return this;
}
private java.lang.Object billingSetup_ = "";
/**
* <pre>
* Required. The billing setup resource name of the requested invoices.
*
* `customers/{customer_id}/billingSetups/{billing_setup_id}`
* </pre>
*
* <code>string billing_setup = 2 [(.google.api.field_behavior) = REQUIRED];</code>
* @return The billingSetup.
*/
public java.lang.String getBillingSetup() {
java.lang.Object ref = billingSetup_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs =
(com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
billingSetup_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
* <pre>
* Required. The billing setup resource name of the requested invoices.
*
* `customers/{customer_id}/billingSetups/{billing_setup_id}`
* </pre>
*
* <code>string billing_setup = 2 [(.google.api.field_behavior) = REQUIRED];</code>
* @return The bytes for billingSetup.
*/
public com.google.protobuf.ByteString
getBillingSetupBytes() {
java.lang.Object ref = billingSetup_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
billingSetup_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
* <pre>
* Required. The billing setup resource name of the requested invoices.
*
* `customers/{customer_id}/billingSetups/{billing_setup_id}`
* </pre>
*
* <code>string billing_setup = 2 [(.google.api.field_behavior) = REQUIRED];</code>
* @param value The billingSetup to set.
* @return This builder for chaining.
*/
public Builder setBillingSetup(
java.lang.String value) {
if (value == null) { throw new NullPointerException(); }
billingSetup_ = value;
bitField0_ |= 0x00000002;
onChanged();
return this;
}
/**
* <pre>
* Required. The billing setup resource name of the requested invoices.
*
* `customers/{customer_id}/billingSetups/{billing_setup_id}`
* </pre>
*
* <code>string billing_setup = 2 [(.google.api.field_behavior) = REQUIRED];</code>
* @return This builder for chaining.
*/
public Builder clearBillingSetup() {
billingSetup_ = getDefaultInstance().getBillingSetup();
bitField0_ = (bitField0_ & ~0x00000002);
onChanged();
return this;
}
/**
* <pre>
* Required. The billing setup resource name of the requested invoices.
*
* `customers/{customer_id}/billingSetups/{billing_setup_id}`
* </pre>
*
* <code>string billing_setup = 2 [(.google.api.field_behavior) = REQUIRED];</code>
* @param value The bytes for billingSetup to set.
* @return This builder for chaining.
*/
public Builder setBillingSetupBytes(
com.google.protobuf.ByteString value) {
if (value == null) { throw new NullPointerException(); }
checkByteStringIsUtf8(value);
billingSetup_ = value;
bitField0_ |= 0x00000002;
onChanged();
return this;
}
private java.lang.Object issueYear_ = "";
/**
* <pre>
* Required. The issue year to retrieve invoices, in yyyy format. Only
* invoices issued in 2019 or later can be retrieved.
* </pre>
*
* <code>string issue_year = 3 [(.google.api.field_behavior) = REQUIRED];</code>
* @return The issueYear.
*/
public java.lang.String getIssueYear() {
java.lang.Object ref = issueYear_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs =
(com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
issueYear_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
* <pre>
* Required. The issue year to retrieve invoices, in yyyy format. Only
* invoices issued in 2019 or later can be retrieved.
* </pre>
*
* <code>string issue_year = 3 [(.google.api.field_behavior) = REQUIRED];</code>
* @return The bytes for issueYear.
*/
public com.google.protobuf.ByteString
getIssueYearBytes() {
java.lang.Object ref = issueYear_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
issueYear_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
* <pre>
* Required. The issue year to retrieve invoices, in yyyy format. Only
* invoices issued in 2019 or later can be retrieved.
* </pre>
*
* <code>string issue_year = 3 [(.google.api.field_behavior) = REQUIRED];</code>
* @param value The issueYear to set.
* @return This builder for chaining.
*/
public Builder setIssueYear(
java.lang.String value) {
if (value == null) { throw new NullPointerException(); }
issueYear_ = value;
bitField0_ |= 0x00000004;
onChanged();
return this;
}
/**
* <pre>
* Required. The issue year to retrieve invoices, in yyyy format. Only
* invoices issued in 2019 or later can be retrieved.
* </pre>
*
* <code>string issue_year = 3 [(.google.api.field_behavior) = REQUIRED];</code>
* @return This builder for chaining.
*/
public Builder clearIssueYear() {
issueYear_ = getDefaultInstance().getIssueYear();
bitField0_ = (bitField0_ & ~0x00000004);
onChanged();
return this;
}
/**
* <pre>
* Required. The issue year to retrieve invoices, in yyyy format. Only
* invoices issued in 2019 or later can be retrieved.
* </pre>
*
* <code>string issue_year = 3 [(.google.api.field_behavior) = REQUIRED];</code>
* @param value The bytes for issueYear to set.
* @return This builder for chaining.
*/
public Builder setIssueYearBytes(
com.google.protobuf.ByteString value) {
if (value == null) { throw new NullPointerException(); }
checkByteStringIsUtf8(value);
issueYear_ = value;
bitField0_ |= 0x00000004;
onChanged();
return this;
}
private int issueMonth_ = 0;
/**
* <pre>
* Required. The issue month to retrieve invoices.
* </pre>
*
* <code>.google.ads.googleads.v20.enums.MonthOfYearEnum.MonthOfYear issue_month = 4 [(.google.api.field_behavior) = REQUIRED];</code>
* @return The enum numeric value on the wire for issueMonth.
*/
@java.lang.Override public int getIssueMonthValue() {
return issueMonth_;
}
/**
* <pre>
* Required. The issue month to retrieve invoices.
* </pre>
*
* <code>.google.ads.googleads.v20.enums.MonthOfYearEnum.MonthOfYear issue_month = 4 [(.google.api.field_behavior) = REQUIRED];</code>
* @param value The enum numeric value on the wire for issueMonth to set.
* @return This builder for chaining.
*/
public Builder setIssueMonthValue(int value) {
issueMonth_ = value;
bitField0_ |= 0x00000008;
onChanged();
return this;
}
/**
* <pre>
* Required. The issue month to retrieve invoices.
* </pre>
*
* <code>.google.ads.googleads.v20.enums.MonthOfYearEnum.MonthOfYear issue_month = 4 [(.google.api.field_behavior) = REQUIRED];</code>
* @return The issueMonth.
*/
@java.lang.Override
public com.google.ads.googleads.v20.enums.MonthOfYearEnum.MonthOfYear getIssueMonth() {
com.google.ads.googleads.v20.enums.MonthOfYearEnum.MonthOfYear result = com.google.ads.googleads.v20.enums.MonthOfYearEnum.MonthOfYear.forNumber(issueMonth_);
return result == null ? com.google.ads.googleads.v20.enums.MonthOfYearEnum.MonthOfYear.UNRECOGNIZED : result;
}
/**
* <pre>
* Required. The issue month to retrieve invoices.
* </pre>
*
* <code>.google.ads.googleads.v20.enums.MonthOfYearEnum.MonthOfYear issue_month = 4 [(.google.api.field_behavior) = REQUIRED];</code>
* @param value The issueMonth to set.
* @return This builder for chaining.
*/
    public Builder setIssueMonth(com.google.ads.googleads.v20.enums.MonthOfYearEnum.MonthOfYear value) {
      if (value == null) {
        throw new NullPointerException();
      }
      // Store the enum's wire number and set the has-bit so buildPartial0()
      // copies the value into the built message.
      bitField0_ |= 0x00000008;
      issueMonth_ = value.getNumber();
      onChanged();
      return this;
    }
/**
* <pre>
* Required. The issue month to retrieve invoices.
* </pre>
*
* <code>.google.ads.googleads.v20.enums.MonthOfYearEnum.MonthOfYear issue_month = 4 [(.google.api.field_behavior) = REQUIRED];</code>
* @return This builder for chaining.
*/
public Builder clearIssueMonth() {
bitField0_ = (bitField0_ & ~0x00000008);
issueMonth_ = 0;
onChanged();
return this;
}
@java.lang.Override
public final Builder setUnknownFields(
final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:google.ads.googleads.v20.services.ListInvoicesRequest)
}
// @@protoc_insertion_point(class_scope:google.ads.googleads.v20.services.ListInvoicesRequest)
private static final com.google.ads.googleads.v20.services.ListInvoicesRequest DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new com.google.ads.googleads.v20.services.ListInvoicesRequest();
}
public static com.google.ads.googleads.v20.services.ListInvoicesRequest getDefaultInstance() {
return DEFAULT_INSTANCE;
}
private static final com.google.protobuf.Parser<ListInvoicesRequest>
PARSER = new com.google.protobuf.AbstractParser<ListInvoicesRequest>() {
@java.lang.Override
public ListInvoicesRequest parsePartialFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
Builder builder = newBuilder();
try {
builder.mergeFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(builder.buildPartial());
} catch (com.google.protobuf.UninitializedMessageException e) {
throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
} catch (java.io.IOException e) {
throw new com.google.protobuf.InvalidProtocolBufferException(e)
.setUnfinishedMessage(builder.buildPartial());
}
return builder.buildPartial();
}
};
public static com.google.protobuf.Parser<ListInvoicesRequest> parser() {
return PARSER;
}
@java.lang.Override
public com.google.protobuf.Parser<ListInvoicesRequest> getParserForType() {
return PARSER;
}
@java.lang.Override
public com.google.ads.googleads.v20.services.ListInvoicesRequest getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
|
googleads/google-ads-java | 37,109 | google-ads-stubs-v21/src/main/java/com/google/ads/googleads/v21/services/ListInvoicesRequest.java | // Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/ads/googleads/v21/services/invoice_service.proto
// Protobuf Java Version: 3.25.7
package com.google.ads.googleads.v21.services;
/**
* <pre>
* Request message for fetching the invoices of a given billing setup that were
* issued during a given month.
* </pre>
*
* Protobuf type {@code google.ads.googleads.v21.services.ListInvoicesRequest}
*/
public final class ListInvoicesRequest extends
com.google.protobuf.GeneratedMessageV3 implements
// @@protoc_insertion_point(message_implements:google.ads.googleads.v21.services.ListInvoicesRequest)
ListInvoicesRequestOrBuilder {
private static final long serialVersionUID = 0L;
// Use ListInvoicesRequest.newBuilder() to construct.
private ListInvoicesRequest(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private ListInvoicesRequest() {
customerId_ = "";
billingSetup_ = "";
issueYear_ = "";
issueMonth_ = 0;
}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(
UnusedPrivateParameter unused) {
return new ListInvoicesRequest();
}
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return com.google.ads.googleads.v21.services.InvoiceServiceProto.internal_static_google_ads_googleads_v21_services_ListInvoicesRequest_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.ads.googleads.v21.services.InvoiceServiceProto.internal_static_google_ads_googleads_v21_services_ListInvoicesRequest_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.ads.googleads.v21.services.ListInvoicesRequest.class, com.google.ads.googleads.v21.services.ListInvoicesRequest.Builder.class);
}
public static final int CUSTOMER_ID_FIELD_NUMBER = 1;
@SuppressWarnings("serial")
private volatile java.lang.Object customerId_ = "";
/**
* <pre>
* Required. The ID of the customer to fetch invoices for.
* </pre>
*
* <code>string customer_id = 1 [(.google.api.field_behavior) = REQUIRED];</code>
* @return The customerId.
*/
@java.lang.Override
public java.lang.String getCustomerId() {
java.lang.Object ref = customerId_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs =
(com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
customerId_ = s;
return s;
}
}
/**
* <pre>
* Required. The ID of the customer to fetch invoices for.
* </pre>
*
* <code>string customer_id = 1 [(.google.api.field_behavior) = REQUIRED];</code>
* @return The bytes for customerId.
*/
@java.lang.Override
public com.google.protobuf.ByteString
getCustomerIdBytes() {
java.lang.Object ref = customerId_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
customerId_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
public static final int BILLING_SETUP_FIELD_NUMBER = 2;
@SuppressWarnings("serial")
private volatile java.lang.Object billingSetup_ = "";
/**
* <pre>
* Required. The billing setup resource name of the requested invoices.
*
* `customers/{customer_id}/billingSetups/{billing_setup_id}`
* </pre>
*
* <code>string billing_setup = 2 [(.google.api.field_behavior) = REQUIRED];</code>
* @return The billingSetup.
*/
@java.lang.Override
public java.lang.String getBillingSetup() {
java.lang.Object ref = billingSetup_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs =
(com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
billingSetup_ = s;
return s;
}
}
/**
* <pre>
* Required. The billing setup resource name of the requested invoices.
*
* `customers/{customer_id}/billingSetups/{billing_setup_id}`
* </pre>
*
* <code>string billing_setup = 2 [(.google.api.field_behavior) = REQUIRED];</code>
* @return The bytes for billingSetup.
*/
@java.lang.Override
public com.google.protobuf.ByteString
getBillingSetupBytes() {
java.lang.Object ref = billingSetup_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
billingSetup_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
public static final int ISSUE_YEAR_FIELD_NUMBER = 3;
@SuppressWarnings("serial")
private volatile java.lang.Object issueYear_ = "";
/**
* <pre>
* Required. The issue year to retrieve invoices, in yyyy format. Only
* invoices issued in 2019 or later can be retrieved.
* </pre>
*
* <code>string issue_year = 3 [(.google.api.field_behavior) = REQUIRED];</code>
* @return The issueYear.
*/
@java.lang.Override
public java.lang.String getIssueYear() {
java.lang.Object ref = issueYear_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs =
(com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
issueYear_ = s;
return s;
}
}
/**
* <pre>
* Required. The issue year to retrieve invoices, in yyyy format. Only
* invoices issued in 2019 or later can be retrieved.
* </pre>
*
* <code>string issue_year = 3 [(.google.api.field_behavior) = REQUIRED];</code>
* @return The bytes for issueYear.
*/
@java.lang.Override
public com.google.protobuf.ByteString
getIssueYearBytes() {
java.lang.Object ref = issueYear_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
issueYear_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
public static final int ISSUE_MONTH_FIELD_NUMBER = 4;
private int issueMonth_ = 0;
/**
* <pre>
* Required. The issue month to retrieve invoices.
* </pre>
*
* <code>.google.ads.googleads.v21.enums.MonthOfYearEnum.MonthOfYear issue_month = 4 [(.google.api.field_behavior) = REQUIRED];</code>
* @return The enum numeric value on the wire for issueMonth.
*/
@java.lang.Override public int getIssueMonthValue() {
return issueMonth_;
}
/**
* <pre>
* Required. The issue month to retrieve invoices.
* </pre>
*
* <code>.google.ads.googleads.v21.enums.MonthOfYearEnum.MonthOfYear issue_month = 4 [(.google.api.field_behavior) = REQUIRED];</code>
* @return The issueMonth.
*/
@java.lang.Override public com.google.ads.googleads.v21.enums.MonthOfYearEnum.MonthOfYear getIssueMonth() {
com.google.ads.googleads.v21.enums.MonthOfYearEnum.MonthOfYear result = com.google.ads.googleads.v21.enums.MonthOfYearEnum.MonthOfYear.forNumber(issueMonth_);
return result == null ? com.google.ads.googleads.v21.enums.MonthOfYearEnum.MonthOfYear.UNRECOGNIZED : result;
}
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
@java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output)
throws java.io.IOException {
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(customerId_)) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 1, customerId_);
}
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(billingSetup_)) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 2, billingSetup_);
}
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(issueYear_)) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 3, issueYear_);
}
if (issueMonth_ != com.google.ads.googleads.v21.enums.MonthOfYearEnum.MonthOfYear.UNSPECIFIED.getNumber()) {
output.writeEnum(4, issueMonth_);
}
getUnknownFields().writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(customerId_)) {
size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, customerId_);
}
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(billingSetup_)) {
size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, billingSetup_);
}
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(issueYear_)) {
size += com.google.protobuf.GeneratedMessageV3.computeStringSize(3, issueYear_);
}
if (issueMonth_ != com.google.ads.googleads.v21.enums.MonthOfYearEnum.MonthOfYear.UNSPECIFIED.getNumber()) {
size += com.google.protobuf.CodedOutputStream
.computeEnumSize(4, issueMonth_);
}
size += getUnknownFields().getSerializedSize();
memoizedSize = size;
return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof com.google.ads.googleads.v21.services.ListInvoicesRequest)) {
return super.equals(obj);
}
com.google.ads.googleads.v21.services.ListInvoicesRequest other = (com.google.ads.googleads.v21.services.ListInvoicesRequest) obj;
if (!getCustomerId()
.equals(other.getCustomerId())) return false;
if (!getBillingSetup()
.equals(other.getBillingSetup())) return false;
if (!getIssueYear()
.equals(other.getIssueYear())) return false;
if (issueMonth_ != other.issueMonth_) return false;
if (!getUnknownFields().equals(other.getUnknownFields())) return false;
return true;
}
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
hash = (37 * hash) + CUSTOMER_ID_FIELD_NUMBER;
hash = (53 * hash) + getCustomerId().hashCode();
hash = (37 * hash) + BILLING_SETUP_FIELD_NUMBER;
hash = (53 * hash) + getBillingSetup().hashCode();
hash = (37 * hash) + ISSUE_YEAR_FIELD_NUMBER;
hash = (53 * hash) + getIssueYear().hashCode();
hash = (37 * hash) + ISSUE_MONTH_FIELD_NUMBER;
hash = (53 * hash) + issueMonth_;
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
public static com.google.ads.googleads.v21.services.ListInvoicesRequest parseFrom(
java.nio.ByteBuffer data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.ads.googleads.v21.services.ListInvoicesRequest parseFrom(
java.nio.ByteBuffer data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.ads.googleads.v21.services.ListInvoicesRequest parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.ads.googleads.v21.services.ListInvoicesRequest parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.ads.googleads.v21.services.ListInvoicesRequest parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.ads.googleads.v21.services.ListInvoicesRequest parseFrom(
byte[] data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.ads.googleads.v21.services.ListInvoicesRequest parseFrom(java.io.InputStream input)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static com.google.ads.googleads.v21.services.ListInvoicesRequest parseFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
public static com.google.ads.googleads.v21.services.ListInvoicesRequest parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input);
}
public static com.google.ads.googleads.v21.services.ListInvoicesRequest parseDelimitedFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input, extensionRegistry);
}
public static com.google.ads.googleads.v21.services.ListInvoicesRequest parseFrom(
com.google.protobuf.CodedInputStream input)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static com.google.ads.googleads.v21.services.ListInvoicesRequest parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() { return newBuilder(); }
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(com.google.ads.googleads.v21.services.ListInvoicesRequest prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE
? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(
com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
* <pre>
* Request message for fetching the invoices of a given billing setup that were
* issued during a given month.
* </pre>
*
* Protobuf type {@code google.ads.googleads.v21.services.ListInvoicesRequest}
*/
public static final class Builder extends
com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements
// @@protoc_insertion_point(builder_implements:google.ads.googleads.v21.services.ListInvoicesRequest)
com.google.ads.googleads.v21.services.ListInvoicesRequestOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return com.google.ads.googleads.v21.services.InvoiceServiceProto.internal_static_google_ads_googleads_v21_services_ListInvoicesRequest_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.ads.googleads.v21.services.InvoiceServiceProto.internal_static_google_ads_googleads_v21_services_ListInvoicesRequest_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.ads.googleads.v21.services.ListInvoicesRequest.class, com.google.ads.googleads.v21.services.ListInvoicesRequest.Builder.class);
}
// Construct using com.google.ads.googleads.v21.services.ListInvoicesRequest.newBuilder()
private Builder() {
}
private Builder(
com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
}
@java.lang.Override
public Builder clear() {
super.clear();
bitField0_ = 0;
customerId_ = "";
billingSetup_ = "";
issueYear_ = "";
issueMonth_ = 0;
return this;
}
@java.lang.Override
public com.google.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return com.google.ads.googleads.v21.services.InvoiceServiceProto.internal_static_google_ads_googleads_v21_services_ListInvoicesRequest_descriptor;
}
@java.lang.Override
public com.google.ads.googleads.v21.services.ListInvoicesRequest getDefaultInstanceForType() {
return com.google.ads.googleads.v21.services.ListInvoicesRequest.getDefaultInstance();
}
@java.lang.Override
public com.google.ads.googleads.v21.services.ListInvoicesRequest build() {
com.google.ads.googleads.v21.services.ListInvoicesRequest result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public com.google.ads.googleads.v21.services.ListInvoicesRequest buildPartial() {
com.google.ads.googleads.v21.services.ListInvoicesRequest result = new com.google.ads.googleads.v21.services.ListInvoicesRequest(this);
if (bitField0_ != 0) { buildPartial0(result); }
onBuilt();
return result;
}
private void buildPartial0(com.google.ads.googleads.v21.services.ListInvoicesRequest result) {
int from_bitField0_ = bitField0_;
if (((from_bitField0_ & 0x00000001) != 0)) {
result.customerId_ = customerId_;
}
if (((from_bitField0_ & 0x00000002) != 0)) {
result.billingSetup_ = billingSetup_;
}
if (((from_bitField0_ & 0x00000004) != 0)) {
result.issueYear_ = issueYear_;
}
if (((from_bitField0_ & 0x00000008) != 0)) {
result.issueMonth_ = issueMonth_;
}
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
com.google.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(
com.google.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(
com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field,
int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof com.google.ads.googleads.v21.services.ListInvoicesRequest) {
return mergeFrom((com.google.ads.googleads.v21.services.ListInvoicesRequest)other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(com.google.ads.googleads.v21.services.ListInvoicesRequest other) {
if (other == com.google.ads.googleads.v21.services.ListInvoicesRequest.getDefaultInstance()) return this;
if (!other.getCustomerId().isEmpty()) {
customerId_ = other.customerId_;
bitField0_ |= 0x00000001;
onChanged();
}
if (!other.getBillingSetup().isEmpty()) {
billingSetup_ = other.billingSetup_;
bitField0_ |= 0x00000002;
onChanged();
}
if (!other.getIssueYear().isEmpty()) {
issueYear_ = other.issueYear_;
bitField0_ |= 0x00000004;
onChanged();
}
if (other.issueMonth_ != 0) {
setIssueMonthValue(other.getIssueMonthValue());
}
this.mergeUnknownFields(other.getUnknownFields());
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
return true;
}
@java.lang.Override
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
case 10: {
customerId_ = input.readStringRequireUtf8();
bitField0_ |= 0x00000001;
break;
} // case 10
case 18: {
billingSetup_ = input.readStringRequireUtf8();
bitField0_ |= 0x00000002;
break;
} // case 18
case 26: {
issueYear_ = input.readStringRequireUtf8();
bitField0_ |= 0x00000004;
break;
} // case 26
case 32: {
issueMonth_ = input.readEnum();
bitField0_ |= 0x00000008;
break;
} // case 32
default: {
if (!super.parseUnknownField(input, extensionRegistry, tag)) {
done = true; // was an endgroup tag
}
break;
} // default:
} // switch (tag)
} // while (!done)
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.unwrapIOException();
} finally {
onChanged();
} // finally
return this;
}
private int bitField0_;
private java.lang.Object customerId_ = "";
/**
* <pre>
* Required. The ID of the customer to fetch invoices for.
* </pre>
*
* <code>string customer_id = 1 [(.google.api.field_behavior) = REQUIRED];</code>
* @return The customerId.
*/
public java.lang.String getCustomerId() {
java.lang.Object ref = customerId_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs =
(com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
customerId_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
* <pre>
* Required. The ID of the customer to fetch invoices for.
* </pre>
*
* <code>string customer_id = 1 [(.google.api.field_behavior) = REQUIRED];</code>
* @return The bytes for customerId.
*/
public com.google.protobuf.ByteString
getCustomerIdBytes() {
java.lang.Object ref = customerId_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
customerId_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
* <pre>
* Required. The ID of the customer to fetch invoices for.
* </pre>
*
* <code>string customer_id = 1 [(.google.api.field_behavior) = REQUIRED];</code>
* @param value The customerId to set.
* @return This builder for chaining.
*/
public Builder setCustomerId(
java.lang.String value) {
if (value == null) { throw new NullPointerException(); }
customerId_ = value;
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
* <pre>
* Required. The ID of the customer to fetch invoices for.
* </pre>
*
* <code>string customer_id = 1 [(.google.api.field_behavior) = REQUIRED];</code>
* @return This builder for chaining.
*/
public Builder clearCustomerId() {
customerId_ = getDefaultInstance().getCustomerId();
bitField0_ = (bitField0_ & ~0x00000001);
onChanged();
return this;
}
/**
* <pre>
* Required. The ID of the customer to fetch invoices for.
* </pre>
*
* <code>string customer_id = 1 [(.google.api.field_behavior) = REQUIRED];</code>
* @param value The bytes for customerId to set.
* @return This builder for chaining.
*/
public Builder setCustomerIdBytes(
com.google.protobuf.ByteString value) {
if (value == null) { throw new NullPointerException(); }
checkByteStringIsUtf8(value);
customerId_ = value;
bitField0_ |= 0x00000001;
onChanged();
return this;
}
private java.lang.Object billingSetup_ = "";
/**
* <pre>
* Required. The billing setup resource name of the requested invoices.
*
* `customers/{customer_id}/billingSetups/{billing_setup_id}`
* </pre>
*
* <code>string billing_setup = 2 [(.google.api.field_behavior) = REQUIRED];</code>
* @return The billingSetup.
*/
public java.lang.String getBillingSetup() {
java.lang.Object ref = billingSetup_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs =
(com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
billingSetup_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
* <pre>
* Required. The billing setup resource name of the requested invoices.
*
* `customers/{customer_id}/billingSetups/{billing_setup_id}`
* </pre>
*
* <code>string billing_setup = 2 [(.google.api.field_behavior) = REQUIRED];</code>
* @return The bytes for billingSetup.
*/
public com.google.protobuf.ByteString
getBillingSetupBytes() {
java.lang.Object ref = billingSetup_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
billingSetup_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
* <pre>
* Required. The billing setup resource name of the requested invoices.
*
* `customers/{customer_id}/billingSetups/{billing_setup_id}`
* </pre>
*
* <code>string billing_setup = 2 [(.google.api.field_behavior) = REQUIRED];</code>
* @param value The billingSetup to set.
* @return This builder for chaining.
*/
public Builder setBillingSetup(
java.lang.String value) {
if (value == null) { throw new NullPointerException(); }
billingSetup_ = value;
bitField0_ |= 0x00000002;
onChanged();
return this;
}
/**
* <pre>
* Required. The billing setup resource name of the requested invoices.
*
* `customers/{customer_id}/billingSetups/{billing_setup_id}`
* </pre>
*
* <code>string billing_setup = 2 [(.google.api.field_behavior) = REQUIRED];</code>
* @return This builder for chaining.
*/
public Builder clearBillingSetup() {
billingSetup_ = getDefaultInstance().getBillingSetup();
bitField0_ = (bitField0_ & ~0x00000002);
onChanged();
return this;
}
/**
* <pre>
* Required. The billing setup resource name of the requested invoices.
*
* `customers/{customer_id}/billingSetups/{billing_setup_id}`
* </pre>
*
* <code>string billing_setup = 2 [(.google.api.field_behavior) = REQUIRED];</code>
* @param value The bytes for billingSetup to set.
* @return This builder for chaining.
*/
public Builder setBillingSetupBytes(
com.google.protobuf.ByteString value) {
if (value == null) { throw new NullPointerException(); }
checkByteStringIsUtf8(value);
billingSetup_ = value;
bitField0_ |= 0x00000002;
onChanged();
return this;
}
private java.lang.Object issueYear_ = "";
/**
* <pre>
* Required. The issue year to retrieve invoices, in yyyy format. Only
* invoices issued in 2019 or later can be retrieved.
* </pre>
*
* <code>string issue_year = 3 [(.google.api.field_behavior) = REQUIRED];</code>
* @return The issueYear.
*/
public java.lang.String getIssueYear() {
java.lang.Object ref = issueYear_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs =
(com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
issueYear_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
* <pre>
* Required. The issue year to retrieve invoices, in yyyy format. Only
* invoices issued in 2019 or later can be retrieved.
* </pre>
*
* <code>string issue_year = 3 [(.google.api.field_behavior) = REQUIRED];</code>
* @return The bytes for issueYear.
*/
public com.google.protobuf.ByteString
getIssueYearBytes() {
java.lang.Object ref = issueYear_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
issueYear_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
* <pre>
* Required. The issue year to retrieve invoices, in yyyy format. Only
* invoices issued in 2019 or later can be retrieved.
* </pre>
*
* <code>string issue_year = 3 [(.google.api.field_behavior) = REQUIRED];</code>
* @param value The issueYear to set.
* @return This builder for chaining.
*/
public Builder setIssueYear(
java.lang.String value) {
if (value == null) { throw new NullPointerException(); }
issueYear_ = value;
bitField0_ |= 0x00000004;
onChanged();
return this;
}
/**
* <pre>
* Required. The issue year to retrieve invoices, in yyyy format. Only
* invoices issued in 2019 or later can be retrieved.
* </pre>
*
* <code>string issue_year = 3 [(.google.api.field_behavior) = REQUIRED];</code>
* @return This builder for chaining.
*/
public Builder clearIssueYear() {
issueYear_ = getDefaultInstance().getIssueYear();
bitField0_ = (bitField0_ & ~0x00000004);
onChanged();
return this;
}
/**
* <pre>
* Required. The issue year to retrieve invoices, in yyyy format. Only
* invoices issued in 2019 or later can be retrieved.
* </pre>
*
* <code>string issue_year = 3 [(.google.api.field_behavior) = REQUIRED];</code>
* @param value The bytes for issueYear to set.
* @return This builder for chaining.
*/
public Builder setIssueYearBytes(
com.google.protobuf.ByteString value) {
if (value == null) { throw new NullPointerException(); }
checkByteStringIsUtf8(value);
issueYear_ = value;
bitField0_ |= 0x00000004;
onChanged();
return this;
}
private int issueMonth_ = 0;
/**
* <pre>
* Required. The issue month to retrieve invoices.
* </pre>
*
* <code>.google.ads.googleads.v21.enums.MonthOfYearEnum.MonthOfYear issue_month = 4 [(.google.api.field_behavior) = REQUIRED];</code>
* @return The enum numeric value on the wire for issueMonth.
*/
@java.lang.Override public int getIssueMonthValue() {
return issueMonth_;
}
/**
* <pre>
* Required. The issue month to retrieve invoices.
* </pre>
*
* <code>.google.ads.googleads.v21.enums.MonthOfYearEnum.MonthOfYear issue_month = 4 [(.google.api.field_behavior) = REQUIRED];</code>
* @param value The enum numeric value on the wire for issueMonth to set.
* @return This builder for chaining.
*/
public Builder setIssueMonthValue(int value) {
issueMonth_ = value;
bitField0_ |= 0x00000008;
onChanged();
return this;
}
/**
* <pre>
* Required. The issue month to retrieve invoices.
* </pre>
*
* <code>.google.ads.googleads.v21.enums.MonthOfYearEnum.MonthOfYear issue_month = 4 [(.google.api.field_behavior) = REQUIRED];</code>
* @return The issueMonth.
*/
@java.lang.Override
public com.google.ads.googleads.v21.enums.MonthOfYearEnum.MonthOfYear getIssueMonth() {
com.google.ads.googleads.v21.enums.MonthOfYearEnum.MonthOfYear result = com.google.ads.googleads.v21.enums.MonthOfYearEnum.MonthOfYear.forNumber(issueMonth_);
return result == null ? com.google.ads.googleads.v21.enums.MonthOfYearEnum.MonthOfYear.UNRECOGNIZED : result;
}
/**
* <pre>
* Required. The issue month to retrieve invoices.
* </pre>
*
* <code>.google.ads.googleads.v21.enums.MonthOfYearEnum.MonthOfYear issue_month = 4 [(.google.api.field_behavior) = REQUIRED];</code>
* @param value The issueMonth to set.
* @return This builder for chaining.
*/
public Builder setIssueMonth(com.google.ads.googleads.v21.enums.MonthOfYearEnum.MonthOfYear value) {
if (value == null) {
throw new NullPointerException();
}
bitField0_ |= 0x00000008;
issueMonth_ = value.getNumber();
onChanged();
return this;
}
/**
* <pre>
* Required. The issue month to retrieve invoices.
* </pre>
*
* <code>.google.ads.googleads.v21.enums.MonthOfYearEnum.MonthOfYear issue_month = 4 [(.google.api.field_behavior) = REQUIRED];</code>
* @return This builder for chaining.
*/
public Builder clearIssueMonth() {
bitField0_ = (bitField0_ & ~0x00000008);
issueMonth_ = 0;
onChanged();
return this;
}
@java.lang.Override
public final Builder setUnknownFields(
final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:google.ads.googleads.v21.services.ListInvoicesRequest)
}
// @@protoc_insertion_point(class_scope:google.ads.googleads.v21.services.ListInvoicesRequest)
private static final com.google.ads.googleads.v21.services.ListInvoicesRequest DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new com.google.ads.googleads.v21.services.ListInvoicesRequest();
}
public static com.google.ads.googleads.v21.services.ListInvoicesRequest getDefaultInstance() {
return DEFAULT_INSTANCE;
}
private static final com.google.protobuf.Parser<ListInvoicesRequest>
PARSER = new com.google.protobuf.AbstractParser<ListInvoicesRequest>() {
@java.lang.Override
public ListInvoicesRequest parsePartialFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
Builder builder = newBuilder();
try {
builder.mergeFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(builder.buildPartial());
} catch (com.google.protobuf.UninitializedMessageException e) {
throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
} catch (java.io.IOException e) {
throw new com.google.protobuf.InvalidProtocolBufferException(e)
.setUnfinishedMessage(builder.buildPartial());
}
return builder.buildPartial();
}
};
  /** Returns the generated parser for {@code ListInvoicesRequest}. */
  public static com.google.protobuf.Parser<ListInvoicesRequest> parser() {
    return PARSER;
  }
  @java.lang.Override
  public com.google.protobuf.Parser<ListInvoicesRequest> getParserForType() {
    // Instance accessor required by the MessageLite contract; same shared PARSER.
    return PARSER;
  }
  @java.lang.Override
  public com.google.ads.googleads.v21.services.ListInvoicesRequest getDefaultInstanceForType() {
    // Instance accessor required by the Message contract; same shared default instance.
    return DEFAULT_INSTANCE;
  }
}
|
google/j2objc | 37,186 | jre_emul/android/platform/libcore/ojluni/src/main/java/java/util/PriorityQueue.java | /*
* Copyright (c) 2003, 2018, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
* under the terms of the GNU General Public License version 2 only, as
* published by the Free Software Foundation. Oracle designates this
* particular file as subject to the "Classpath" exception as provided
* by Oracle in the LICENSE file that accompanied this code.
*
* This code is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
* version 2 for more details (a copy is included in the LICENSE file that
* accompanied this code).
*
* You should have received a copy of the GNU General Public License version
* 2 along with this work; if not, write to the Free Software Foundation,
* Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
*
* Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
* or visit www.oracle.com if you need additional information or have any
* questions.
*/
package java.util;
import java.util.function.Consumer;
import java.util.function.Predicate;
/**
* An unbounded priority {@linkplain Queue queue} based on a priority heap.
* The elements of the priority queue are ordered according to their
* {@linkplain Comparable natural ordering}, or by a {@link Comparator}
* provided at queue construction time, depending on which constructor is
* used. A priority queue does not permit {@code null} elements.
* A priority queue relying on natural ordering also does not permit
* insertion of non-comparable objects (doing so may result in
* {@code ClassCastException}).
*
* <p>The <em>head</em> of this queue is the <em>least</em> element
* with respect to the specified ordering. If multiple elements are
* tied for least value, the head is one of those elements -- ties are
* broken arbitrarily. The queue retrieval operations {@code poll},
* {@code remove}, {@code peek}, and {@code element} access the
* element at the head of the queue.
*
* <p>A priority queue is unbounded, but has an internal
* <i>capacity</i> governing the size of an array used to store the
* elements on the queue. It is always at least as large as the queue
* size. As elements are added to a priority queue, its capacity
* grows automatically. The details of the growth policy are not
* specified.
*
* <p>This class and its iterator implement all of the
* <em>optional</em> methods of the {@link Collection} and {@link
* Iterator} interfaces. The Iterator provided in method {@link
* #iterator()} and the Spliterator provided in method {@link #spliterator()}
* are <em>not</em> guaranteed to traverse the elements of
* the priority queue in any particular order. If you need ordered
* traversal, consider using {@code Arrays.sort(pq.toArray())}.
*
* <p><strong>Note that this implementation is not synchronized.</strong>
* Multiple threads should not access a {@code PriorityQueue}
* instance concurrently if any of the threads modifies the queue.
* Instead, use the thread-safe {@link
* java.util.concurrent.PriorityBlockingQueue} class.
*
* <p>Implementation note: this implementation provides
* O(log(n)) time for the enqueuing and dequeuing methods
* ({@code offer}, {@code poll}, {@code remove()} and {@code add});
* linear time for the {@code remove(Object)} and {@code contains(Object)}
* methods; and constant time for the retrieval methods
* ({@code peek}, {@code element}, and {@code size}).
*
* <p>This class is a member of the
* <a href="{@docRoot}/java.base/java/util/package-summary.html#CollectionsFramework">
* Java Collections Framework</a>.
*
* @since 1.5
* @author Josh Bloch, Doug Lea
* @param <E> the type of elements held in this queue
*/
@SuppressWarnings("unchecked")
public class PriorityQueue<E> extends AbstractQueue<E>
implements java.io.Serializable {
private static final long serialVersionUID = -7720805057305804111L;
private static final int DEFAULT_INITIAL_CAPACITY = 11;
/**
* Priority queue represented as a balanced binary heap: the two
* children of queue[n] are queue[2*n+1] and queue[2*(n+1)]. The
* priority queue is ordered by comparator, or by the elements'
* natural ordering, if comparator is null: For each node n in the
* heap and each descendant d of n, n <= d. The element with the
* lowest value is in queue[0], assuming the queue is nonempty.
*/
transient Object[] queue; // non-private to simplify nested class access
/**
* The number of elements in the priority queue.
*/
int size;
/**
* The comparator, or null if priority queue uses elements'
* natural ordering.
*/
private final Comparator<? super E> comparator;
/**
* The number of times this priority queue has been
* <i>structurally modified</i>. See AbstractList for gory details.
*/
transient int modCount; // non-private to simplify nested class access
/**
* Creates a {@code PriorityQueue} with the default initial
* capacity (11) that orders its elements according to their
* {@linkplain Comparable natural ordering}.
*/
public PriorityQueue() {
this(DEFAULT_INITIAL_CAPACITY, null);
}
/**
* Creates a {@code PriorityQueue} with the specified initial
* capacity that orders its elements according to their
* {@linkplain Comparable natural ordering}.
*
* @param initialCapacity the initial capacity for this priority queue
* @throws IllegalArgumentException if {@code initialCapacity} is less
* than 1
*/
public PriorityQueue(int initialCapacity) {
this(initialCapacity, null);
}
/**
* Creates a {@code PriorityQueue} with the default initial capacity and
* whose elements are ordered according to the specified comparator.
*
* @param comparator the comparator that will be used to order this
* priority queue. If {@code null}, the {@linkplain Comparable
* natural ordering} of the elements will be used.
* @since 1.8
*/
public PriorityQueue(Comparator<? super E> comparator) {
this(DEFAULT_INITIAL_CAPACITY, comparator);
}
/**
* Creates a {@code PriorityQueue} with the specified initial capacity
* that orders its elements according to the specified comparator.
*
* @param initialCapacity the initial capacity for this priority queue
* @param comparator the comparator that will be used to order this
* priority queue. If {@code null}, the {@linkplain Comparable
* natural ordering} of the elements will be used.
* @throws IllegalArgumentException if {@code initialCapacity} is
* less than 1
*/
public PriorityQueue(int initialCapacity,
Comparator<? super E> comparator) {
// Note: This restriction of at least one is not actually needed,
// but continues for 1.5 compatibility
if (initialCapacity < 1)
throw new IllegalArgumentException();
this.queue = new Object[initialCapacity];
this.comparator = comparator;
}
/**
* Creates a {@code PriorityQueue} containing the elements in the
* specified collection. If the specified collection is an instance of
* a {@link SortedSet} or is another {@code PriorityQueue}, this
* priority queue will be ordered according to the same ordering.
* Otherwise, this priority queue will be ordered according to the
* {@linkplain Comparable natural ordering} of its elements.
*
* @param c the collection whose elements are to be placed
* into this priority queue
* @throws ClassCastException if elements of the specified collection
* cannot be compared to one another according to the priority
* queue's ordering
* @throws NullPointerException if the specified collection or any
* of its elements are null
*/
public PriorityQueue(Collection<? extends E> c) {
if (c instanceof SortedSet<?>) {
SortedSet<? extends E> ss = (SortedSet<? extends E>) c;
this.comparator = (Comparator<? super E>) ss.comparator();
initElementsFromCollection(ss);
}
else if (c instanceof PriorityQueue<?>) {
PriorityQueue<? extends E> pq = (PriorityQueue<? extends E>) c;
this.comparator = (Comparator<? super E>) pq.comparator();
initFromPriorityQueue(pq);
}
else {
this.comparator = null;
initFromCollection(c);
}
}
/**
* Creates a {@code PriorityQueue} containing the elements in the
* specified priority queue. This priority queue will be
* ordered according to the same ordering as the given priority
* queue.
*
* @param c the priority queue whose elements are to be placed
* into this priority queue
* @throws ClassCastException if elements of {@code c} cannot be
* compared to one another according to {@code c}'s
* ordering
* @throws NullPointerException if the specified priority queue or any
* of its elements are null
*/
public PriorityQueue(PriorityQueue<? extends E> c) {
this.comparator = (Comparator<? super E>) c.comparator();
initFromPriorityQueue(c);
}
/**
* Creates a {@code PriorityQueue} containing the elements in the
* specified sorted set. This priority queue will be ordered
* according to the same ordering as the given sorted set.
*
* @param c the sorted set whose elements are to be placed
* into this priority queue
* @throws ClassCastException if elements of the specified sorted
* set cannot be compared to one another according to the
* sorted set's ordering
* @throws NullPointerException if the specified sorted set or any
* of its elements are null
*/
public PriorityQueue(SortedSet<? extends E> c) {
this.comparator = (Comparator<? super E>) c.comparator();
initElementsFromCollection(c);
}
/** Ensures that queue[0] exists, helping peek() and poll(). */
private static Object[] ensureNonEmpty(Object[] es) {
return (es.length > 0) ? es : new Object[1];
}
private void initFromPriorityQueue(PriorityQueue<? extends E> c) {
if (c.getClass() == PriorityQueue.class) {
this.queue = ensureNonEmpty(c.toArray());
this.size = c.size();
} else {
initFromCollection(c);
}
}
private void initElementsFromCollection(Collection<? extends E> c) {
Object[] es = c.toArray();
int len = es.length;
if (c.getClass() != ArrayList.class)
es = Arrays.copyOf(es, len, Object[].class);
if (len == 1 || this.comparator != null)
for (Object e : es)
if (e == null)
throw new NullPointerException();
this.queue = ensureNonEmpty(es);
this.size = len;
}
/**
* Initializes queue array with elements from the given Collection.
*
* @param c the collection
*/
private void initFromCollection(Collection<? extends E> c) {
initElementsFromCollection(c);
heapify();
}
/**
* The maximum size of array to allocate.
* Some VMs reserve some header words in an array.
* Attempts to allocate larger arrays may result in
* OutOfMemoryError: Requested array size exceeds VM limit
*/
private static final int MAX_ARRAY_SIZE = Integer.MAX_VALUE - 8;
/**
* Increases the capacity of the array.
*
* @param minCapacity the desired minimum capacity
*/
private void grow(int minCapacity) {
int oldCapacity = queue.length;
// Double size if small; else grow by 50%
int newCapacity = oldCapacity + ((oldCapacity < 64) ?
(oldCapacity + 2) :
(oldCapacity >> 1));
// overflow-conscious code
if (newCapacity - MAX_ARRAY_SIZE > 0)
newCapacity = hugeCapacity(minCapacity);
queue = Arrays.copyOf(queue, newCapacity);
}
private static int hugeCapacity(int minCapacity) {
if (minCapacity < 0) // overflow
throw new OutOfMemoryError();
return (minCapacity > MAX_ARRAY_SIZE) ?
Integer.MAX_VALUE :
MAX_ARRAY_SIZE;
}
/**
* Inserts the specified element into this priority queue.
*
* @return {@code true} (as specified by {@link Collection#add})
* @throws ClassCastException if the specified element cannot be
* compared with elements currently in this priority queue
* according to the priority queue's ordering
* @throws NullPointerException if the specified element is null
*/
public boolean add(E e) {
return offer(e);
}
/**
* Inserts the specified element into this priority queue.
*
* @return {@code true} (as specified by {@link Queue#offer})
* @throws ClassCastException if the specified element cannot be
* compared with elements currently in this priority queue
* according to the priority queue's ordering
* @throws NullPointerException if the specified element is null
*/
public boolean offer(E e) {
if (e == null)
throw new NullPointerException();
modCount++;
int i = size;
if (i >= queue.length)
grow(i + 1);
size = i + 1;
if (i == 0)
queue[0] = e;
else
siftUp(i, e);
return true;
}
public E peek() {
return (E) queue[0];
}
private int indexOf(Object o) {
if (o != null) {
final Object[] es = queue;
for (int i = 0, n = size; i < n; i++)
if (o.equals(es[i]))
return i;
}
return -1;
}
/**
* Removes a single instance of the specified element from this queue,
* if it is present. More formally, removes an element {@code e} such
* that {@code o.equals(e)}, if this queue contains one or more such
* elements. Returns {@code true} if and only if this queue contained
* the specified element (or equivalently, if this queue changed as a
* result of the call).
*
* @param o element to be removed from this queue, if present
* @return {@code true} if this queue changed as a result of the call
*/
public boolean remove(Object o) {
int i = indexOf(o);
if (i == -1)
return false;
else {
removeAt(i);
return true;
}
}
/**
* Identity-based version for use in Itr.remove.
*
* @param o element to be removed from this queue, if present
*/
void removeEq(Object o) {
final Object[] es = queue;
for (int i = 0, n = size; i < n; i++) {
if (o == es[i]) {
removeAt(i);
break;
}
}
}
/**
* Returns {@code true} if this queue contains the specified element.
* More formally, returns {@code true} if and only if this queue contains
* at least one element {@code e} such that {@code o.equals(e)}.
*
* @param o object to be checked for containment in this queue
* @return {@code true} if this queue contains the specified element
*/
public boolean contains(Object o) {
return indexOf(o) >= 0;
}
/**
* Returns an array containing all of the elements in this queue.
* The elements are in no particular order.
*
* <p>The returned array will be "safe" in that no references to it are
* maintained by this queue. (In other words, this method must allocate
* a new array). The caller is thus free to modify the returned array.
*
* <p>This method acts as bridge between array-based and collection-based
* APIs.
*
* @return an array containing all of the elements in this queue
*/
public Object[] toArray() {
return Arrays.copyOf(queue, size);
}
/**
* Returns an array containing all of the elements in this queue; the
* runtime type of the returned array is that of the specified array.
* The returned array elements are in no particular order.
* If the queue fits in the specified array, it is returned therein.
* Otherwise, a new array is allocated with the runtime type of the
* specified array and the size of this queue.
*
* <p>If the queue fits in the specified array with room to spare
* (i.e., the array has more elements than the queue), the element in
* the array immediately following the end of the collection is set to
* {@code null}.
*
* <p>Like the {@link #toArray()} method, this method acts as bridge between
* array-based and collection-based APIs. Further, this method allows
* precise control over the runtime type of the output array, and may,
* under certain circumstances, be used to save allocation costs.
*
* <p>Suppose {@code x} is a queue known to contain only strings.
* The following code can be used to dump the queue into a newly
* allocated array of {@code String}:
*
* <pre> {@code String[] y = x.toArray(new String[0]);}</pre>
*
* Note that {@code toArray(new Object[0])} is identical in function to
* {@code toArray()}.
*
* @param a the array into which the elements of the queue are to
* be stored, if it is big enough; otherwise, a new array of the
* same runtime type is allocated for this purpose.
* @return an array containing all of the elements in this queue
* @throws ArrayStoreException if the runtime type of the specified array
* is not a supertype of the runtime type of every element in
* this queue
* @throws NullPointerException if the specified array is null
*/
public <T> T[] toArray(T[] a) {
final int size = this.size;
if (a.length < size)
// Make a new array of a's runtime type, but my contents:
return (T[]) Arrays.copyOf(queue, size, a.getClass());
System.arraycopy(queue, 0, a, 0, size);
if (a.length > size)
a[size] = null;
return a;
}
/**
* Returns an iterator over the elements in this queue. The iterator
* does not return the elements in any particular order.
*
* @return an iterator over the elements in this queue
*/
public Iterator<E> iterator() {
return new Itr();
}
private final class Itr implements Iterator<E> {
/**
* Index (into queue array) of element to be returned by
* subsequent call to next.
*/
private int cursor;
/**
* Index of element returned by most recent call to next,
* unless that element came from the forgetMeNot list.
* Set to -1 if element is deleted by a call to remove.
*/
private int lastRet = -1;
/**
* A queue of elements that were moved from the unvisited portion of
* the heap into the visited portion as a result of "unlucky" element
* removals during the iteration. (Unlucky element removals are those
* that require a siftup instead of a siftdown.) We must visit all of
* the elements in this list to complete the iteration. We do this
* after we've completed the "normal" iteration.
*
* We expect that most iterations, even those involving removals,
* will not need to store elements in this field.
*/
private ArrayDeque<E> forgetMeNot;
/**
* Element returned by the most recent call to next iff that
* element was drawn from the forgetMeNot list.
*/
private E lastRetElt;
/**
* The modCount value that the iterator believes that the backing
* Queue should have. If this expectation is violated, the iterator
* has detected concurrent modification.
*/
private int expectedModCount = modCount;
Itr() {} // prevent access constructor creation
public boolean hasNext() {
return cursor < size ||
(forgetMeNot != null && !forgetMeNot.isEmpty());
}
public E next() {
if (expectedModCount != modCount)
throw new ConcurrentModificationException();
if (cursor < size)
return (E) queue[lastRet = cursor++];
if (forgetMeNot != null) {
lastRet = -1;
lastRetElt = forgetMeNot.poll();
if (lastRetElt != null)
return lastRetElt;
}
throw new NoSuchElementException();
}
public void remove() {
if (expectedModCount != modCount)
throw new ConcurrentModificationException();
if (lastRet != -1) {
E moved = PriorityQueue.this.removeAt(lastRet);
lastRet = -1;
if (moved == null)
cursor--;
else {
if (forgetMeNot == null)
forgetMeNot = new ArrayDeque<>();
forgetMeNot.add(moved);
}
} else if (lastRetElt != null) {
PriorityQueue.this.removeEq(lastRetElt);
lastRetElt = null;
} else {
throw new IllegalStateException();
}
expectedModCount = modCount;
}
}
public int size() {
return size;
}
/**
* Removes all of the elements from this priority queue.
* The queue will be empty after this call returns.
*/
public void clear() {
modCount++;
final Object[] es = queue;
for (int i = 0, n = size; i < n; i++)
es[i] = null;
size = 0;
}
public E poll() {
final Object[] es;
final E result;
if ((result = (E) ((es = queue)[0])) != null) {
modCount++;
final int n;
final E x = (E) es[(n = --size)];
es[n] = null;
if (n > 0) {
final Comparator<? super E> cmp;
if ((cmp = comparator) == null)
siftDownComparable(0, x, es, n);
else
siftDownUsingComparator(0, x, es, n, cmp);
}
}
return result;
}
/**
* Removes the ith element from queue.
*
* Normally this method leaves the elements at up to i-1,
* inclusive, untouched. Under these circumstances, it returns
* null. Occasionally, in order to maintain the heap invariant,
* it must swap a later element of the list with one earlier than
* i. Under these circumstances, this method returns the element
* that was previously at the end of the list and is now at some
* position before i. This fact is used by iterator.remove so as to
* avoid missing traversing elements.
*/
E removeAt(int i) {
// assert i >= 0 && i < size;
final Object[] es = queue;
modCount++;
int s = --size;
if (s == i) // removed last element
es[i] = null;
else {
E moved = (E) es[s];
es[s] = null;
siftDown(i, moved);
if (es[i] == moved) {
siftUp(i, moved);
if (es[i] != moved)
return moved;
}
}
return null;
}
/**
* Inserts item x at position k, maintaining heap invariant by
* promoting x up the tree until it is greater than or equal to
* its parent, or is the root.
*
* To simplify and speed up coercions and comparisons, the
* Comparable and Comparator versions are separated into different
* methods that are otherwise identical. (Similarly for siftDown.)
*
* @param k the position to fill
* @param x the item to insert
*/
private void siftUp(int k, E x) {
if (comparator != null)
siftUpUsingComparator(k, x, queue, comparator);
else
siftUpComparable(k, x, queue);
}
private static <T> void siftUpComparable(int k, T x, Object[] es) {
Comparable<? super T> key = (Comparable<? super T>) x;
while (k > 0) {
int parent = (k - 1) >>> 1;
Object e = es[parent];
if (key.compareTo((T) e) >= 0)
break;
es[k] = e;
k = parent;
}
es[k] = key;
}
private static <T> void siftUpUsingComparator(
int k, T x, Object[] es, Comparator<? super T> cmp) {
while (k > 0) {
int parent = (k - 1) >>> 1;
Object e = es[parent];
if (cmp.compare(x, (T) e) >= 0)
break;
es[k] = e;
k = parent;
}
es[k] = x;
}
/**
* Inserts item x at position k, maintaining heap invariant by
* demoting x down the tree repeatedly until it is less than or
* equal to its children or is a leaf.
*
* @param k the position to fill
* @param x the item to insert
*/
private void siftDown(int k, E x) {
if (comparator != null)
siftDownUsingComparator(k, x, queue, size, comparator);
else
siftDownComparable(k, x, queue, size);
}
private static <T> void siftDownComparable(int k, T x, Object[] es, int n) {
// assert n > 0;
Comparable<? super T> key = (Comparable<? super T>)x;
int half = n >>> 1; // loop while a non-leaf
while (k < half) {
int child = (k << 1) + 1; // assume left child is least
Object c = es[child];
int right = child + 1;
if (right < n &&
((Comparable<? super T>) c).compareTo((T) es[right]) > 0)
c = es[child = right];
if (key.compareTo((T) c) <= 0)
break;
es[k] = c;
k = child;
}
es[k] = key;
}
private static <T> void siftDownUsingComparator(
int k, T x, Object[] es, int n, Comparator<? super T> cmp) {
// assert n > 0;
int half = n >>> 1;
while (k < half) {
int child = (k << 1) + 1;
Object c = es[child];
int right = child + 1;
if (right < n && cmp.compare((T) c, (T) es[right]) > 0)
c = es[child = right];
if (cmp.compare(x, (T) c) <= 0)
break;
es[k] = c;
k = child;
}
es[k] = x;
}
/**
* Establishes the heap invariant (described above) in the entire tree,
* assuming nothing about the order of the elements prior to the call.
* This classic algorithm due to Floyd (1964) is known to be O(size).
*/
private void heapify() {
final Object[] es = queue;
int n = size, i = (n >>> 1) - 1;
final Comparator<? super E> cmp;
if ((cmp = comparator) == null)
for (; i >= 0; i--)
siftDownComparable(i, (E) es[i], es, n);
else
for (; i >= 0; i--)
siftDownUsingComparator(i, (E) es[i], es, n, cmp);
}
/**
* Returns the comparator used to order the elements in this
* queue, or {@code null} if this queue is sorted according to
* the {@linkplain Comparable natural ordering} of its elements.
*
* @return the comparator used to order this queue, or
* {@code null} if this queue is sorted according to the
* natural ordering of its elements
*/
public Comparator<? super E> comparator() {
return comparator;
}
/**
* Saves this queue to a stream (that is, serializes it).
*
* @param s the stream
* @throws java.io.IOException if an I/O error occurs
* @serialData The length of the array backing the instance is
* emitted (int), followed by all of its elements
* (each an {@code Object}) in the proper order.
*/
private void writeObject(java.io.ObjectOutputStream s)
throws java.io.IOException {
// Write out element count, and any hidden stuff
s.defaultWriteObject();
// Write out array length, for compatibility with 1.5 version
s.writeInt(Math.max(2, size + 1));
// Write out all elements in the "proper order".
final Object[] es = queue;
for (int i = 0, n = size; i < n; i++)
s.writeObject(es[i]);
}
/**
* Reconstitutes the {@code PriorityQueue} instance from a stream
* (that is, deserializes it).
*
* @param s the stream
* @throws ClassNotFoundException if the class of a serialized object
* could not be found
* @throws java.io.IOException if an I/O error occurs
*/
private void readObject(java.io.ObjectInputStream s)
throws java.io.IOException, ClassNotFoundException {
// Read in size, and any hidden stuff
s.defaultReadObject();
// Read in (and discard) array length
s.readInt();
queue = new Object[size];
// Read in all elements.
for (int i = 0; i < size; i++)
queue[i] = s.readObject();
// Elements are guaranteed to be in "proper order", but the
// spec has never explained what that might be.
heapify();
}
/**
* Creates a <em><a href="Spliterator.html#binding">late-binding</a></em>
* and <em>fail-fast</em> {@link Spliterator} over the elements in this
* queue. The spliterator does not traverse elements in any particular order
* (the {@link Spliterator#ORDERED ORDERED} characteristic is not reported).
*
* <p>The {@code Spliterator} reports {@link Spliterator#SIZED},
* {@link Spliterator#SUBSIZED}, and {@link Spliterator#NONNULL}.
* Overriding implementations should document the reporting of additional
* characteristic values.
*
* @return a {@code Spliterator} over the elements in this queue
* @since 1.8
*/
public final Spliterator<E> spliterator() {
return new PriorityQueueSpliterator<>(this, 0, -1, 0);
}
static final class PriorityQueueSpliterator<E> implements Spliterator<E> {
/*
* This is very similar to ArrayList Spliterator, except for
* extra null checks.
*/
private final PriorityQueue<E> pq;
private int index; // current index, modified on advance/split
private int fence; // -1 until first use
private int expectedModCount; // initialized when fence set
/** Creates new spliterator covering the given range. */
PriorityQueueSpliterator(PriorityQueue<E> pq, int origin, int fence,
int expectedModCount) {
this.pq = pq;
this.index = origin;
this.fence = fence;
this.expectedModCount = expectedModCount;
}
private int getFence() { // initialize fence to size on first use
int hi;
if ((hi = fence) < 0) {
expectedModCount = pq.modCount;
hi = fence = pq.size;
}
return hi;
}
public PriorityQueueSpliterator<E> trySplit() {
int hi = getFence(), lo = index, mid = (lo + hi) >>> 1;
return (lo >= mid) ? null :
new PriorityQueueSpliterator<>(pq, lo, index = mid,
expectedModCount);
}
@SuppressWarnings("unchecked")
public void forEachRemaining(Consumer<? super E> action) {
int i, hi, mc; // hoist accesses and checks from loop
PriorityQueue<E> q; Object[] a;
if (action == null)
throw new NullPointerException();
if ((q = pq) != null && (a = q.queue) != null) {
if ((hi = fence) < 0) {
mc = q.modCount;
hi = q.size;
}
else
mc = expectedModCount;
if ((i = index) >= 0 && (index = hi) <= a.length) {
for (E e;; ++i) {
if (i < hi) {
if ((e = (E) a[i]) == null) // must be CME
break;
action.accept(e);
}
else if (q.modCount != mc)
break;
else
return;
}
}
}
throw new ConcurrentModificationException();
}
public boolean tryAdvance(Consumer<? super E> action) {
if (action == null)
throw new NullPointerException();
int hi = getFence(), lo = index;
if (lo >= 0 && lo < hi) {
index = lo + 1;
@SuppressWarnings("unchecked") E e = (E)pq.queue[lo];
if (e == null)
throw new ConcurrentModificationException();
action.accept(e);
if (pq.modCount != expectedModCount)
throw new ConcurrentModificationException();
return true;
}
return false;
}
public long estimateSize() {
return (long) (getFence() - index);
}
public int characteristics() {
return Spliterator.SIZED | Spliterator.SUBSIZED | Spliterator.NONNULL;
}
}
/**
* @throws NullPointerException {@inheritDoc}
*/
public boolean removeIf(Predicate<? super E> filter) {
Objects.requireNonNull(filter);
return bulkRemove(filter);
}
/**
* @throws NullPointerException {@inheritDoc}
*/
public boolean removeAll(Collection<?> c) {
Objects.requireNonNull(c);
return bulkRemove(e -> c.contains(e));
}
/**
* @throws NullPointerException {@inheritDoc}
*/
public boolean retainAll(Collection<?> c) {
Objects.requireNonNull(c);
return bulkRemove(e -> !c.contains(e));
}
// A tiny bit set implementation
private static long[] nBits(int n) {
return new long[((n - 1) >> 6) + 1];
}
private static void setBit(long[] bits, int i) {
bits[i >> 6] |= 1L << i;
}
private static boolean isClear(long[] bits, int i) {
return (bits[i >> 6] & (1L << i)) == 0;
}
/** Implementation of bulk remove methods. */
private boolean bulkRemove(Predicate<? super E> filter) {
final int expectedModCount = ++modCount;
final Object[] es = queue;
final int end = size;
int i;
// Optimize for initial run of survivors
for (i = 0; i < end && !filter.test((E) es[i]); i++)
;
if (i >= end) {
if (modCount != expectedModCount)
throw new ConcurrentModificationException();
return false;
}
// Tolerate predicates that reentrantly access the collection for
// read (but writers still get CME), so traverse once to find
// elements to delete, a second pass to physically expunge.
final int beg = i;
final long[] deathRow = nBits(end - beg);
deathRow[0] = 1L; // set bit 0
for (i = beg + 1; i < end; i++)
if (filter.test((E) es[i]))
setBit(deathRow, i - beg);
if (modCount != expectedModCount)
throw new ConcurrentModificationException();
int w = beg;
for (i = beg; i < end; i++)
if (isClear(deathRow, i - beg))
es[w++] = es[i];
for (i = size = w; i < end; i++)
es[i] = null;
heapify();
return true;
}
/**
* @throws NullPointerException {@inheritDoc}
*/
public void forEach(Consumer<? super E> action) {
Objects.requireNonNull(action);
final int expectedModCount = modCount;
final Object[] es = queue;
for (int i = 0, n = size; i < n; i++)
action.accept((E) es[i]);
if (expectedModCount != modCount)
throw new ConcurrentModificationException();
}
}
|
apache/nifi | 37,104 | nifi-framework-bundle/nifi-framework-extensions/nifi-provenance-repository-bundle/nifi-volatile-provenance-repository/src/main/java/org/apache/nifi/provenance/VolatileProvenanceRepository.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.nifi.provenance;
import org.apache.nifi.authorization.AccessDeniedException;
import org.apache.nifi.authorization.AuthorizationResult;
import org.apache.nifi.authorization.AuthorizationResult.Result;
import org.apache.nifi.authorization.Authorizer;
import org.apache.nifi.authorization.RequestAction;
import org.apache.nifi.authorization.resource.Authorizable;
import org.apache.nifi.authorization.user.NiFiUser;
import org.apache.nifi.events.EventReporter;
import org.apache.nifi.flowfile.attributes.CoreAttributes;
import org.apache.nifi.processor.DataUnit;
import org.apache.nifi.provenance.lineage.ComputeLineageSubmission;
import org.apache.nifi.provenance.lineage.FlowFileLineage;
import org.apache.nifi.provenance.lineage.Lineage;
import org.apache.nifi.provenance.lineage.LineageComputationType;
import org.apache.nifi.provenance.search.Query;
import org.apache.nifi.provenance.search.QueryResult;
import org.apache.nifi.provenance.search.QuerySubmission;
import org.apache.nifi.provenance.search.SearchTerm;
import org.apache.nifi.provenance.search.SearchableField;
import org.apache.nifi.util.NiFiProperties;
import org.apache.nifi.util.RingBuffer;
import org.apache.nifi.util.RingBuffer.Filter;
import org.apache.nifi.util.RingBuffer.IterationDirection;
import org.apache.nifi.web.ResourceNotFoundException;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.Date;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.ThreadFactory;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.concurrent.atomic.AtomicLong;
import java.util.regex.Pattern;
/**
 * An in-memory, volatile implementation of {@link ProvenanceRepository} backed by a
 * fixed-capacity {@link RingBuffer}. Events are discarded oldest-first once the buffer
 * fills, and all state is lost on restart. Queries and lineage computations are executed
 * asynchronously on a small internal thread pool; expired submissions are purged by a
 * scheduled background task.
 */
public class VolatileProvenanceRepository implements ProvenanceRepository {

    // properties
    public static final String BUFFER_SIZE = "nifi.provenance.repository.buffer.size";

    // default property values
    public static final int DEFAULT_BUFFER_SIZE = 10000;

    // NOTE(review): intentionally left non-final for compatibility, but treated as a constant.
    public static String CONTAINER_NAME = "in-memory";

    private final RingBuffer<ProvenanceEventRecord> ringBuffer;
    private final int maxSize;
    private final List<SearchableField> searchableFields;
    private final List<SearchableField> searchableAttributes;
    private final ExecutorService queryExecService;
    private final ScheduledExecutorService scheduledExecService;

    private final ConcurrentMap<String, AsyncQuerySubmission> querySubmissionMap = new ConcurrentHashMap<>();
    private final ConcurrentMap<String, AsyncLineageSubmission> lineageSubmissionMap = new ConcurrentHashMap<>();
    private final AtomicLong idGenerator = new AtomicLong(0L);
    private final AtomicBoolean initialized = new AtomicBoolean(false);

    private Authorizer authorizer; // effectively final
    private ProvenanceAuthorizableFactory resourceFactory; // effectively final

    /**
     * Default no args constructor for service loading only. A repository created this way is
     * not usable until replaced by one built from {@link NiFiProperties}.
     */
    public VolatileProvenanceRepository() {
        ringBuffer = null;
        searchableFields = null;
        searchableAttributes = null;
        queryExecService = null;
        scheduledExecService = null;
        authorizer = null;
        resourceFactory = null;
        maxSize = DEFAULT_BUFFER_SIZE;
    }

    public VolatileProvenanceRepository(final NiFiProperties nifiProperties) {
        this(nifiProperties.getIntegerProperty(BUFFER_SIZE, DEFAULT_BUFFER_SIZE),
            nifiProperties.getProperty(NiFiProperties.PROVENANCE_INDEXED_FIELDS),
            nifiProperties.getProperty(NiFiProperties.PROVENANCE_INDEXED_ATTRIBUTES));
    }

    /**
     * @param maxEvents maximum number of events retained in the ring buffer
     * @param indexedFieldString comma-separated list of searchable field names
     * @param indexAttributeString comma-separated list of searchable FlowFile attribute names
     */
    public VolatileProvenanceRepository(final int maxEvents, final String indexedFieldString, final String indexAttributeString) {
        maxSize = maxEvents;
        ringBuffer = new RingBuffer<>(maxSize);

        searchableFields = Collections.unmodifiableList(SearchableFieldParser.extractSearchableFields(indexedFieldString, true));
        searchableAttributes = Collections.unmodifiableList(SearchableFieldParser.extractSearchableFields(indexAttributeString, false));

        final ThreadFactory defaultThreadFactory = Executors.defaultThreadFactory();
        queryExecService = Executors.newFixedThreadPool(2, new ThreadFactory() {
            private final AtomicInteger counter = new AtomicInteger(0);

            @Override
            public Thread newThread(final Runnable r) {
                final Thread thread = defaultThreadFactory.newThread(r);
                thread.setName("Provenance Query Thread-" + counter.incrementAndGet());
                return thread;
            }
        });

        scheduledExecService = Executors.newScheduledThreadPool(2);
    }

    @Override
    public void initialize(final EventReporter eventReporter, final Authorizer authorizer, final ProvenanceAuthorizableFactory resourceFactory,
                           final IdentifierLookup idLookup) throws IOException {
        // Guard against double-initialization; only the first caller wires up authorization
        // and the expiration sweep.
        if (initialized.getAndSet(true)) {
            return;
        }

        this.authorizer = authorizer;
        this.resourceFactory = resourceFactory;

        scheduledExecService.scheduleWithFixedDelay(new RemoveExpiredQueryResults(), 30L, 30L, TimeUnit.SECONDS);
    }

    @Override
    public ProvenanceEventRepository getProvenanceEventRepository() {
        return this;
    }

    @Override
    public ProvenanceEventBuilder eventBuilder() {
        return new StandardProvenanceEventRecord.Builder();
    }

    /**
     * Assigns the next repository-wide event id and stores the event, evicting the oldest
     * event if the ring buffer is full.
     */
    @Override
    public void registerEvent(final ProvenanceEventRecord event) {
        final long id = idGenerator.getAndIncrement();
        ringBuffer.add(new IdEnrichedProvEvent(event, id));
    }

    @Override
    public void registerEvents(final Iterable<ProvenanceEventRecord> events) {
        for (final ProvenanceEventRecord event : events) {
            registerEvent(event);
        }
    }

    @Override
    public List<ProvenanceEventRecord> getEvents(final long firstRecordId, final int maxRecords) throws IOException {
        return getEvents(firstRecordId, maxRecords, null);
    }

    /**
     * Returns up to {@code maxRecords} events with id &gt;= {@code firstRecordId} that the
     * given user is authorized to see. A null user bypasses authorization.
     */
    @Override
    public List<ProvenanceEventRecord> getEvents(final long firstRecordId, final int maxRecords, final NiFiUser user) {
        return ringBuffer.getSelectedElements(value -> {
            if (!isAuthorized(value, user)) {
                return false;
            }

            return value.getEventId() >= firstRecordId;
        }, maxRecords);
    }

    @Override
    public Long getMaxEventId() {
        final ProvenanceEventRecord newest = ringBuffer.getNewestElement();
        return (newest == null) ? null : newest.getEventId();
    }

    /**
     * Finds the first retained event for the FlowFile with the given UUID, or null if none.
     */
    public ProvenanceEventRecord getEvent(final String identifier) {
        final List<ProvenanceEventRecord> records = ringBuffer.getSelectedElements(event -> identifier.equals(event.getFlowFileUuid()), 1);
        return records.isEmpty() ? null : records.getFirst();
    }

    @Override
    public ProvenanceEventRecord getEvent(final long id) {
        final List<ProvenanceEventRecord> records = ringBuffer.getSelectedElements(event -> event.getEventId() == id, 1);
        return records.isEmpty() ? null : records.getFirst();
    }

    /**
     * @throws org.apache.nifi.authorization.AccessDeniedException if the user is not
     *         authorized to read the event
     */
    @Override
    public ProvenanceEventRecord getEvent(final long id, final NiFiUser user) {
        final ProvenanceEventRecord event = getEvent(id);
        if (event == null) {
            return null;
        }

        authorize(event, user);
        return event;
    }

    @Override
    public void close() throws IOException {
        queryExecService.shutdownNow();
        scheduledExecService.shutdown();
    }

    @Override
    public List<SearchableField> getSearchableFields() {
        return searchableFields;
    }

    @Override
    public List<SearchableField> getSearchableAttributes() {
        return searchableAttributes;
    }

    /**
     * Synchronously executes the given query, polling the asynchronous submission until it
     * completes.
     *
     * @throws IOException if the query fails or the calling thread is interrupted while waiting
     */
    public QueryResult queryEvents(final Query query, final NiFiUser user) throws IOException {
        final QuerySubmission submission = submitQuery(query, user);
        final QueryResult result = submission.getResult();
        while (!result.isFinished()) {
            try {
                Thread.sleep(100L);
            } catch (final InterruptedException ie) {
                // Restore the interrupt status rather than silently swallowing it; continuing
                // to sleep with the flag set would just throw again immediately, so abort the
                // synchronous wait instead.
                Thread.currentThread().interrupt();
                throw new IOException("Interrupted while waiting for query to complete", ie);
            }
        }

        if (result.getError() != null) {
            throw new IOException(result.getError());
        }

        return result;
    }

    /**
     * Determines whether the user may read the given event. Returns true when authorization
     * is not configured (null authorizer) or no user context is supplied.
     */
    public boolean isAuthorized(final ProvenanceEventRecord event, final NiFiUser user) {
        if (authorizer == null || user == null) {
            return true;
        }

        final Authorizable eventAuthorizable;
        try {
            eventAuthorizable = resourceFactory.createProvenanceDataAuthorizable(event.getComponentId());
        } catch (final ResourceNotFoundException rnfe) {
            // The component no longer exists, so we cannot authorize against it; treat as denied.
            return false;
        }

        final AuthorizationResult result = eventAuthorizable.checkAuthorization(authorizer, RequestAction.READ, user);
        return Result.Approved.equals(result.getResult());
    }

    /**
     * Like {@link #isAuthorized} but throws rather than returning a boolean.
     */
    protected void authorize(final ProvenanceEventRecord event, final NiFiUser user) {
        if (authorizer == null || user == null) {
            return;
        }

        final Authorizable eventAuthorizable = resourceFactory.createProvenanceDataAuthorizable(event.getComponentId());
        eventAuthorizable.authorize(authorizer, RequestAction.READ, user);
    }

    /**
     * Builds a predicate that applies authorization, the query's time/size bounds, and all
     * search terms (attribute terms and field terms, with '?'/'*' wildcard support and
     * optional inversion) to a candidate event.
     */
    private Filter<ProvenanceEventRecord> createFilter(final Query query, final NiFiUser user) {
        return event -> {
            if (!isAuthorized(event, user)) {
                return false;
            }

            if (query.getStartDate() != null && query.getStartDate().getTime() > event.getEventTime()) {
                return false;
            }

            if (query.getEndDate() != null && query.getEndDate().getTime() < event.getEventTime()) {
                return false;
            }

            if (query.getMaxFileSize() != null) {
                final long maxFileSize = DataUnit.parseDataSize(query.getMaxFileSize(), DataUnit.B).longValue();
                if (event.getFileSize() > maxFileSize) {
                    return false;
                }
            }

            if (query.getMinFileSize() != null) {
                final long minFileSize = DataUnit.parseDataSize(query.getMinFileSize(), DataUnit.B).longValue();
                if (event.getFileSize() < minFileSize) {
                    return false;
                }
            }

            for (final SearchTerm searchTerm : query.getSearchTerms()) {
                final SearchableField searchableField = searchTerm.getSearchableField();
                final String searchValue = searchTerm.getValue();
                final boolean excludeSearchValue = searchTerm.isInverted().booleanValue();

                if (searchableField.isAttribute()) {
                    final String attributeName = searchableField.getIdentifier();

                    final String eventAttributeValue = event.getAttributes().get(attributeName);

                    if (searchValue.contains("?") || searchValue.contains("*")) {
                        if (eventAttributeValue == null || eventAttributeValue.isEmpty()) {
                            if (!excludeSearchValue) {
                                return false;
                            } else {
                                continue;
                            }
                        }

                        // Translate the simple '?'/'*' glob into a case-insensitive regex.
                        final String regex = searchValue.replace("?", ".").replace("*", ".*");
                        final Pattern pattern = Pattern.compile(regex, Pattern.CASE_INSENSITIVE);
                        final boolean patternMatches = pattern.matcher(eventAttributeValue).matches();
                        if ((!patternMatches && !excludeSearchValue)
                                || (patternMatches && excludeSearchValue)) {
                            return false;
                        }
                    } else if (!searchValue.equalsIgnoreCase(eventAttributeValue) && !excludeSearchValue
                            || searchValue.equalsIgnoreCase(eventAttributeValue) && excludeSearchValue) {
                        return false;
                    }
                } else {
                    // if FlowFileUUID, search parent & child UUID's also.
                    if (searchableField.equals(SearchableFields.FlowFileUUID)) {
                        if (searchValue.contains("?") || searchValue.contains("*")) {
                            final String regex = searchValue.replace("?", ".").replace("*", ".*");
                            final Pattern pattern = Pattern.compile(regex, Pattern.CASE_INSENSITIVE);
                            final boolean patternMatches = pattern.matcher(event.getFlowFileUuid()).matches();

                            if (!excludeSearchValue) {
                                if (patternMatches) {
                                    continue;
                                }

                                boolean found = false;
                                for (final String uuid : event.getParentUuids()) {
                                    if (pattern.matcher(uuid).matches()) {
                                        found = true;
                                        break;
                                    }
                                }

                                for (final String uuid : event.getChildUuids()) {
                                    if (pattern.matcher(uuid).matches()) {
                                        found = true;
                                        break;
                                    }
                                }

                                if (found) {
                                    continue;
                                }
                            } else {
                                if (patternMatches) {
                                    return false;
                                }

                                for (final String uuid : event.getParentUuids()) {
                                    if (pattern.matcher(uuid).matches()) {
                                        return false;
                                    }
                                }

                                for (final String uuid : event.getChildUuids()) {
                                    if (pattern.matcher(uuid).matches()) {
                                        return false;
                                    }
                                }

                                continue;
                            }
                        } else if (!excludeSearchValue
                                && (event.getFlowFileUuid().equals(searchValue) || event.getParentUuids().contains(searchValue) || event.getChildUuids().contains(searchValue))) {
                            continue;
                        } else if (excludeSearchValue
                                && (!event.getFlowFileUuid().equals(searchValue) && !event.getParentUuids().contains(searchValue) && !event.getChildUuids().contains(searchValue))) {
                            continue;
                        }

                        return false;
                    }

                    final Object fieldValue = getFieldValue(event, searchableField);
                    if (fieldValue == null) {
                        if (!excludeSearchValue) {
                            return false;
                        } else {
                            continue;
                        }
                    }

                    if (searchValue.contains("?") || searchValue.contains("*")) {
                        final String regex = searchValue.replace("?", ".").replace("*", ".*");
                        final Pattern pattern = Pattern.compile(regex, Pattern.CASE_INSENSITIVE);
                        final boolean patternMatches = pattern.matcher(String.valueOf(fieldValue)).matches();

                        if (!patternMatches && !excludeSearchValue
                                || patternMatches && excludeSearchValue) {
                            return false;
                        }
                    } else if (!searchValue.equalsIgnoreCase(String.valueOf(fieldValue)) && !excludeSearchValue
                            || searchValue.equalsIgnoreCase(String.valueOf(fieldValue)) && excludeSearchValue) {
                        return false;
                    }
                }
            }

            return true;
        };
    }

    /**
     * Maps a searchable field to the corresponding value of the event, or null for fields
     * this repository does not index.
     */
    private Object getFieldValue(final ProvenanceEventRecord record, final SearchableField field) {
        if (SearchableFields.AlternateIdentifierURI.equals(field)) {
            return record.getAlternateIdentifierUri();
        }
        if (SearchableFields.ComponentID.equals(field)) {
            return record.getComponentId();
        }
        if (SearchableFields.Details.equals(field)) {
            return record.getDetails();
        }
        if (SearchableFields.EventTime.equals(field)) {
            return record.getEventTime();
        }
        if (SearchableFields.EventType.equals(field)) {
            return record.getEventType();
        }
        if (SearchableFields.Filename.equals(field)) {
            return record.getAttributes().get(CoreAttributes.FILENAME.key());
        }
        if (SearchableFields.FileSize.equals(field)) {
            return record.getFileSize();
        }
        if (SearchableFields.FlowFileUUID.equals(field)) {
            return record.getFlowFileUuid();
        }
        if (SearchableFields.LineageStartDate.equals(field)) {
            return record.getLineageStartDate();
        }
        if (SearchableFields.Relationship.equals(field)) {
            return record.getRelationship();
        }
        if (SearchableFields.TransitURI.equals(field)) {
            return record.getTransitUri();
        }

        return null;
    }

    /**
     * Submits an asynchronous query. The returned submission is registered so it can later
     * be retrieved via {@link #retrieveQuerySubmission(String, NiFiUser)}.
     */
    @Override
    public QuerySubmission submitQuery(final Query query, final NiFiUser user) {
        if (query.getEndDate() != null && query.getStartDate() != null && query.getStartDate().getTime() > query.getEndDate().getTime()) {
            throw new IllegalArgumentException("Query End Time cannot be before Query Start Time");
        }

        final String userId = user == null ? null : user.getIdentity();

        if (query.getSearchTerms().isEmpty() && query.getStartDate() == null && query.getEndDate() == null) {
            final AsyncQuerySubmission result = new AsyncQuerySubmission(query, 1, userId);
            queryExecService.submit(new QueryRunnable(ringBuffer, createFilter(query, user), query.getMaxResults(), result));
            querySubmissionMap.put(query.getIdentifier(), result);
            return result;
        }

        final AsyncQuerySubmission result = new AsyncQuerySubmission(query, 1, userId);
        querySubmissionMap.put(query.getIdentifier(), result);
        queryExecService.submit(new QueryRunnable(ringBuffer, createFilter(query, user), query.getMaxResults(), result));

        return result;
    }

    @Override
    public List<ProvenanceEventRecord> getLatestCachedEvents(final String componentId, final int eventLimit) {
        // NOTE(review): only a single matching event is selected, so eventLimit is effectively
        // capped at 1 here — confirm whether the limit should be honored.
        final List<ProvenanceEventRecord> matches = ringBuffer.getSelectedElements(
            event -> componentId.equals(event.getComponentId()), 1);

        if (matches.isEmpty()) {
            return List.of();
        }

        return List.of(matches.getLast());
    }

    /**
     * Retrieves a previously-submitted query, enforcing that only the submitting user (or an
     * anonymous context, when the submission was anonymous) may access it.
     *
     * @return the submission, or null if it is unknown or has already expired
     */
    @Override
    public QuerySubmission retrieveQuerySubmission(final String queryIdentifier, final NiFiUser user) {
        final QuerySubmission submission = querySubmissionMap.get(queryIdentifier);
        if (submission == null) {
            // The submission may have expired and been purged; avoid an NPE and let the
            // caller handle absence.
            return null;
        }

        final String userId = submission.getSubmitterIdentity();

        if (user == null && userId == null) {
            return submission;
        }

        if (user == null) {
            throw new AccessDeniedException("Cannot retrieve Provenance Query Submission because no user id was provided in the provenance request.");
        }

        if (userId == null || userId.equals(user.getIdentity())) {
            return submission;
        }

        throw new AccessDeniedException("Cannot retrieve Provenance Query Submission because " + user.getIdentity() + " is not the user who submitted the request.");
    }

    public Lineage computeLineage(final String flowFileUUID, final NiFiUser user) throws IOException {
        return computeLineage(Collections.singleton(flowFileUUID), user, LineageComputationType.FLOWFILE_LINEAGE, null);
    }

    /**
     * Synchronously computes lineage for the given FlowFile UUIDs, polling the asynchronous
     * submission until it completes.
     *
     * @throws IOException if the computation fails or the calling thread is interrupted
     */
    private Lineage computeLineage(final Collection<String> flowFileUuids, final NiFiUser user, final LineageComputationType computationType, final Long eventId) throws IOException {
        final AsyncLineageSubmission submission = submitLineageComputation(flowFileUuids, user, computationType, eventId);
        final StandardLineageResult result = submission.getResult();
        while (!result.isFinished()) {
            try {
                Thread.sleep(100L);
            } catch (final InterruptedException ie) {
                // Same rationale as queryEvents: preserve the interrupt and abort the wait.
                Thread.currentThread().interrupt();
                throw new IOException("Interrupted while waiting for lineage computation to complete", ie);
            }
        }

        if (result.getError() != null) {
            throw new IOException(result.getError());
        }

        return new FlowFileLineage(result.getNodes(), result.getEdges());
    }

    @Override
    public ComputeLineageSubmission submitLineageComputation(final long eventId, final NiFiUser user) {
        final ProvenanceEventRecord event = getEvent(eventId);
        if (event == null) {
            // Event has aged out of the ring buffer; return a completed submission carrying
            // the error so callers get a uniform asynchronous interface.
            final String userId = user == null ? null : user.getIdentity();
            final AsyncLineageSubmission result = new AsyncLineageSubmission(LineageComputationType.FLOWFILE_LINEAGE, eventId, Collections.emptySet(), 1, userId);
            result.getResult().setError("Could not find event with ID " + eventId);
            lineageSubmissionMap.put(result.getLineageIdentifier(), result);
            return result;
        }

        return submitLineageComputation(Collections.singleton(event.getFlowFileUuid()), user, LineageComputationType.FLOWFILE_LINEAGE, eventId);
    }

    @Override
    public AsyncLineageSubmission submitLineageComputation(final String flowFileUuid, final NiFiUser user) {
        return submitLineageComputation(Collections.singleton(flowFileUuid), user, LineageComputationType.FLOWFILE_LINEAGE, null);
    }

    /**
     * Retrieves a previously-submitted lineage computation, with the same access rules as
     * {@link #retrieveQuerySubmission(String, NiFiUser)}.
     *
     * @return the submission, or null if it is unknown or has already expired
     */
    @Override
    public ComputeLineageSubmission retrieveLineageSubmission(String lineageIdentifier, final NiFiUser user) {
        final ComputeLineageSubmission submission = lineageSubmissionMap.get(lineageIdentifier);
        if (submission == null) {
            // The submission may have expired and been purged; avoid an NPE and let the
            // caller handle absence.
            return null;
        }

        final String userId = submission.getSubmitterIdentity();

        if (user == null && userId == null) {
            return submission;
        }

        if (user == null) {
            throw new AccessDeniedException("Cannot retrieve Provenance Lineage Submission because no user id was provided in the lineage request.");
        }

        if (userId == null || userId.equals(user.getIdentity())) {
            return submission;
        }

        throw new AccessDeniedException("Cannot retrieve Provenance Lineage Submission because " + user.getIdentity() + " is not the user who submitted the request.");
    }

    public Lineage expandSpawnEventParents(String identifier) throws IOException {
        throw new UnsupportedOperationException();
    }

    @Override
    public ComputeLineageSubmission submitExpandParents(final long eventId, final NiFiUser user) {
        final String userId = user == null ? null : user.getIdentity();

        final ProvenanceEventRecord event = getEvent(eventId, user);
        if (event == null) {
            final AsyncLineageSubmission submission = new AsyncLineageSubmission(LineageComputationType.EXPAND_PARENTS, eventId, Collections.emptyList(), 1, userId);
            lineageSubmissionMap.put(submission.getLineageIdentifier(), submission);
            submission.getResult().update(Collections.emptyList(), 0L);
            return submission;
        }

        switch (event.getEventType()) {
            // Only fan-in/fan-out event types carry parent UUIDs to expand.
            case JOIN:
            case FORK:
            case REPLAY:
            case CLONE:
                return submitLineageComputation(event.getParentUuids(), user, LineageComputationType.EXPAND_PARENTS, eventId);
            default: {
                final AsyncLineageSubmission submission = new AsyncLineageSubmission(LineageComputationType.EXPAND_PARENTS, eventId, Collections.emptyList(), 1, userId);
                lineageSubmissionMap.put(submission.getLineageIdentifier(), submission);
                submission.getResult().setError("Event ID " + eventId + " indicates an event of type " + event.getEventType() + " so its parents cannot be expanded");
                return submission;
            }
        }
    }

    public Lineage expandSpawnEventChildren(final String identifier) {
        throw new UnsupportedOperationException();
    }

    @Override
    public ComputeLineageSubmission submitExpandChildren(final long eventId, final NiFiUser user) {
        final String userId = user == null ? null : user.getIdentity();

        final ProvenanceEventRecord event = getEvent(eventId, user);
        if (event == null) {
            final AsyncLineageSubmission submission = new AsyncLineageSubmission(LineageComputationType.EXPAND_CHILDREN, eventId, Collections.emptyList(), 1, userId);
            lineageSubmissionMap.put(submission.getLineageIdentifier(), submission);
            submission.getResult().update(Collections.emptyList(), 0L);
            return submission;
        }

        switch (event.getEventType()) {
            // Only fan-in/fan-out event types carry child UUIDs to expand.
            case JOIN:
            case FORK:
            case REPLAY:
            case CLONE:
                return submitLineageComputation(event.getChildUuids(), user, LineageComputationType.EXPAND_CHILDREN, eventId);
            default: {
                final AsyncLineageSubmission submission = new AsyncLineageSubmission(LineageComputationType.EXPAND_CHILDREN, eventId, Collections.emptyList(), 1, userId);
                lineageSubmissionMap.put(submission.getLineageIdentifier(), submission);
                submission.getResult().setError("Event ID " + eventId + " indicates an event of type " + event.getEventType() + " so its children cannot be expanded");
                return submission;
            }
        }
    }

    @Override
    public long getContainerCapacity(final String containerName) throws IOException {
        return maxSize;
    }

    @Override
    public Set<String> getContainerNames() {
        return Collections.singleton(CONTAINER_NAME);
    }

    @Override
    public long getContainerUsableSpace(String containerName) throws IOException {
        return maxSize - ringBuffer.getSize();
    }

    @Override
    public String getContainerFileStoreName(String containerName) {
        return null;
    }

    /**
     * Registers and starts an asynchronous lineage computation over events whose FlowFile
     * UUID, or any parent/child UUID, is in {@code flowFileUuids} and which the user is
     * authorized to read.
     */
    private AsyncLineageSubmission submitLineageComputation(final Collection<String> flowFileUuids, final NiFiUser user, final LineageComputationType computationType, final Long eventId) {
        final String userId = user == null ? null : user.getIdentity();
        final AsyncLineageSubmission result = new AsyncLineageSubmission(computationType, eventId, flowFileUuids, 1, userId);
        lineageSubmissionMap.put(result.getLineageIdentifier(), result);

        final Filter<ProvenanceEventRecord> filter = event -> {
            if (!isAuthorized(event, user)) {
                return false;
            }

            if (flowFileUuids.contains(event.getFlowFileUuid())) {
                return true;
            }

            for (final String parentId : event.getParentUuids()) {
                if (flowFileUuids.contains(parentId)) {
                    return true;
                }
            }

            for (final String childId : event.getChildUuids()) {
                if (flowFileUuids.contains(childId)) {
                    return true;
                }
            }

            return false;
        };

        queryExecService.submit(new ComputeLineageRunnable(ringBuffer, filter, result));

        return result;
    }

    /**
     * Scans the ring buffer (newest first) applying a filter, collecting up to
     * {@code maxRecords} matches while still counting the total number of matches.
     */
    private static class QueryRunnable implements Runnable {

        private final RingBuffer<ProvenanceEventRecord> ringBuffer;
        private final Filter<ProvenanceEventRecord> filter;
        private final AsyncQuerySubmission submission;
        private final int maxRecords;

        public QueryRunnable(final RingBuffer<ProvenanceEventRecord> ringBuffer, final Filter<ProvenanceEventRecord> filter, final int maxRecords, final AsyncQuerySubmission submission) {
            this.ringBuffer = ringBuffer;
            this.filter = filter;
            this.submission = submission;
            this.maxRecords = maxRecords;
        }

        @Override
        public void run() {
            // Retrieve the most recent results and count the total number of matches
            final AtomicInteger matchingCount = new AtomicInteger(0);
            final List<ProvenanceEventRecord> matchingRecords = new ArrayList<>(maxRecords);
            ringBuffer.forEach(record -> {
                if (filter.select(record)) {
                    // Keep counting past maxRecords so the total hit count is accurate.
                    if (matchingCount.incrementAndGet() <= maxRecords) {
                        matchingRecords.add(record);
                    }
                }

                return true;
            }, IterationDirection.BACKWARD);

            submission.getResult().update(matchingRecords, matchingCount.get());
        }
    }

    /**
     * Collects every ring-buffer event that passes the lineage filter and publishes the
     * result to the submission.
     */
    private static class ComputeLineageRunnable implements Runnable {

        private final RingBuffer<ProvenanceEventRecord> ringBuffer;
        private final Filter<ProvenanceEventRecord> filter;
        private final AsyncLineageSubmission submission;

        public ComputeLineageRunnable(final RingBuffer<ProvenanceEventRecord> ringBuffer, final Filter<ProvenanceEventRecord> filter, final AsyncLineageSubmission submission) {
            this.ringBuffer = ringBuffer;
            this.filter = filter;
            this.submission = submission;
        }

        @Override
        public void run() {
            final List<ProvenanceEventRecord> records = ringBuffer.getSelectedElements(filter);
            submission.getResult().update(records, records.size());
        }
    }

    /**
     * Periodic task that purges finished query and lineage submissions whose expiration time
     * has passed. ConcurrentHashMap permits removal while iterating.
     */
    private class RemoveExpiredQueryResults implements Runnable {

        @Override
        public void run() {
            final Date now = new Date();

            final Iterator<Map.Entry<String, AsyncQuerySubmission>> queryIterator = querySubmissionMap.entrySet().iterator();
            while (queryIterator.hasNext()) {
                final Map.Entry<String, AsyncQuerySubmission> entry = queryIterator.next();

                final StandardQueryResult result = entry.getValue().getResult();
                if (result.isFinished() && result.getExpiration().before(now)) {
                    querySubmissionMap.remove(entry.getKey());
                }
            }

            final Iterator<Map.Entry<String, AsyncLineageSubmission>> lineageIterator = lineageSubmissionMap.entrySet().iterator();
            while (lineageIterator.hasNext()) {
                final Map.Entry<String, AsyncLineageSubmission> entry = lineageIterator.next();

                final StandardLineageResult result = entry.getValue().getResult();
                if (result.isFinished() && result.getExpiration().before(now)) {
                    // BUG FIX: this previously removed from querySubmissionMap (copy-paste
                    // error), so expired lineage submissions were never purged and leaked.
                    lineageSubmissionMap.remove(entry.getKey());
                }
            }
        }
    }

    /**
     * Pure delegating wrapper that overlays a repository-assigned event id onto an
     * otherwise-unmodified {@link ProvenanceEventRecord}.
     */
    private static class IdEnrichedProvEvent implements ProvenanceEventRecord {

        private final ProvenanceEventRecord record;
        private final long id;

        public IdEnrichedProvEvent(final ProvenanceEventRecord record, final long id) {
            this.record = record;
            this.id = id;
        }

        @Override
        public long getEventId() {
            return id;
        }

        @Override
        public long getEventTime() {
            return record.getEventTime();
        }

        @Override
        public long getFlowFileEntryDate() {
            return record.getFlowFileEntryDate();
        }

        @Override
        public long getLineageStartDate() {
            return record.getLineageStartDate();
        }

        @Override
        public long getFileSize() {
            return record.getFileSize();
        }

        @Override
        public Long getPreviousFileSize() {
            return record.getPreviousFileSize();
        }

        @Override
        public long getEventDuration() {
            return record.getEventDuration();
        }

        @Override
        public ProvenanceEventType getEventType() {
            return record.getEventType();
        }

        @Override
        public Map<String, String> getAttributes() {
            return record.getAttributes();
        }

        @Override
        public Map<String, String> getPreviousAttributes() {
            return record.getPreviousAttributes();
        }

        @Override
        public Map<String, String> getUpdatedAttributes() {
            return record.getUpdatedAttributes();
        }

        @Override
        public String getComponentId() {
            return record.getComponentId();
        }

        @Override
        public String getComponentType() {
            return record.getComponentType();
        }

        @Override
        public String getTransitUri() {
            return record.getTransitUri();
        }

        @Override
        public String getSourceSystemFlowFileIdentifier() {
            return record.getSourceSystemFlowFileIdentifier();
        }

        @Override
        public String getFlowFileUuid() {
            return record.getFlowFileUuid();
        }

        @Override
        public List<String> getParentUuids() {
            return record.getParentUuids();
        }

        @Override
        public List<String> getChildUuids() {
            return record.getChildUuids();
        }

        @Override
        public String getAlternateIdentifierUri() {
            return record.getAlternateIdentifierUri();
        }

        @Override
        public String getDetails() {
            return record.getDetails();
        }

        @Override
        public String getRelationship() {
            return record.getRelationship();
        }

        @Override
        public String getSourceQueueIdentifier() {
            return record.getSourceQueueIdentifier();
        }

        @Override
        public String getContentClaimSection() {
            return record.getContentClaimSection();
        }

        @Override
        public String getPreviousContentClaimSection() {
            return record.getPreviousContentClaimSection();
        }

        @Override
        public String getContentClaimContainer() {
            return record.getContentClaimContainer();
        }

        @Override
        public String getPreviousContentClaimContainer() {
            return record.getPreviousContentClaimContainer();
        }

        @Override
        public String getContentClaimIdentifier() {
            return record.getContentClaimIdentifier();
        }

        @Override
        public String getPreviousContentClaimIdentifier() {
            return record.getPreviousContentClaimIdentifier();
        }

        @Override
        public Long getContentClaimOffset() {
            return record.getContentClaimOffset();
        }

        @Override
        public Long getPreviousContentClaimOffset() {
            return record.getPreviousContentClaimOffset();
        }

        /**
         * Returns the best event identifier for this event (eventId if available, descriptive identifier if not yet persisted to allow for traceability).
         *
         * @return a descriptive event ID to allow tracing
         */
        @Override
        public String getBestEventIdentifier() {
            return Long.toString(getEventId());
        }
    }
}
|
googleapis/google-cloud-java | 36,994 | java-dataplex/proto-google-cloud-dataplex-v1/src/main/java/com/google/cloud/dataplex/v1/ListDataScanJobsResponse.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/dataplex/v1/datascans.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.dataplex.v1;
/**
*
*
* <pre>
* List DataScanJobs response.
* </pre>
*
* Protobuf type {@code google.cloud.dataplex.v1.ListDataScanJobsResponse}
*/
public final class ListDataScanJobsResponse extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.dataplex.v1.ListDataScanJobsResponse)
ListDataScanJobsResponseOrBuilder {
private static final long serialVersionUID = 0L;
// Use ListDataScanJobsResponse.newBuilder() to construct.
private ListDataScanJobsResponse(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private ListDataScanJobsResponse() {
dataScanJobs_ = java.util.Collections.emptyList();
nextPageToken_ = "";
}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
return new ListDataScanJobsResponse();
}
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.dataplex.v1.DataScansProto
.internal_static_google_cloud_dataplex_v1_ListDataScanJobsResponse_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.dataplex.v1.DataScansProto
.internal_static_google_cloud_dataplex_v1_ListDataScanJobsResponse_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.dataplex.v1.ListDataScanJobsResponse.class,
com.google.cloud.dataplex.v1.ListDataScanJobsResponse.Builder.class);
}
public static final int DATA_SCAN_JOBS_FIELD_NUMBER = 1;
@SuppressWarnings("serial")
private java.util.List<com.google.cloud.dataplex.v1.DataScanJob> dataScanJobs_;
/**
*
*
* <pre>
* DataScanJobs (`BASIC` view only) under a given dataScan.
* </pre>
*
* <code>repeated .google.cloud.dataplex.v1.DataScanJob data_scan_jobs = 1;</code>
*/
@java.lang.Override
public java.util.List<com.google.cloud.dataplex.v1.DataScanJob> getDataScanJobsList() {
return dataScanJobs_;
}
/**
*
*
* <pre>
* DataScanJobs (`BASIC` view only) under a given dataScan.
* </pre>
*
* <code>repeated .google.cloud.dataplex.v1.DataScanJob data_scan_jobs = 1;</code>
*/
@java.lang.Override
public java.util.List<? extends com.google.cloud.dataplex.v1.DataScanJobOrBuilder>
getDataScanJobsOrBuilderList() {
return dataScanJobs_;
}
/**
*
*
* <pre>
* DataScanJobs (`BASIC` view only) under a given dataScan.
* </pre>
*
* <code>repeated .google.cloud.dataplex.v1.DataScanJob data_scan_jobs = 1;</code>
*/
@java.lang.Override
public int getDataScanJobsCount() {
return dataScanJobs_.size();
}
/**
*
*
* <pre>
* DataScanJobs (`BASIC` view only) under a given dataScan.
* </pre>
*
* <code>repeated .google.cloud.dataplex.v1.DataScanJob data_scan_jobs = 1;</code>
*/
@java.lang.Override
public com.google.cloud.dataplex.v1.DataScanJob getDataScanJobs(int index) {
return dataScanJobs_.get(index);
}
/**
*
*
* <pre>
* DataScanJobs (`BASIC` view only) under a given dataScan.
* </pre>
*
* <code>repeated .google.cloud.dataplex.v1.DataScanJob data_scan_jobs = 1;</code>
*/
@java.lang.Override
public com.google.cloud.dataplex.v1.DataScanJobOrBuilder getDataScanJobsOrBuilder(int index) {
return dataScanJobs_.get(index);
}
  public static final int NEXT_PAGE_TOKEN_FIELD_NUMBER = 2;

  // NOTE(review): generated code — standard protobuf lazy-string pattern. The field holds either
  // a ByteString (as parsed off the wire) or a String (after first access); the getters below
  // convert once and cache the result. `volatile` publishes the cached value safely across threads.
  @SuppressWarnings("serial")
  private volatile java.lang.Object nextPageToken_ = "";

  /**
   *
   *
   * <pre>
   * Token to retrieve the next page of results, or empty if there are no more
   * results in the list.
   * </pre>
   *
   * <code>string next_page_token = 2;</code>
   *
   * @return The nextPageToken.
   */
  @java.lang.Override
  public java.lang.String getNextPageToken() {
    java.lang.Object ref = nextPageToken_;
    if (ref instanceof java.lang.String) {
      return (java.lang.String) ref;
    } else {
      com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
      java.lang.String s = bs.toStringUtf8();
      // Cache the decoded String so subsequent calls skip the UTF-8 conversion.
      nextPageToken_ = s;
      return s;
    }
  }

  /**
   *
   *
   * <pre>
   * Token to retrieve the next page of results, or empty if there are no more
   * results in the list.
   * </pre>
   *
   * <code>string next_page_token = 2;</code>
   *
   * @return The bytes for nextPageToken.
   */
  @java.lang.Override
  public com.google.protobuf.ByteString getNextPageTokenBytes() {
    java.lang.Object ref = nextPageToken_;
    if (ref instanceof java.lang.String) {
      com.google.protobuf.ByteString b =
          com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
      // Cache the encoded ByteString for subsequent byte-oriented access.
      nextPageToken_ = b;
      return b;
    } else {
      return (com.google.protobuf.ByteString) ref;
    }
  }
  private byte memoizedIsInitialized = -1;

  // This message has no required fields, so initialization always succeeds; the result is
  // memoized (-1 = unknown, 0 = false, 1 = true) to avoid re-checking.
  @java.lang.Override
  public final boolean isInitialized() {
    byte isInitialized = memoizedIsInitialized;
    if (isInitialized == 1) return true;
    if (isInitialized == 0) return false;
    memoizedIsInitialized = 1;
    return true;
  }
  // Serializes the message in field-number order: repeated data_scan_jobs (1), then
  // next_page_token (2) only when non-empty, then any unknown fields preserved from parsing.
  @java.lang.Override
  public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
    for (int i = 0; i < dataScanJobs_.size(); i++) {
      output.writeMessage(1, dataScanJobs_.get(i));
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 2, nextPageToken_);
    }
    getUnknownFields().writeTo(output);
  }
  // Computes (and memoizes, -1 = not yet computed) the wire size; must mirror writeTo exactly.
  @java.lang.Override
  public int getSerializedSize() {
    int size = memoizedSize;
    if (size != -1) return size;

    size = 0;
    for (int i = 0; i < dataScanJobs_.size(); i++) {
      size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, dataScanJobs_.get(i));
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, nextPageToken_);
    }
    size += getUnknownFields().getSerializedSize();
    memoizedSize = size;
    return size;
  }
  // Field-by-field equality, including unknown fields; non-ListDataScanJobsResponse arguments
  // fall through to the superclass implementation.
  @java.lang.Override
  public boolean equals(final java.lang.Object obj) {
    if (obj == this) {
      return true;
    }
    if (!(obj instanceof com.google.cloud.dataplex.v1.ListDataScanJobsResponse)) {
      return super.equals(obj);
    }
    com.google.cloud.dataplex.v1.ListDataScanJobsResponse other =
        (com.google.cloud.dataplex.v1.ListDataScanJobsResponse) obj;

    if (!getDataScanJobsList().equals(other.getDataScanJobsList())) return false;
    if (!getNextPageToken().equals(other.getNextPageToken())) return false;
    if (!getUnknownFields().equals(other.getUnknownFields())) return false;
    return true;
  }
  // Hash over the same fields equals() compares (memoized; 0 doubles as "not yet computed").
  // The repeated field contributes only when non-empty, matching the generated equals contract.
  @java.lang.Override
  public int hashCode() {
    if (memoizedHashCode != 0) {
      return memoizedHashCode;
    }
    int hash = 41;
    hash = (19 * hash) + getDescriptor().hashCode();
    if (getDataScanJobsCount() > 0) {
      hash = (37 * hash) + DATA_SCAN_JOBS_FIELD_NUMBER;
      hash = (53 * hash) + getDataScanJobsList().hashCode();
    }
    hash = (37 * hash) + NEXT_PAGE_TOKEN_FIELD_NUMBER;
    hash = (53 * hash) + getNextPageToken().hashCode();
    hash = (29 * hash) + getUnknownFields().hashCode();
    memoizedHashCode = hash;
    return hash;
  }
  // Standard generated parseFrom/parseDelimitedFrom overloads: one per input source
  // (ByteBuffer, ByteString, byte[], InputStream, CodedInputStream), each with and without an
  // ExtensionRegistryLite. All delegate to PARSER / GeneratedMessageV3 helpers.
  public static com.google.cloud.dataplex.v1.ListDataScanJobsResponse parseFrom(
      java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.cloud.dataplex.v1.ListDataScanJobsResponse parseFrom(
      java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.cloud.dataplex.v1.ListDataScanJobsResponse parseFrom(
      com.google.protobuf.ByteString data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.cloud.dataplex.v1.ListDataScanJobsResponse parseFrom(
      com.google.protobuf.ByteString data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.cloud.dataplex.v1.ListDataScanJobsResponse parseFrom(byte[] data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.cloud.dataplex.v1.ListDataScanJobsResponse parseFrom(
      byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.cloud.dataplex.v1.ListDataScanJobsResponse parseFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }

  public static com.google.cloud.dataplex.v1.ListDataScanJobsResponse parseFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }

  // Delimited variants read a length-prefixed message from the stream.
  public static com.google.cloud.dataplex.v1.ListDataScanJobsResponse parseDelimitedFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
  }

  public static com.google.cloud.dataplex.v1.ListDataScanJobsResponse parseDelimitedFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
        PARSER, input, extensionRegistry);
  }

  public static com.google.cloud.dataplex.v1.ListDataScanJobsResponse parseFrom(
      com.google.protobuf.CodedInputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }

  public static com.google.cloud.dataplex.v1.ListDataScanJobsResponse parseFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }
  // Builder factories. toBuilder() on the default instance returns a fresh Builder to avoid
  // an unnecessary mergeFrom of an all-default message.
  @java.lang.Override
  public Builder newBuilderForType() {
    return newBuilder();
  }

  public static Builder newBuilder() {
    return DEFAULT_INSTANCE.toBuilder();
  }

  public static Builder newBuilder(
      com.google.cloud.dataplex.v1.ListDataScanJobsResponse prototype) {
    return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
  }

  @java.lang.Override
  public Builder toBuilder() {
    return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
  }

  @java.lang.Override
  protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
    Builder builder = new Builder(parent);
    return builder;
  }
  /**
   *
   *
   * <pre>
   * List DataScanJobs response.
   * </pre>
   *
   * Protobuf type {@code google.cloud.dataplex.v1.ListDataScanJobsResponse}
   */
  public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
      implements
      // @@protoc_insertion_point(builder_implements:google.cloud.dataplex.v1.ListDataScanJobsResponse)
      com.google.cloud.dataplex.v1.ListDataScanJobsResponseOrBuilder {
    // NOTE(review): generated builder — do not hand-edit. bitField0_ tracks which fields are set:
    // bit 0x1 = data_scan_jobs list is mutable/owned, bit 0x2 = next_page_token was set.
    public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
      return com.google.cloud.dataplex.v1.DataScansProto
          .internal_static_google_cloud_dataplex_v1_ListDataScanJobsResponse_descriptor;
    }

    @java.lang.Override
    protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return com.google.cloud.dataplex.v1.DataScansProto
          .internal_static_google_cloud_dataplex_v1_ListDataScanJobsResponse_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              com.google.cloud.dataplex.v1.ListDataScanJobsResponse.class,
              com.google.cloud.dataplex.v1.ListDataScanJobsResponse.Builder.class);
    }

    // Construct using com.google.cloud.dataplex.v1.ListDataScanJobsResponse.newBuilder()
    private Builder() {}

    private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
      super(parent);
    }

    @java.lang.Override
    public Builder clear() {
      super.clear();
      bitField0_ = 0;
      if (dataScanJobsBuilder_ == null) {
        dataScanJobs_ = java.util.Collections.emptyList();
      } else {
        dataScanJobs_ = null;
        dataScanJobsBuilder_.clear();
      }
      bitField0_ = (bitField0_ & ~0x00000001);
      nextPageToken_ = "";
      return this;
    }

    @java.lang.Override
    public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
      return com.google.cloud.dataplex.v1.DataScansProto
          .internal_static_google_cloud_dataplex_v1_ListDataScanJobsResponse_descriptor;
    }

    @java.lang.Override
    public com.google.cloud.dataplex.v1.ListDataScanJobsResponse getDefaultInstanceForType() {
      return com.google.cloud.dataplex.v1.ListDataScanJobsResponse.getDefaultInstance();
    }

    @java.lang.Override
    public com.google.cloud.dataplex.v1.ListDataScanJobsResponse build() {
      com.google.cloud.dataplex.v1.ListDataScanJobsResponse result = buildPartial();
      if (!result.isInitialized()) {
        throw newUninitializedMessageException(result);
      }
      return result;
    }

    @java.lang.Override
    public com.google.cloud.dataplex.v1.ListDataScanJobsResponse buildPartial() {
      com.google.cloud.dataplex.v1.ListDataScanJobsResponse result =
          new com.google.cloud.dataplex.v1.ListDataScanJobsResponse(this);
      buildPartialRepeatedFields(result);
      if (bitField0_ != 0) {
        buildPartial0(result);
      }
      onBuilt();
      return result;
    }

    // Transfers the repeated field: when no field builder exists, the local list is frozen
    // (wrapped unmodifiable) and ownership moves to the message.
    private void buildPartialRepeatedFields(
        com.google.cloud.dataplex.v1.ListDataScanJobsResponse result) {
      if (dataScanJobsBuilder_ == null) {
        if (((bitField0_ & 0x00000001) != 0)) {
          dataScanJobs_ = java.util.Collections.unmodifiableList(dataScanJobs_);
          bitField0_ = (bitField0_ & ~0x00000001);
        }
        result.dataScanJobs_ = dataScanJobs_;
      } else {
        result.dataScanJobs_ = dataScanJobsBuilder_.build();
      }
    }

    private void buildPartial0(com.google.cloud.dataplex.v1.ListDataScanJobsResponse result) {
      int from_bitField0_ = bitField0_;
      if (((from_bitField0_ & 0x00000002) != 0)) {
        result.nextPageToken_ = nextPageToken_;
      }
    }

    @java.lang.Override
    public Builder clone() {
      return super.clone();
    }

    @java.lang.Override
    public Builder setField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.setField(field, value);
    }

    @java.lang.Override
    public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
      return super.clearField(field);
    }

    @java.lang.Override
    public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
      return super.clearOneof(oneof);
    }

    @java.lang.Override
    public Builder setRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
      return super.setRepeatedField(field, index, value);
    }

    @java.lang.Override
    public Builder addRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.addRepeatedField(field, value);
    }

    @java.lang.Override
    public Builder mergeFrom(com.google.protobuf.Message other) {
      if (other instanceof com.google.cloud.dataplex.v1.ListDataScanJobsResponse) {
        return mergeFrom((com.google.cloud.dataplex.v1.ListDataScanJobsResponse) other);
      } else {
        super.mergeFrom(other);
        return this;
      }
    }

    // Merge semantics: repeated data_scan_jobs entries are appended; a non-empty
    // next_page_token in `other` overwrites the local value.
    public Builder mergeFrom(com.google.cloud.dataplex.v1.ListDataScanJobsResponse other) {
      if (other == com.google.cloud.dataplex.v1.ListDataScanJobsResponse.getDefaultInstance())
        return this;
      if (dataScanJobsBuilder_ == null) {
        if (!other.dataScanJobs_.isEmpty()) {
          if (dataScanJobs_.isEmpty()) {
            // Adopt the other message's (immutable) list directly; copy lazily on first mutation.
            dataScanJobs_ = other.dataScanJobs_;
            bitField0_ = (bitField0_ & ~0x00000001);
          } else {
            ensureDataScanJobsIsMutable();
            dataScanJobs_.addAll(other.dataScanJobs_);
          }
          onChanged();
        }
      } else {
        if (!other.dataScanJobs_.isEmpty()) {
          if (dataScanJobsBuilder_.isEmpty()) {
            dataScanJobsBuilder_.dispose();
            dataScanJobsBuilder_ = null;
            dataScanJobs_ = other.dataScanJobs_;
            bitField0_ = (bitField0_ & ~0x00000001);
            dataScanJobsBuilder_ =
                com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders
                    ? getDataScanJobsFieldBuilder()
                    : null;
          } else {
            dataScanJobsBuilder_.addAllMessages(other.dataScanJobs_);
          }
        }
      }
      if (!other.getNextPageToken().isEmpty()) {
        nextPageToken_ = other.nextPageToken_;
        bitField0_ |= 0x00000002;
        onChanged();
      }
      this.mergeUnknownFields(other.getUnknownFields());
      onChanged();
      return this;
    }

    @java.lang.Override
    public final boolean isInitialized() {
      return true;
    }

    // Streaming parse loop keyed by wire tag: 10 = field 1 (data_scan_jobs, length-delimited),
    // 18 = field 2 (next_page_token), 0 = end of stream; unrecognized tags go to unknown fields.
    @java.lang.Override
    public Builder mergeFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      if (extensionRegistry == null) {
        throw new java.lang.NullPointerException();
      }
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              done = true;
              break;
            case 10:
              {
                com.google.cloud.dataplex.v1.DataScanJob m =
                    input.readMessage(
                        com.google.cloud.dataplex.v1.DataScanJob.parser(), extensionRegistry);
                if (dataScanJobsBuilder_ == null) {
                  ensureDataScanJobsIsMutable();
                  dataScanJobs_.add(m);
                } else {
                  dataScanJobsBuilder_.addMessage(m);
                }
                break;
              } // case 10
            case 18:
              {
                nextPageToken_ = input.readStringRequireUtf8();
                bitField0_ |= 0x00000002;
                break;
              } // case 18
            default:
              {
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
          } // switch (tag)
        } // while (!done)
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.unwrapIOException();
      } finally {
        onChanged();
      } // finally
      return this;
    }

    private int bitField0_;

    private java.util.List<com.google.cloud.dataplex.v1.DataScanJob> dataScanJobs_ =
        java.util.Collections.emptyList();

    // Copy-on-write guard: the list may alias an immutable list (emptyList or one adopted in
    // mergeFrom); copy it into an ArrayList before the first mutation.
    private void ensureDataScanJobsIsMutable() {
      if (!((bitField0_ & 0x00000001) != 0)) {
        dataScanJobs_ =
            new java.util.ArrayList<com.google.cloud.dataplex.v1.DataScanJob>(dataScanJobs_);
        bitField0_ |= 0x00000001;
      }
    }

    private com.google.protobuf.RepeatedFieldBuilderV3<
            com.google.cloud.dataplex.v1.DataScanJob,
            com.google.cloud.dataplex.v1.DataScanJob.Builder,
            com.google.cloud.dataplex.v1.DataScanJobOrBuilder>
        dataScanJobsBuilder_;

    /**
     *
     *
     * <pre>
     * DataScanJobs (`BASIC` view only) under a given dataScan.
     * </pre>
     *
     * <code>repeated .google.cloud.dataplex.v1.DataScanJob data_scan_jobs = 1;</code>
     */
    public java.util.List<com.google.cloud.dataplex.v1.DataScanJob> getDataScanJobsList() {
      if (dataScanJobsBuilder_ == null) {
        return java.util.Collections.unmodifiableList(dataScanJobs_);
      } else {
        return dataScanJobsBuilder_.getMessageList();
      }
    }

    /**
     *
     *
     * <pre>
     * DataScanJobs (`BASIC` view only) under a given dataScan.
     * </pre>
     *
     * <code>repeated .google.cloud.dataplex.v1.DataScanJob data_scan_jobs = 1;</code>
     */
    public int getDataScanJobsCount() {
      if (dataScanJobsBuilder_ == null) {
        return dataScanJobs_.size();
      } else {
        return dataScanJobsBuilder_.getCount();
      }
    }

    /**
     *
     *
     * <pre>
     * DataScanJobs (`BASIC` view only) under a given dataScan.
     * </pre>
     *
     * <code>repeated .google.cloud.dataplex.v1.DataScanJob data_scan_jobs = 1;</code>
     */
    public com.google.cloud.dataplex.v1.DataScanJob getDataScanJobs(int index) {
      if (dataScanJobsBuilder_ == null) {
        return dataScanJobs_.get(index);
      } else {
        return dataScanJobsBuilder_.getMessage(index);
      }
    }

    /**
     *
     *
     * <pre>
     * DataScanJobs (`BASIC` view only) under a given dataScan.
     * </pre>
     *
     * <code>repeated .google.cloud.dataplex.v1.DataScanJob data_scan_jobs = 1;</code>
     */
    public Builder setDataScanJobs(int index, com.google.cloud.dataplex.v1.DataScanJob value) {
      if (dataScanJobsBuilder_ == null) {
        if (value == null) {
          throw new NullPointerException();
        }
        ensureDataScanJobsIsMutable();
        dataScanJobs_.set(index, value);
        onChanged();
      } else {
        dataScanJobsBuilder_.setMessage(index, value);
      }
      return this;
    }

    /**
     *
     *
     * <pre>
     * DataScanJobs (`BASIC` view only) under a given dataScan.
     * </pre>
     *
     * <code>repeated .google.cloud.dataplex.v1.DataScanJob data_scan_jobs = 1;</code>
     */
    public Builder setDataScanJobs(
        int index, com.google.cloud.dataplex.v1.DataScanJob.Builder builderForValue) {
      if (dataScanJobsBuilder_ == null) {
        ensureDataScanJobsIsMutable();
        dataScanJobs_.set(index, builderForValue.build());
        onChanged();
      } else {
        dataScanJobsBuilder_.setMessage(index, builderForValue.build());
      }
      return this;
    }

    /**
     *
     *
     * <pre>
     * DataScanJobs (`BASIC` view only) under a given dataScan.
     * </pre>
     *
     * <code>repeated .google.cloud.dataplex.v1.DataScanJob data_scan_jobs = 1;</code>
     */
    public Builder addDataScanJobs(com.google.cloud.dataplex.v1.DataScanJob value) {
      if (dataScanJobsBuilder_ == null) {
        if (value == null) {
          throw new NullPointerException();
        }
        ensureDataScanJobsIsMutable();
        dataScanJobs_.add(value);
        onChanged();
      } else {
        dataScanJobsBuilder_.addMessage(value);
      }
      return this;
    }

    /**
     *
     *
     * <pre>
     * DataScanJobs (`BASIC` view only) under a given dataScan.
     * </pre>
     *
     * <code>repeated .google.cloud.dataplex.v1.DataScanJob data_scan_jobs = 1;</code>
     */
    public Builder addDataScanJobs(int index, com.google.cloud.dataplex.v1.DataScanJob value) {
      if (dataScanJobsBuilder_ == null) {
        if (value == null) {
          throw new NullPointerException();
        }
        ensureDataScanJobsIsMutable();
        dataScanJobs_.add(index, value);
        onChanged();
      } else {
        dataScanJobsBuilder_.addMessage(index, value);
      }
      return this;
    }

    /**
     *
     *
     * <pre>
     * DataScanJobs (`BASIC` view only) under a given dataScan.
     * </pre>
     *
     * <code>repeated .google.cloud.dataplex.v1.DataScanJob data_scan_jobs = 1;</code>
     */
    public Builder addDataScanJobs(
        com.google.cloud.dataplex.v1.DataScanJob.Builder builderForValue) {
      if (dataScanJobsBuilder_ == null) {
        ensureDataScanJobsIsMutable();
        dataScanJobs_.add(builderForValue.build());
        onChanged();
      } else {
        dataScanJobsBuilder_.addMessage(builderForValue.build());
      }
      return this;
    }

    /**
     *
     *
     * <pre>
     * DataScanJobs (`BASIC` view only) under a given dataScan.
     * </pre>
     *
     * <code>repeated .google.cloud.dataplex.v1.DataScanJob data_scan_jobs = 1;</code>
     */
    public Builder addDataScanJobs(
        int index, com.google.cloud.dataplex.v1.DataScanJob.Builder builderForValue) {
      if (dataScanJobsBuilder_ == null) {
        ensureDataScanJobsIsMutable();
        dataScanJobs_.add(index, builderForValue.build());
        onChanged();
      } else {
        dataScanJobsBuilder_.addMessage(index, builderForValue.build());
      }
      return this;
    }

    /**
     *
     *
     * <pre>
     * DataScanJobs (`BASIC` view only) under a given dataScan.
     * </pre>
     *
     * <code>repeated .google.cloud.dataplex.v1.DataScanJob data_scan_jobs = 1;</code>
     */
    public Builder addAllDataScanJobs(
        java.lang.Iterable<? extends com.google.cloud.dataplex.v1.DataScanJob> values) {
      if (dataScanJobsBuilder_ == null) {
        ensureDataScanJobsIsMutable();
        com.google.protobuf.AbstractMessageLite.Builder.addAll(values, dataScanJobs_);
        onChanged();
      } else {
        dataScanJobsBuilder_.addAllMessages(values);
      }
      return this;
    }

    /**
     *
     *
     * <pre>
     * DataScanJobs (`BASIC` view only) under a given dataScan.
     * </pre>
     *
     * <code>repeated .google.cloud.dataplex.v1.DataScanJob data_scan_jobs = 1;</code>
     */
    public Builder clearDataScanJobs() {
      if (dataScanJobsBuilder_ == null) {
        dataScanJobs_ = java.util.Collections.emptyList();
        bitField0_ = (bitField0_ & ~0x00000001);
        onChanged();
      } else {
        dataScanJobsBuilder_.clear();
      }
      return this;
    }

    /**
     *
     *
     * <pre>
     * DataScanJobs (`BASIC` view only) under a given dataScan.
     * </pre>
     *
     * <code>repeated .google.cloud.dataplex.v1.DataScanJob data_scan_jobs = 1;</code>
     */
    public Builder removeDataScanJobs(int index) {
      if (dataScanJobsBuilder_ == null) {
        ensureDataScanJobsIsMutable();
        dataScanJobs_.remove(index);
        onChanged();
      } else {
        dataScanJobsBuilder_.remove(index);
      }
      return this;
    }

    /**
     *
     *
     * <pre>
     * DataScanJobs (`BASIC` view only) under a given dataScan.
     * </pre>
     *
     * <code>repeated .google.cloud.dataplex.v1.DataScanJob data_scan_jobs = 1;</code>
     */
    public com.google.cloud.dataplex.v1.DataScanJob.Builder getDataScanJobsBuilder(int index) {
      return getDataScanJobsFieldBuilder().getBuilder(index);
    }

    /**
     *
     *
     * <pre>
     * DataScanJobs (`BASIC` view only) under a given dataScan.
     * </pre>
     *
     * <code>repeated .google.cloud.dataplex.v1.DataScanJob data_scan_jobs = 1;</code>
     */
    public com.google.cloud.dataplex.v1.DataScanJobOrBuilder getDataScanJobsOrBuilder(int index) {
      if (dataScanJobsBuilder_ == null) {
        return dataScanJobs_.get(index);
      } else {
        return dataScanJobsBuilder_.getMessageOrBuilder(index);
      }
    }

    /**
     *
     *
     * <pre>
     * DataScanJobs (`BASIC` view only) under a given dataScan.
     * </pre>
     *
     * <code>repeated .google.cloud.dataplex.v1.DataScanJob data_scan_jobs = 1;</code>
     */
    public java.util.List<? extends com.google.cloud.dataplex.v1.DataScanJobOrBuilder>
        getDataScanJobsOrBuilderList() {
      if (dataScanJobsBuilder_ != null) {
        return dataScanJobsBuilder_.getMessageOrBuilderList();
      } else {
        return java.util.Collections.unmodifiableList(dataScanJobs_);
      }
    }

    /**
     *
     *
     * <pre>
     * DataScanJobs (`BASIC` view only) under a given dataScan.
     * </pre>
     *
     * <code>repeated .google.cloud.dataplex.v1.DataScanJob data_scan_jobs = 1;</code>
     */
    public com.google.cloud.dataplex.v1.DataScanJob.Builder addDataScanJobsBuilder() {
      return getDataScanJobsFieldBuilder()
          .addBuilder(com.google.cloud.dataplex.v1.DataScanJob.getDefaultInstance());
    }

    /**
     *
     *
     * <pre>
     * DataScanJobs (`BASIC` view only) under a given dataScan.
     * </pre>
     *
     * <code>repeated .google.cloud.dataplex.v1.DataScanJob data_scan_jobs = 1;</code>
     */
    public com.google.cloud.dataplex.v1.DataScanJob.Builder addDataScanJobsBuilder(int index) {
      return getDataScanJobsFieldBuilder()
          .addBuilder(index, com.google.cloud.dataplex.v1.DataScanJob.getDefaultInstance());
    }

    /**
     *
     *
     * <pre>
     * DataScanJobs (`BASIC` view only) under a given dataScan.
     * </pre>
     *
     * <code>repeated .google.cloud.dataplex.v1.DataScanJob data_scan_jobs = 1;</code>
     */
    public java.util.List<com.google.cloud.dataplex.v1.DataScanJob.Builder>
        getDataScanJobsBuilderList() {
      return getDataScanJobsFieldBuilder().getBuilderList();
    }

    // Lazily switches the repeated field from plain-list mode to field-builder mode; after this,
    // dataScanJobs_ is nulled and the builder owns the elements.
    private com.google.protobuf.RepeatedFieldBuilderV3<
            com.google.cloud.dataplex.v1.DataScanJob,
            com.google.cloud.dataplex.v1.DataScanJob.Builder,
            com.google.cloud.dataplex.v1.DataScanJobOrBuilder>
        getDataScanJobsFieldBuilder() {
      if (dataScanJobsBuilder_ == null) {
        dataScanJobsBuilder_ =
            new com.google.protobuf.RepeatedFieldBuilderV3<
                com.google.cloud.dataplex.v1.DataScanJob,
                com.google.cloud.dataplex.v1.DataScanJob.Builder,
                com.google.cloud.dataplex.v1.DataScanJobOrBuilder>(
                dataScanJobs_, ((bitField0_ & 0x00000001) != 0), getParentForChildren(), isClean());
        dataScanJobs_ = null;
      }
      return dataScanJobsBuilder_;
    }

    private java.lang.Object nextPageToken_ = "";

    /**
     *
     *
     * <pre>
     * Token to retrieve the next page of results, or empty if there are no more
     * results in the list.
     * </pre>
     *
     * <code>string next_page_token = 2;</code>
     *
     * @return The nextPageToken.
     */
    public java.lang.String getNextPageToken() {
      java.lang.Object ref = nextPageToken_;
      if (!(ref instanceof java.lang.String)) {
        com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        nextPageToken_ = s;
        return s;
      } else {
        return (java.lang.String) ref;
      }
    }

    /**
     *
     *
     * <pre>
     * Token to retrieve the next page of results, or empty if there are no more
     * results in the list.
     * </pre>
     *
     * <code>string next_page_token = 2;</code>
     *
     * @return The bytes for nextPageToken.
     */
    public com.google.protobuf.ByteString getNextPageTokenBytes() {
      java.lang.Object ref = nextPageToken_;
      if (ref instanceof String) {
        com.google.protobuf.ByteString b =
            com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
        nextPageToken_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }

    /**
     *
     *
     * <pre>
     * Token to retrieve the next page of results, or empty if there are no more
     * results in the list.
     * </pre>
     *
     * <code>string next_page_token = 2;</code>
     *
     * @param value The nextPageToken to set.
     * @return This builder for chaining.
     */
    public Builder setNextPageToken(java.lang.String value) {
      if (value == null) {
        throw new NullPointerException();
      }
      nextPageToken_ = value;
      bitField0_ |= 0x00000002;
      onChanged();
      return this;
    }

    /**
     *
     *
     * <pre>
     * Token to retrieve the next page of results, or empty if there are no more
     * results in the list.
     * </pre>
     *
     * <code>string next_page_token = 2;</code>
     *
     * @return This builder for chaining.
     */
    public Builder clearNextPageToken() {
      nextPageToken_ = getDefaultInstance().getNextPageToken();
      bitField0_ = (bitField0_ & ~0x00000002);
      onChanged();
      return this;
    }

    /**
     *
     *
     * <pre>
     * Token to retrieve the next page of results, or empty if there are no more
     * results in the list.
     * </pre>
     *
     * <code>string next_page_token = 2;</code>
     *
     * @param value The bytes for nextPageToken to set.
     * @return This builder for chaining.
     */
    public Builder setNextPageTokenBytes(com.google.protobuf.ByteString value) {
      if (value == null) {
        throw new NullPointerException();
      }
      checkByteStringIsUtf8(value);
      nextPageToken_ = value;
      bitField0_ |= 0x00000002;
      onChanged();
      return this;
    }

    @java.lang.Override
    public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.setUnknownFields(unknownFields);
    }

    @java.lang.Override
    public final Builder mergeUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.mergeUnknownFields(unknownFields);
    }

    // @@protoc_insertion_point(builder_scope:google.cloud.dataplex.v1.ListDataScanJobsResponse)
  }
  // @@protoc_insertion_point(class_scope:google.cloud.dataplex.v1.ListDataScanJobsResponse)
  // Singleton all-defaults instance; created eagerly at class load.
  private static final com.google.cloud.dataplex.v1.ListDataScanJobsResponse DEFAULT_INSTANCE;

  static {
    DEFAULT_INSTANCE = new com.google.cloud.dataplex.v1.ListDataScanJobsResponse();
  }

  public static com.google.cloud.dataplex.v1.ListDataScanJobsResponse getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }
  // Parser implementation: delegates to Builder.mergeFrom and returns buildPartial() so that a
  // partially-read message is still attached to any InvalidProtocolBufferException.
  private static final com.google.protobuf.Parser<ListDataScanJobsResponse> PARSER =
      new com.google.protobuf.AbstractParser<ListDataScanJobsResponse>() {
        @java.lang.Override
        public ListDataScanJobsResponse parsePartialFrom(
            com.google.protobuf.CodedInputStream input,
            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
            throws com.google.protobuf.InvalidProtocolBufferException {
          Builder builder = newBuilder();
          try {
            builder.mergeFrom(input, extensionRegistry);
          } catch (com.google.protobuf.InvalidProtocolBufferException e) {
            throw e.setUnfinishedMessage(builder.buildPartial());
          } catch (com.google.protobuf.UninitializedMessageException e) {
            throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
          } catch (java.io.IOException e) {
            throw new com.google.protobuf.InvalidProtocolBufferException(e)
                .setUnfinishedMessage(builder.buildPartial());
          }
          return builder.buildPartial();
        }
      };

  public static com.google.protobuf.Parser<ListDataScanJobsResponse> parser() {
    return PARSER;
  }
  @java.lang.Override
  public com.google.protobuf.Parser<ListDataScanJobsResponse> getParserForType() {
    return PARSER;
  }

  @java.lang.Override
  public com.google.cloud.dataplex.v1.ListDataScanJobsResponse getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }
}
|
googleapis/google-cloud-java | 36,949 | java-artifact-registry/proto-google-cloud-artifact-registry-v1/src/main/java/com/google/devtools/artifactregistry/v1/CreateAttachmentRequest.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/devtools/artifactregistry/v1/attachment.proto
// Protobuf Java Version: 3.25.8
package com.google.devtools.artifactregistry.v1;
/**
*
*
* <pre>
* The request to create a new attachment.
* </pre>
*
* Protobuf type {@code google.devtools.artifactregistry.v1.CreateAttachmentRequest}
*/
public final class CreateAttachmentRequest extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.devtools.artifactregistry.v1.CreateAttachmentRequest)
CreateAttachmentRequestOrBuilder {
private static final long serialVersionUID = 0L;
// Use CreateAttachmentRequest.newBuilder() to construct.
private CreateAttachmentRequest(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private CreateAttachmentRequest() {
parent_ = "";
attachmentId_ = "";
}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
return new CreateAttachmentRequest();
}
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.devtools.artifactregistry.v1.AttachmentProto
.internal_static_google_devtools_artifactregistry_v1_CreateAttachmentRequest_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.devtools.artifactregistry.v1.AttachmentProto
.internal_static_google_devtools_artifactregistry_v1_CreateAttachmentRequest_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.devtools.artifactregistry.v1.CreateAttachmentRequest.class,
com.google.devtools.artifactregistry.v1.CreateAttachmentRequest.Builder.class);
}
private int bitField0_;
public static final int PARENT_FIELD_NUMBER = 1;
@SuppressWarnings("serial")
private volatile java.lang.Object parent_ = "";
/**
*
*
* <pre>
* Required. The name of the parent resource where the attachment will be
* created.
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return The parent.
*/
@java.lang.Override
public java.lang.String getParent() {
java.lang.Object ref = parent_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
parent_ = s;
return s;
}
}
/**
*
*
* <pre>
* Required. The name of the parent resource where the attachment will be
* created.
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return The bytes for parent.
*/
@java.lang.Override
public com.google.protobuf.ByteString getParentBytes() {
java.lang.Object ref = parent_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
parent_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
public static final int ATTACHMENT_ID_FIELD_NUMBER = 2;
@SuppressWarnings("serial")
private volatile java.lang.Object attachmentId_ = "";
/**
*
*
* <pre>
* Required. The attachment id to use for this attachment.
* </pre>
*
* <code>string attachment_id = 2 [(.google.api.field_behavior) = REQUIRED];</code>
*
* @return The attachmentId.
*/
@java.lang.Override
public java.lang.String getAttachmentId() {
java.lang.Object ref = attachmentId_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
attachmentId_ = s;
return s;
}
}
/**
*
*
* <pre>
* Required. The attachment id to use for this attachment.
* </pre>
*
* <code>string attachment_id = 2 [(.google.api.field_behavior) = REQUIRED];</code>
*
* @return The bytes for attachmentId.
*/
@java.lang.Override
public com.google.protobuf.ByteString getAttachmentIdBytes() {
java.lang.Object ref = attachmentId_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
attachmentId_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
  public static final int ATTACHMENT_FIELD_NUMBER = 3;

  // Message field; presence is tracked by bit 0x00000001 of bitField0_
  // rather than by a null check (see hasAttachment()).
  private com.google.devtools.artifactregistry.v1.Attachment attachment_;

  /**
   *
   *
   * <pre>
   * Required. The attachment to be created.
   * </pre>
   *
   * <code>
   * .google.devtools.artifactregistry.v1.Attachment attachment = 3 [(.google.api.field_behavior) = REQUIRED];
   * </code>
   *
   * @return Whether the attachment field is set.
   */
  @java.lang.Override
  public boolean hasAttachment() {
    return ((bitField0_ & 0x00000001) != 0);
  }

  /**
   *
   *
   * <pre>
   * Required. The attachment to be created.
   * </pre>
   *
   * <code>
   * .google.devtools.artifactregistry.v1.Attachment attachment = 3 [(.google.api.field_behavior) = REQUIRED];
   * </code>
   *
   * @return The attachment. Never null; returns the default instance when unset.
   */
  @java.lang.Override
  public com.google.devtools.artifactregistry.v1.Attachment getAttachment() {
    return attachment_ == null
        ? com.google.devtools.artifactregistry.v1.Attachment.getDefaultInstance()
        : attachment_;
  }

  /**
   *
   *
   * <pre>
   * Required. The attachment to be created.
   * </pre>
   *
   * <code>
   * .google.devtools.artifactregistry.v1.Attachment attachment = 3 [(.google.api.field_behavior) = REQUIRED];
   * </code>
   */
  @java.lang.Override
  public com.google.devtools.artifactregistry.v1.AttachmentOrBuilder getAttachmentOrBuilder() {
    return attachment_ == null
        ? com.google.devtools.artifactregistry.v1.Attachment.getDefaultInstance()
        : attachment_;
  }
  // Memoized initialization check: -1 = not computed, 0 = false, 1 = true.
  private byte memoizedIsInitialized = -1;

  /**
   * Always initialized: this message has no proto2-style required fields
   * (REQUIRED here is an API annotation, not a wire-level constraint).
   */
  @java.lang.Override
  public final boolean isInitialized() {
    byte isInitialized = memoizedIsInitialized;
    if (isInitialized == 1) return true;
    if (isInitialized == 0) return false;
    memoizedIsInitialized = 1;
    return true;
  }
  /**
   * Serializes this message to {@code output} in field-number order,
   * skipping empty strings and the unset message field (proto3 default
   * values are not written to the wire).
   */
  @java.lang.Override
  public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(parent_)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 1, parent_);
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(attachmentId_)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 2, attachmentId_);
    }
    if (((bitField0_ & 0x00000001) != 0)) {
      output.writeMessage(3, getAttachment());
    }
    getUnknownFields().writeTo(output);
  }
  /**
   * Computes the serialized byte size, mirroring writeTo()'s field
   * selection exactly. The result is memoized in {@code memoizedSize}
   * (-1 means "not yet computed"); messages are immutable, so caching is safe.
   */
  @java.lang.Override
  public int getSerializedSize() {
    int size = memoizedSize;
    if (size != -1) return size;
    size = 0;
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(parent_)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, parent_);
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(attachmentId_)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, attachmentId_);
    }
    if (((bitField0_ & 0x00000001) != 0)) {
      size += com.google.protobuf.CodedOutputStream.computeMessageSize(3, getAttachment());
    }
    size += getUnknownFields().getSerializedSize();
    memoizedSize = size;
    return size;
  }
  /**
   * Value equality over all declared fields plus unknown fields. The
   * optional message field is compared only when present on both sides.
   */
  @java.lang.Override
  public boolean equals(final java.lang.Object obj) {
    if (obj == this) {
      return true;
    }
    if (!(obj instanceof com.google.devtools.artifactregistry.v1.CreateAttachmentRequest)) {
      return super.equals(obj);
    }
    com.google.devtools.artifactregistry.v1.CreateAttachmentRequest other =
        (com.google.devtools.artifactregistry.v1.CreateAttachmentRequest) obj;
    if (!getParent().equals(other.getParent())) return false;
    if (!getAttachmentId().equals(other.getAttachmentId())) return false;
    if (hasAttachment() != other.hasAttachment()) return false;
    if (hasAttachment()) {
      if (!getAttachment().equals(other.getAttachment())) return false;
    }
    if (!getUnknownFields().equals(other.getUnknownFields())) return false;
    return true;
  }
  /**
   * Hash code consistent with equals(): folds in each field number and
   * value that equals() compares. Memoized in {@code memoizedHashCode}
   * (0 means "not yet computed").
   */
  @java.lang.Override
  public int hashCode() {
    if (memoizedHashCode != 0) {
      return memoizedHashCode;
    }
    int hash = 41;
    hash = (19 * hash) + getDescriptor().hashCode();
    hash = (37 * hash) + PARENT_FIELD_NUMBER;
    hash = (53 * hash) + getParent().hashCode();
    hash = (37 * hash) + ATTACHMENT_ID_FIELD_NUMBER;
    hash = (53 * hash) + getAttachmentId().hashCode();
    if (hasAttachment()) {
      hash = (37 * hash) + ATTACHMENT_FIELD_NUMBER;
      hash = (53 * hash) + getAttachment().hashCode();
    }
    hash = (29 * hash) + getUnknownFields().hashCode();
    memoizedHashCode = hash;
    return hash;
  }
  // Standard generated parse entry points. All overloads delegate to the
  // shared PARSER; the stream-based variants wrap IOExceptions via the
  // GeneratedMessageV3 helpers, and the *Delimited* variants read a
  // varint length prefix before the message bytes.
  public static com.google.devtools.artifactregistry.v1.CreateAttachmentRequest parseFrom(
      java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.devtools.artifactregistry.v1.CreateAttachmentRequest parseFrom(
      java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.devtools.artifactregistry.v1.CreateAttachmentRequest parseFrom(
      com.google.protobuf.ByteString data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.devtools.artifactregistry.v1.CreateAttachmentRequest parseFrom(
      com.google.protobuf.ByteString data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.devtools.artifactregistry.v1.CreateAttachmentRequest parseFrom(
      byte[] data) throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.devtools.artifactregistry.v1.CreateAttachmentRequest parseFrom(
      byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.devtools.artifactregistry.v1.CreateAttachmentRequest parseFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }

  public static com.google.devtools.artifactregistry.v1.CreateAttachmentRequest parseFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }

  public static com.google.devtools.artifactregistry.v1.CreateAttachmentRequest parseDelimitedFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
  }

  public static com.google.devtools.artifactregistry.v1.CreateAttachmentRequest parseDelimitedFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
        PARSER, input, extensionRegistry);
  }

  public static com.google.devtools.artifactregistry.v1.CreateAttachmentRequest parseFrom(
      com.google.protobuf.CodedInputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }

  public static com.google.devtools.artifactregistry.v1.CreateAttachmentRequest parseFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }
  @java.lang.Override
  public Builder newBuilderForType() {
    return newBuilder();
  }

  /** Creates a new builder seeded from the default (empty) instance. */
  public static Builder newBuilder() {
    return DEFAULT_INSTANCE.toBuilder();
  }

  /** Creates a new builder pre-populated with {@code prototype}'s fields. */
  public static Builder newBuilder(
      com.google.devtools.artifactregistry.v1.CreateAttachmentRequest prototype) {
    return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
  }

  @java.lang.Override
  public Builder toBuilder() {
    // The default instance yields a fresh empty builder; any other
    // instance is copied into the builder via mergeFrom.
    return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
  }

  @java.lang.Override
  protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
    Builder builder = new Builder(parent);
    return builder;
  }
  /**
   *
   *
   * <pre>
   * The request to create a new attachment.
   * </pre>
   *
   * Protobuf type {@code google.devtools.artifactregistry.v1.CreateAttachmentRequest}
   */
  public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
      implements
      // @@protoc_insertion_point(builder_implements:google.devtools.artifactregistry.v1.CreateAttachmentRequest)
      com.google.devtools.artifactregistry.v1.CreateAttachmentRequestOrBuilder {
    public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
      return com.google.devtools.artifactregistry.v1.AttachmentProto
          .internal_static_google_devtools_artifactregistry_v1_CreateAttachmentRequest_descriptor;
    }

    @java.lang.Override
    protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return com.google.devtools.artifactregistry.v1.AttachmentProto
          .internal_static_google_devtools_artifactregistry_v1_CreateAttachmentRequest_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              com.google.devtools.artifactregistry.v1.CreateAttachmentRequest.class,
              com.google.devtools.artifactregistry.v1.CreateAttachmentRequest.Builder.class);
    }

    // Construct using com.google.devtools.artifactregistry.v1.CreateAttachmentRequest.newBuilder()
    private Builder() {
      maybeForceBuilderInitialization();
    }

    private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
      super(parent);
      maybeForceBuilderInitialization();
    }

    // Eagerly creates nested-field builders only when the runtime requests
    // it (alwaysUseFieldBuilders); otherwise they are created lazily.
    private void maybeForceBuilderInitialization() {
      if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) {
        getAttachmentFieldBuilder();
      }
    }

    @java.lang.Override
    public Builder clear() {
      super.clear();
      bitField0_ = 0;
      parent_ = "";
      attachmentId_ = "";
      attachment_ = null;
      if (attachmentBuilder_ != null) {
        attachmentBuilder_.dispose();
        attachmentBuilder_ = null;
      }
      return this;
    }

    @java.lang.Override
    public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
      return com.google.devtools.artifactregistry.v1.AttachmentProto
          .internal_static_google_devtools_artifactregistry_v1_CreateAttachmentRequest_descriptor;
    }

    @java.lang.Override
    public com.google.devtools.artifactregistry.v1.CreateAttachmentRequest
        getDefaultInstanceForType() {
      return com.google.devtools.artifactregistry.v1.CreateAttachmentRequest.getDefaultInstance();
    }

    @java.lang.Override
    public com.google.devtools.artifactregistry.v1.CreateAttachmentRequest build() {
      com.google.devtools.artifactregistry.v1.CreateAttachmentRequest result = buildPartial();
      if (!result.isInitialized()) {
        throw newUninitializedMessageException(result);
      }
      return result;
    }

    @java.lang.Override
    public com.google.devtools.artifactregistry.v1.CreateAttachmentRequest buildPartial() {
      com.google.devtools.artifactregistry.v1.CreateAttachmentRequest result =
          new com.google.devtools.artifactregistry.v1.CreateAttachmentRequest(this);
      if (bitField0_ != 0) {
        buildPartial0(result);
      }
      onBuilt();
      return result;
    }

    // Copies only the fields whose builder-side presence bits are set.
    // Note the bit remapping: builder bits are 0x1 (parent), 0x2
    // (attachmentId), 0x4 (attachment), while the built message tracks
    // only the message field, as its bit 0x1.
    private void buildPartial0(
        com.google.devtools.artifactregistry.v1.CreateAttachmentRequest result) {
      int from_bitField0_ = bitField0_;
      if (((from_bitField0_ & 0x00000001) != 0)) {
        result.parent_ = parent_;
      }
      if (((from_bitField0_ & 0x00000002) != 0)) {
        result.attachmentId_ = attachmentId_;
      }
      int to_bitField0_ = 0;
      if (((from_bitField0_ & 0x00000004) != 0)) {
        result.attachment_ = attachmentBuilder_ == null ? attachment_ : attachmentBuilder_.build();
        to_bitField0_ |= 0x00000001;
      }
      result.bitField0_ |= to_bitField0_;
    }

    @java.lang.Override
    public Builder clone() {
      return super.clone();
    }

    @java.lang.Override
    public Builder setField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.setField(field, value);
    }

    @java.lang.Override
    public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
      return super.clearField(field);
    }

    @java.lang.Override
    public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
      return super.clearOneof(oneof);
    }

    @java.lang.Override
    public Builder setRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
      return super.setRepeatedField(field, index, value);
    }

    @java.lang.Override
    public Builder addRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.addRepeatedField(field, value);
    }

    @java.lang.Override
    public Builder mergeFrom(com.google.protobuf.Message other) {
      if (other instanceof com.google.devtools.artifactregistry.v1.CreateAttachmentRequest) {
        return mergeFrom((com.google.devtools.artifactregistry.v1.CreateAttachmentRequest) other);
      } else {
        super.mergeFrom(other);
        return this;
      }
    }

    // Typed merge: non-empty strings overwrite; a set message field is
    // merged recursively via mergeAttachment.
    public Builder mergeFrom(
        com.google.devtools.artifactregistry.v1.CreateAttachmentRequest other) {
      if (other
          == com.google.devtools.artifactregistry.v1.CreateAttachmentRequest.getDefaultInstance())
        return this;
      if (!other.getParent().isEmpty()) {
        parent_ = other.parent_;
        bitField0_ |= 0x00000001;
        onChanged();
      }
      if (!other.getAttachmentId().isEmpty()) {
        attachmentId_ = other.attachmentId_;
        bitField0_ |= 0x00000002;
        onChanged();
      }
      if (other.hasAttachment()) {
        mergeAttachment(other.getAttachment());
      }
      this.mergeUnknownFields(other.getUnknownFields());
      onChanged();
      return this;
    }

    @java.lang.Override
    public final boolean isInitialized() {
      return true;
    }

    // Wire-format parse loop. Tags are (field_number << 3) | wire_type:
    // 10 = field 1 (length-delimited), 18 = field 2, 26 = field 3.
    // Unknown fields are preserved via parseUnknownField.
    @java.lang.Override
    public Builder mergeFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      if (extensionRegistry == null) {
        throw new java.lang.NullPointerException();
      }
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              done = true;
              break;
            case 10:
              {
                parent_ = input.readStringRequireUtf8();
                bitField0_ |= 0x00000001;
                break;
              } // case 10
            case 18:
              {
                attachmentId_ = input.readStringRequireUtf8();
                bitField0_ |= 0x00000002;
                break;
              } // case 18
            case 26:
              {
                input.readMessage(getAttachmentFieldBuilder().getBuilder(), extensionRegistry);
                bitField0_ |= 0x00000004;
                break;
              } // case 26
            default:
              {
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
          } // switch (tag)
        } // while (!done)
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.unwrapIOException();
      } finally {
        onChanged();
      } // finally
      return this;
    }

    private int bitField0_;

    private java.lang.Object parent_ = "";

    /**
     *
     *
     * <pre>
     * Required. The name of the parent resource where the attachment will be
     * created.
     * </pre>
     *
     * <code>
     * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
     * </code>
     *
     * @return The parent.
     */
    public java.lang.String getParent() {
      java.lang.Object ref = parent_;
      if (!(ref instanceof java.lang.String)) {
        // Lazily decode cached wire bytes to a String and memoize it.
        com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        parent_ = s;
        return s;
      } else {
        return (java.lang.String) ref;
      }
    }

    /**
     *
     *
     * <pre>
     * Required. The name of the parent resource where the attachment will be
     * created.
     * </pre>
     *
     * <code>
     * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
     * </code>
     *
     * @return The bytes for parent.
     */
    public com.google.protobuf.ByteString getParentBytes() {
      java.lang.Object ref = parent_;
      if (ref instanceof String) {
        com.google.protobuf.ByteString b =
            com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
        parent_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }

    /**
     *
     *
     * <pre>
     * Required. The name of the parent resource where the attachment will be
     * created.
     * </pre>
     *
     * <code>
     * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
     * </code>
     *
     * @param value The parent to set. Must not be null.
     * @return This builder for chaining.
     */
    public Builder setParent(java.lang.String value) {
      if (value == null) {
        throw new NullPointerException();
      }
      parent_ = value;
      bitField0_ |= 0x00000001;
      onChanged();
      return this;
    }

    /**
     *
     *
     * <pre>
     * Required. The name of the parent resource where the attachment will be
     * created.
     * </pre>
     *
     * <code>
     * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
     * </code>
     *
     * @return This builder for chaining.
     */
    public Builder clearParent() {
      parent_ = getDefaultInstance().getParent();
      bitField0_ = (bitField0_ & ~0x00000001);
      onChanged();
      return this;
    }

    /**
     *
     *
     * <pre>
     * Required. The name of the parent resource where the attachment will be
     * created.
     * </pre>
     *
     * <code>
     * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
     * </code>
     *
     * @param value The bytes for parent to set. Must be valid UTF-8.
     * @return This builder for chaining.
     */
    public Builder setParentBytes(com.google.protobuf.ByteString value) {
      if (value == null) {
        throw new NullPointerException();
      }
      checkByteStringIsUtf8(value);
      parent_ = value;
      bitField0_ |= 0x00000001;
      onChanged();
      return this;
    }

    private java.lang.Object attachmentId_ = "";

    /**
     *
     *
     * <pre>
     * Required. The attachment id to use for this attachment.
     * </pre>
     *
     * <code>string attachment_id = 2 [(.google.api.field_behavior) = REQUIRED];</code>
     *
     * @return The attachmentId.
     */
    public java.lang.String getAttachmentId() {
      java.lang.Object ref = attachmentId_;
      if (!(ref instanceof java.lang.String)) {
        com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        attachmentId_ = s;
        return s;
      } else {
        return (java.lang.String) ref;
      }
    }

    /**
     *
     *
     * <pre>
     * Required. The attachment id to use for this attachment.
     * </pre>
     *
     * <code>string attachment_id = 2 [(.google.api.field_behavior) = REQUIRED];</code>
     *
     * @return The bytes for attachmentId.
     */
    public com.google.protobuf.ByteString getAttachmentIdBytes() {
      java.lang.Object ref = attachmentId_;
      if (ref instanceof String) {
        com.google.protobuf.ByteString b =
            com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
        attachmentId_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }

    /**
     *
     *
     * <pre>
     * Required. The attachment id to use for this attachment.
     * </pre>
     *
     * <code>string attachment_id = 2 [(.google.api.field_behavior) = REQUIRED];</code>
     *
     * @param value The attachmentId to set. Must not be null.
     * @return This builder for chaining.
     */
    public Builder setAttachmentId(java.lang.String value) {
      if (value == null) {
        throw new NullPointerException();
      }
      attachmentId_ = value;
      bitField0_ |= 0x00000002;
      onChanged();
      return this;
    }

    /**
     *
     *
     * <pre>
     * Required. The attachment id to use for this attachment.
     * </pre>
     *
     * <code>string attachment_id = 2 [(.google.api.field_behavior) = REQUIRED];</code>
     *
     * @return This builder for chaining.
     */
    public Builder clearAttachmentId() {
      attachmentId_ = getDefaultInstance().getAttachmentId();
      bitField0_ = (bitField0_ & ~0x00000002);
      onChanged();
      return this;
    }

    /**
     *
     *
     * <pre>
     * Required. The attachment id to use for this attachment.
     * </pre>
     *
     * <code>string attachment_id = 2 [(.google.api.field_behavior) = REQUIRED];</code>
     *
     * @param value The bytes for attachmentId to set. Must be valid UTF-8.
     * @return This builder for chaining.
     */
    public Builder setAttachmentIdBytes(com.google.protobuf.ByteString value) {
      if (value == null) {
        throw new NullPointerException();
      }
      checkByteStringIsUtf8(value);
      attachmentId_ = value;
      bitField0_ |= 0x00000002;
      onChanged();
      return this;
    }

    // Message field state: either the plain message (attachment_) or, once
    // getAttachmentBuilder()/parse has been called, the nested field
    // builder (attachmentBuilder_). At most one of the two is active.
    private com.google.devtools.artifactregistry.v1.Attachment attachment_;
    private com.google.protobuf.SingleFieldBuilderV3<
            com.google.devtools.artifactregistry.v1.Attachment,
            com.google.devtools.artifactregistry.v1.Attachment.Builder,
            com.google.devtools.artifactregistry.v1.AttachmentOrBuilder>
        attachmentBuilder_;

    /**
     *
     *
     * <pre>
     * Required. The attachment to be created.
     * </pre>
     *
     * <code>
     * .google.devtools.artifactregistry.v1.Attachment attachment = 3 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     *
     * @return Whether the attachment field is set.
     */
    public boolean hasAttachment() {
      return ((bitField0_ & 0x00000004) != 0);
    }

    /**
     *
     *
     * <pre>
     * Required. The attachment to be created.
     * </pre>
     *
     * <code>
     * .google.devtools.artifactregistry.v1.Attachment attachment = 3 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     *
     * @return The attachment. Never null; returns the default instance when unset.
     */
    public com.google.devtools.artifactregistry.v1.Attachment getAttachment() {
      if (attachmentBuilder_ == null) {
        return attachment_ == null
            ? com.google.devtools.artifactregistry.v1.Attachment.getDefaultInstance()
            : attachment_;
      } else {
        return attachmentBuilder_.getMessage();
      }
    }

    /**
     *
     *
     * <pre>
     * Required. The attachment to be created.
     * </pre>
     *
     * <code>
     * .google.devtools.artifactregistry.v1.Attachment attachment = 3 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     */
    public Builder setAttachment(com.google.devtools.artifactregistry.v1.Attachment value) {
      if (attachmentBuilder_ == null) {
        if (value == null) {
          throw new NullPointerException();
        }
        attachment_ = value;
      } else {
        attachmentBuilder_.setMessage(value);
      }
      bitField0_ |= 0x00000004;
      onChanged();
      return this;
    }

    /**
     *
     *
     * <pre>
     * Required. The attachment to be created.
     * </pre>
     *
     * <code>
     * .google.devtools.artifactregistry.v1.Attachment attachment = 3 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     */
    public Builder setAttachment(
        com.google.devtools.artifactregistry.v1.Attachment.Builder builderForValue) {
      if (attachmentBuilder_ == null) {
        attachment_ = builderForValue.build();
      } else {
        attachmentBuilder_.setMessage(builderForValue.build());
      }
      bitField0_ |= 0x00000004;
      onChanged();
      return this;
    }

    /**
     *
     *
     * <pre>
     * Required. The attachment to be created.
     * </pre>
     *
     * <code>
     * .google.devtools.artifactregistry.v1.Attachment attachment = 3 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     */
    public Builder mergeAttachment(com.google.devtools.artifactregistry.v1.Attachment value) {
      if (attachmentBuilder_ == null) {
        // Merge field-by-field only when a non-default message is already
        // present; otherwise simply adopt the incoming value. (The
        // reference compare against the default instance is the standard
        // generated pattern: the default singleton is the only instance
        // that can be "set" while carrying no data.)
        if (((bitField0_ & 0x00000004) != 0)
            && attachment_ != null
            && attachment_
                != com.google.devtools.artifactregistry.v1.Attachment.getDefaultInstance()) {
          getAttachmentBuilder().mergeFrom(value);
        } else {
          attachment_ = value;
        }
      } else {
        attachmentBuilder_.mergeFrom(value);
      }
      if (attachment_ != null) {
        bitField0_ |= 0x00000004;
        onChanged();
      }
      return this;
    }

    /**
     *
     *
     * <pre>
     * Required. The attachment to be created.
     * </pre>
     *
     * <code>
     * .google.devtools.artifactregistry.v1.Attachment attachment = 3 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     */
    public Builder clearAttachment() {
      bitField0_ = (bitField0_ & ~0x00000004);
      attachment_ = null;
      if (attachmentBuilder_ != null) {
        attachmentBuilder_.dispose();
        attachmentBuilder_ = null;
      }
      onChanged();
      return this;
    }

    /**
     *
     *
     * <pre>
     * Required. The attachment to be created.
     * </pre>
     *
     * <code>
     * .google.devtools.artifactregistry.v1.Attachment attachment = 3 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     */
    public com.google.devtools.artifactregistry.v1.Attachment.Builder getAttachmentBuilder() {
      // Marks the field present: obtaining the nested builder implies the
      // caller intends to populate it.
      bitField0_ |= 0x00000004;
      onChanged();
      return getAttachmentFieldBuilder().getBuilder();
    }

    /**
     *
     *
     * <pre>
     * Required. The attachment to be created.
     * </pre>
     *
     * <code>
     * .google.devtools.artifactregistry.v1.Attachment attachment = 3 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     */
    public com.google.devtools.artifactregistry.v1.AttachmentOrBuilder getAttachmentOrBuilder() {
      if (attachmentBuilder_ != null) {
        return attachmentBuilder_.getMessageOrBuilder();
      } else {
        return attachment_ == null
            ? com.google.devtools.artifactregistry.v1.Attachment.getDefaultInstance()
            : attachment_;
      }
    }

    /**
     *
     *
     * <pre>
     * Required. The attachment to be created.
     * </pre>
     *
     * <code>
     * .google.devtools.artifactregistry.v1.Attachment attachment = 3 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     */
    private com.google.protobuf.SingleFieldBuilderV3<
            com.google.devtools.artifactregistry.v1.Attachment,
            com.google.devtools.artifactregistry.v1.Attachment.Builder,
            com.google.devtools.artifactregistry.v1.AttachmentOrBuilder>
        getAttachmentFieldBuilder() {
      if (attachmentBuilder_ == null) {
        // Transfers ownership of the current value into the lazily-created
        // field builder, then nulls the plain field (builder takes over).
        attachmentBuilder_ =
            new com.google.protobuf.SingleFieldBuilderV3<
                com.google.devtools.artifactregistry.v1.Attachment,
                com.google.devtools.artifactregistry.v1.Attachment.Builder,
                com.google.devtools.artifactregistry.v1.AttachmentOrBuilder>(
                getAttachment(), getParentForChildren(), isClean());
        attachment_ = null;
      }
      return attachmentBuilder_;
    }

    @java.lang.Override
    public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.setUnknownFields(unknownFields);
    }

    @java.lang.Override
    public final Builder mergeUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.mergeUnknownFields(unknownFields);
    }

    // @@protoc_insertion_point(builder_scope:google.devtools.artifactregistry.v1.CreateAttachmentRequest)
  }
  // @@protoc_insertion_point(class_scope:google.devtools.artifactregistry.v1.CreateAttachmentRequest)
  private static final com.google.devtools.artifactregistry.v1.CreateAttachmentRequest
      DEFAULT_INSTANCE;

  static {
    DEFAULT_INSTANCE = new com.google.devtools.artifactregistry.v1.CreateAttachmentRequest();
  }

  public static com.google.devtools.artifactregistry.v1.CreateAttachmentRequest
      getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }

  // Parser delegates to Builder.mergeFrom and attaches the partially
  // parsed message to any thrown exception via setUnfinishedMessage, so
  // callers can inspect what was read before the failure.
  private static final com.google.protobuf.Parser<CreateAttachmentRequest> PARSER =
      new com.google.protobuf.AbstractParser<CreateAttachmentRequest>() {
        @java.lang.Override
        public CreateAttachmentRequest parsePartialFrom(
            com.google.protobuf.CodedInputStream input,
            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
            throws com.google.protobuf.InvalidProtocolBufferException {
          Builder builder = newBuilder();
          try {
            builder.mergeFrom(input, extensionRegistry);
          } catch (com.google.protobuf.InvalidProtocolBufferException e) {
            throw e.setUnfinishedMessage(builder.buildPartial());
          } catch (com.google.protobuf.UninitializedMessageException e) {
            throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
          } catch (java.io.IOException e) {
            throw new com.google.protobuf.InvalidProtocolBufferException(e)
                .setUnfinishedMessage(builder.buildPartial());
          }
          return builder.buildPartial();
        }
      };

  public static com.google.protobuf.Parser<CreateAttachmentRequest> parser() {
    return PARSER;
  }

  @java.lang.Override
  public com.google.protobuf.Parser<CreateAttachmentRequest> getParserForType() {
    return PARSER;
  }

  @java.lang.Override
  public com.google.devtools.artifactregistry.v1.CreateAttachmentRequest
      getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }
}
|
googleapis/google-cloud-java | 36,949 | java-artifact-registry/proto-google-cloud-artifact-registry-v1/src/main/java/com/google/devtools/artifactregistry/v1/CreateRepositoryRequest.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/devtools/artifactregistry/v1/repository.proto
// Protobuf Java Version: 3.25.8
package com.google.devtools.artifactregistry.v1;
/**
*
*
* <pre>
* The request to create a new repository.
* </pre>
*
* Protobuf type {@code google.devtools.artifactregistry.v1.CreateRepositoryRequest}
*/
public final class CreateRepositoryRequest extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.devtools.artifactregistry.v1.CreateRepositoryRequest)
CreateRepositoryRequestOrBuilder {
private static final long serialVersionUID = 0L;
  // Use CreateRepositoryRequest.newBuilder() to construct.
  private CreateRepositoryRequest(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
    super(builder);
  }

  // Default-instance constructor: string fields start as the empty string.
  private CreateRepositoryRequest() {
    parent_ = "";
    repositoryId_ = "";
  }

  // Used by the protobuf runtime to allocate instances reflectively
  // without invoking a public constructor.
  @java.lang.Override
  @SuppressWarnings({"unused"})
  protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
    return new CreateRepositoryRequest();
  }
  /** Returns the message descriptor generated from repository.proto. */
  public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
    return com.google.devtools.artifactregistry.v1.RepositoryProto
        .internal_static_google_devtools_artifactregistry_v1_CreateRepositoryRequest_descriptor;
  }

  @java.lang.Override
  protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internalGetFieldAccessorTable() {
    return com.google.devtools.artifactregistry.v1.RepositoryProto
        .internal_static_google_devtools_artifactregistry_v1_CreateRepositoryRequest_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            com.google.devtools.artifactregistry.v1.CreateRepositoryRequest.class,
            com.google.devtools.artifactregistry.v1.CreateRepositoryRequest.Builder.class);
  }
  // Presence bits for message-typed fields (bit 0x1 = repository).
  private int bitField0_;

  public static final int PARENT_FIELD_NUMBER = 1;

  // Stores either a String or a ByteString; accessors convert lazily and
  // cache the converted form in place (hence volatile Object).
  @SuppressWarnings("serial")
  private volatile java.lang.Object parent_ = "";

  /**
   *
   *
   * <pre>
   * Required. The name of the parent resource where the repository will be
   * created.
   * </pre>
   *
   * <code>
   * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
   * </code>
   *
   * @return The parent.
   */
  @java.lang.Override
  public java.lang.String getParent() {
    java.lang.Object ref = parent_;
    if (ref instanceof java.lang.String) {
      return (java.lang.String) ref;
    } else {
      // Decode wire bytes once and memoize the String.
      com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
      java.lang.String s = bs.toStringUtf8();
      parent_ = s;
      return s;
    }
  }

  /**
   *
   *
   * <pre>
   * Required. The name of the parent resource where the repository will be
   * created.
   * </pre>
   *
   * <code>
   * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
   * </code>
   *
   * @return The bytes for parent.
   */
  @java.lang.Override
  public com.google.protobuf.ByteString getParentBytes() {
    java.lang.Object ref = parent_;
    if (ref instanceof java.lang.String) {
      // Encode to UTF-8 once and memoize the ByteString.
      com.google.protobuf.ByteString b =
          com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
      parent_ = b;
      return b;
    } else {
      return (com.google.protobuf.ByteString) ref;
    }
  }
  public static final int REPOSITORY_ID_FIELD_NUMBER = 2;

  // Same lazy String/ByteString caching scheme as parent_ above.
  @SuppressWarnings("serial")
  private volatile java.lang.Object repositoryId_ = "";

  /**
   *
   *
   * <pre>
   * Required. The repository id to use for this repository.
   * </pre>
   *
   * <code>string repository_id = 2 [(.google.api.field_behavior) = REQUIRED];</code>
   *
   * @return The repositoryId.
   */
  @java.lang.Override
  public java.lang.String getRepositoryId() {
    java.lang.Object ref = repositoryId_;
    if (ref instanceof java.lang.String) {
      return (java.lang.String) ref;
    } else {
      com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
      java.lang.String s = bs.toStringUtf8();
      repositoryId_ = s;
      return s;
    }
  }

  /**
   *
   *
   * <pre>
   * Required. The repository id to use for this repository.
   * </pre>
   *
   * <code>string repository_id = 2 [(.google.api.field_behavior) = REQUIRED];</code>
   *
   * @return The bytes for repositoryId.
   */
  @java.lang.Override
  public com.google.protobuf.ByteString getRepositoryIdBytes() {
    java.lang.Object ref = repositoryId_;
    if (ref instanceof java.lang.String) {
      com.google.protobuf.ByteString b =
          com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
      repositoryId_ = b;
      return b;
    } else {
      return (com.google.protobuf.ByteString) ref;
    }
  }
  public static final int REPOSITORY_FIELD_NUMBER = 3;

  // Message field; presence is tracked by bit 0x00000001 of bitField0_.
  private com.google.devtools.artifactregistry.v1.Repository repository_;

  /**
   *
   *
   * <pre>
   * Required. The repository to be created.
   * </pre>
   *
   * <code>
   * .google.devtools.artifactregistry.v1.Repository repository = 3 [(.google.api.field_behavior) = REQUIRED];
   * </code>
   *
   * @return Whether the repository field is set.
   */
  @java.lang.Override
  public boolean hasRepository() {
    return ((bitField0_ & 0x00000001) != 0);
  }

  /**
   *
   *
   * <pre>
   * Required. The repository to be created.
   * </pre>
   *
   * <code>
   * .google.devtools.artifactregistry.v1.Repository repository = 3 [(.google.api.field_behavior) = REQUIRED];
   * </code>
   *
   * @return The repository. Never null; returns the default instance when unset.
   */
  @java.lang.Override
  public com.google.devtools.artifactregistry.v1.Repository getRepository() {
    return repository_ == null
        ? com.google.devtools.artifactregistry.v1.Repository.getDefaultInstance()
        : repository_;
  }

  /**
   *
   *
   * <pre>
   * Required. The repository to be created.
   * </pre>
   *
   * <code>
   * .google.devtools.artifactregistry.v1.Repository repository = 3 [(.google.api.field_behavior) = REQUIRED];
   * </code>
   */
  @java.lang.Override
  public com.google.devtools.artifactregistry.v1.RepositoryOrBuilder getRepositoryOrBuilder() {
    return repository_ == null
        ? com.google.devtools.artifactregistry.v1.Repository.getDefaultInstance()
        : repository_;
  }
  // Memoized result of isInitialized(): -1 = not yet computed, 0 = false, 1 = true.
  private byte memoizedIsInitialized = -1;
  @java.lang.Override
  public final boolean isInitialized() {
    byte isInitialized = memoizedIsInitialized;
    if (isInitialized == 1) return true;
    if (isInitialized == 0) return false;
    // No proto2-style required fields exist, so the message is always initialized.
    memoizedIsInitialized = 1;
    return true;
  }
  @java.lang.Override
  public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
    // Serialize fields in field-number order; default (empty/unset) values are skipped.
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(parent_)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 1, parent_);
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(repositoryId_)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 2, repositoryId_);
    }
    if (((bitField0_ & 0x00000001) != 0)) {
      output.writeMessage(3, getRepository());
    }
    getUnknownFields().writeTo(output);
  }
  @java.lang.Override
  public int getSerializedSize() {
    // Size computation mirrors writeTo() exactly and is memoized in memoizedSize.
    int size = memoizedSize;
    if (size != -1) return size;
    size = 0;
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(parent_)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, parent_);
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(repositoryId_)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, repositoryId_);
    }
    if (((bitField0_ & 0x00000001) != 0)) {
      size += com.google.protobuf.CodedOutputStream.computeMessageSize(3, getRepository());
    }
    size += getUnknownFields().getSerializedSize();
    memoizedSize = size;
    return size;
  }
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof com.google.devtools.artifactregistry.v1.CreateRepositoryRequest)) {
return super.equals(obj);
}
com.google.devtools.artifactregistry.v1.CreateRepositoryRequest other =
(com.google.devtools.artifactregistry.v1.CreateRepositoryRequest) obj;
if (!getParent().equals(other.getParent())) return false;
if (!getRepositoryId().equals(other.getRepositoryId())) return false;
if (hasRepository() != other.hasRepository()) return false;
if (hasRepository()) {
if (!getRepository().equals(other.getRepository())) return false;
}
if (!getUnknownFields().equals(other.getUnknownFields())) return false;
return true;
}
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
hash = (37 * hash) + PARENT_FIELD_NUMBER;
hash = (53 * hash) + getParent().hashCode();
hash = (37 * hash) + REPOSITORY_ID_FIELD_NUMBER;
hash = (53 * hash) + getRepositoryId().hashCode();
if (hasRepository()) {
hash = (37 * hash) + REPOSITORY_FIELD_NUMBER;
hash = (53 * hash) + getRepository().hashCode();
}
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
  // Standard generated parseFrom overloads: all delegate to PARSER (in-memory inputs)
  // or to GeneratedMessageV3 parseWithIOException helpers (stream inputs).
  public static com.google.devtools.artifactregistry.v1.CreateRepositoryRequest parseFrom(
      java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.devtools.artifactregistry.v1.CreateRepositoryRequest parseFrom(
      java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.devtools.artifactregistry.v1.CreateRepositoryRequest parseFrom(
      com.google.protobuf.ByteString data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.devtools.artifactregistry.v1.CreateRepositoryRequest parseFrom(
      com.google.protobuf.ByteString data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.devtools.artifactregistry.v1.CreateRepositoryRequest parseFrom(
      byte[] data) throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.devtools.artifactregistry.v1.CreateRepositoryRequest parseFrom(
      byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.devtools.artifactregistry.v1.CreateRepositoryRequest parseFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }
  public static com.google.devtools.artifactregistry.v1.CreateRepositoryRequest parseFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }
  // Delimited variants read a varint length prefix before the message payload.
  public static com.google.devtools.artifactregistry.v1.CreateRepositoryRequest parseDelimitedFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
  }
  public static com.google.devtools.artifactregistry.v1.CreateRepositoryRequest parseDelimitedFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
        PARSER, input, extensionRegistry);
  }
  public static com.google.devtools.artifactregistry.v1.CreateRepositoryRequest parseFrom(
      com.google.protobuf.CodedInputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }
  public static com.google.devtools.artifactregistry.v1.CreateRepositoryRequest parseFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }
  @java.lang.Override
  public Builder newBuilderForType() {
    return newBuilder();
  }
  public static Builder newBuilder() {
    return DEFAULT_INSTANCE.toBuilder();
  }
  public static Builder newBuilder(
      com.google.devtools.artifactregistry.v1.CreateRepositoryRequest prototype) {
    return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
  }
  @java.lang.Override
  public Builder toBuilder() {
    // Avoid an unnecessary mergeFrom when builders are created from the default instance.
    return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
  }
  @java.lang.Override
  protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
    Builder builder = new Builder(parent);
    return builder;
  }
  /**
   *
   *
   * <pre>
   * The request to create a new repository.
   * </pre>
   *
   * Protobuf type {@code google.devtools.artifactregistry.v1.CreateRepositoryRequest}
   */
  public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
      implements
      // @@protoc_insertion_point(builder_implements:google.devtools.artifactregistry.v1.CreateRepositoryRequest)
      com.google.devtools.artifactregistry.v1.CreateRepositoryRequestOrBuilder {
    public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
      return com.google.devtools.artifactregistry.v1.RepositoryProto
          .internal_static_google_devtools_artifactregistry_v1_CreateRepositoryRequest_descriptor;
    }
    @java.lang.Override
    protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return com.google.devtools.artifactregistry.v1.RepositoryProto
          .internal_static_google_devtools_artifactregistry_v1_CreateRepositoryRequest_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              com.google.devtools.artifactregistry.v1.CreateRepositoryRequest.class,
              com.google.devtools.artifactregistry.v1.CreateRepositoryRequest.Builder.class);
    }
    // Construct using com.google.devtools.artifactregistry.v1.CreateRepositoryRequest.newBuilder()
    private Builder() {
      maybeForceBuilderInitialization();
    }
    private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
      super(parent);
      maybeForceBuilderInitialization();
    }
    private void maybeForceBuilderInitialization() {
      // Eagerly create nested field builders when the runtime requires it (e.g. for
      // nested-builder support); otherwise they are created lazily on first use.
      if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) {
        getRepositoryFieldBuilder();
      }
    }
    @java.lang.Override
    public Builder clear() {
      super.clear();
      bitField0_ = 0;
      parent_ = "";
      repositoryId_ = "";
      repository_ = null;
      if (repositoryBuilder_ != null) {
        repositoryBuilder_.dispose();
        repositoryBuilder_ = null;
      }
      return this;
    }
    @java.lang.Override
    public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
      return com.google.devtools.artifactregistry.v1.RepositoryProto
          .internal_static_google_devtools_artifactregistry_v1_CreateRepositoryRequest_descriptor;
    }
    @java.lang.Override
    public com.google.devtools.artifactregistry.v1.CreateRepositoryRequest
        getDefaultInstanceForType() {
      return com.google.devtools.artifactregistry.v1.CreateRepositoryRequest.getDefaultInstance();
    }
    @java.lang.Override
    public com.google.devtools.artifactregistry.v1.CreateRepositoryRequest build() {
      com.google.devtools.artifactregistry.v1.CreateRepositoryRequest result = buildPartial();
      if (!result.isInitialized()) {
        throw newUninitializedMessageException(result);
      }
      return result;
    }
    @java.lang.Override
    public com.google.devtools.artifactregistry.v1.CreateRepositoryRequest buildPartial() {
      com.google.devtools.artifactregistry.v1.CreateRepositoryRequest result =
          new com.google.devtools.artifactregistry.v1.CreateRepositoryRequest(this);
      if (bitField0_ != 0) {
        buildPartial0(result);
      }
      onBuilt();
      return result;
    }
    // Copies only the fields whose builder-side bits are set into the new message, and
    // translates builder bit positions into the message's (smaller) presence bitfield.
    private void buildPartial0(
        com.google.devtools.artifactregistry.v1.CreateRepositoryRequest result) {
      int from_bitField0_ = bitField0_;
      if (((from_bitField0_ & 0x00000001) != 0)) {
        result.parent_ = parent_;
      }
      if (((from_bitField0_ & 0x00000002) != 0)) {
        result.repositoryId_ = repositoryId_;
      }
      int to_bitField0_ = 0;
      if (((from_bitField0_ & 0x00000004) != 0)) {
        result.repository_ = repositoryBuilder_ == null ? repository_ : repositoryBuilder_.build();
        to_bitField0_ |= 0x00000001;
      }
      result.bitField0_ |= to_bitField0_;
    }
    @java.lang.Override
    public Builder clone() {
      return super.clone();
    }
    @java.lang.Override
    public Builder setField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.setField(field, value);
    }
    @java.lang.Override
    public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
      return super.clearField(field);
    }
    @java.lang.Override
    public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
      return super.clearOneof(oneof);
    }
    @java.lang.Override
    public Builder setRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
      return super.setRepeatedField(field, index, value);
    }
    @java.lang.Override
    public Builder addRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.addRepeatedField(field, value);
    }
    @java.lang.Override
    public Builder mergeFrom(com.google.protobuf.Message other) {
      if (other instanceof com.google.devtools.artifactregistry.v1.CreateRepositoryRequest) {
        return mergeFrom((com.google.devtools.artifactregistry.v1.CreateRepositoryRequest) other);
      } else {
        super.mergeFrom(other);
        return this;
      }
    }
    public Builder mergeFrom(
        com.google.devtools.artifactregistry.v1.CreateRepositoryRequest other) {
      if (other
          == com.google.devtools.artifactregistry.v1.CreateRepositoryRequest.getDefaultInstance())
        return this;
      // Standard proto3 merge semantics: non-default scalar fields overwrite,
      // set message fields are merged recursively.
      if (!other.getParent().isEmpty()) {
        parent_ = other.parent_;
        bitField0_ |= 0x00000001;
        onChanged();
      }
      if (!other.getRepositoryId().isEmpty()) {
        repositoryId_ = other.repositoryId_;
        bitField0_ |= 0x00000002;
        onChanged();
      }
      if (other.hasRepository()) {
        mergeRepository(other.getRepository());
      }
      this.mergeUnknownFields(other.getUnknownFields());
      onChanged();
      return this;
    }
    @java.lang.Override
    public final boolean isInitialized() {
      return true;
    }
    @java.lang.Override
    public Builder mergeFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      if (extensionRegistry == null) {
        throw new java.lang.NullPointerException();
      }
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              done = true;
              break;
            case 10:
              {
                parent_ = input.readStringRequireUtf8();
                bitField0_ |= 0x00000001;
                break;
              } // case 10
            case 18:
              {
                repositoryId_ = input.readStringRequireUtf8();
                bitField0_ |= 0x00000002;
                break;
              } // case 18
            case 26:
              {
                input.readMessage(getRepositoryFieldBuilder().getBuilder(), extensionRegistry);
                bitField0_ |= 0x00000004;
                break;
              } // case 26
            default:
              {
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
          } // switch (tag)
        } // while (!done)
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.unwrapIOException();
      } finally {
        onChanged();
      } // finally
      return this;
    }
    // Presence bits for builder fields: 0x1 = parent, 0x2 = repositoryId, 0x4 = repository.
    private int bitField0_;
    private java.lang.Object parent_ = "";
    /**
     *
     *
     * <pre>
     * Required. The name of the parent resource where the repository will be
     * created.
     * </pre>
     *
     * <code>
     * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
     * </code>
     *
     * @return The parent.
     */
    public java.lang.String getParent() {
      java.lang.Object ref = parent_;
      if (!(ref instanceof java.lang.String)) {
        com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        parent_ = s;
        return s;
      } else {
        return (java.lang.String) ref;
      }
    }
    /**
     *
     *
     * <pre>
     * Required. The name of the parent resource where the repository will be
     * created.
     * </pre>
     *
     * <code>
     * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
     * </code>
     *
     * @return The bytes for parent.
     */
    public com.google.protobuf.ByteString getParentBytes() {
      java.lang.Object ref = parent_;
      if (ref instanceof String) {
        com.google.protobuf.ByteString b =
            com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
        parent_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }
    /**
     *
     *
     * <pre>
     * Required. The name of the parent resource where the repository will be
     * created.
     * </pre>
     *
     * <code>
     * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
     * </code>
     *
     * @param value The parent to set.
     * @return This builder for chaining.
     */
    public Builder setParent(java.lang.String value) {
      if (value == null) {
        throw new NullPointerException();
      }
      parent_ = value;
      bitField0_ |= 0x00000001;
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Required. The name of the parent resource where the repository will be
     * created.
     * </pre>
     *
     * <code>
     * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
     * </code>
     *
     * @return This builder for chaining.
     */
    public Builder clearParent() {
      parent_ = getDefaultInstance().getParent();
      bitField0_ = (bitField0_ & ~0x00000001);
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Required. The name of the parent resource where the repository will be
     * created.
     * </pre>
     *
     * <code>
     * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
     * </code>
     *
     * @param value The bytes for parent to set.
     * @return This builder for chaining.
     */
    public Builder setParentBytes(com.google.protobuf.ByteString value) {
      if (value == null) {
        throw new NullPointerException();
      }
      checkByteStringIsUtf8(value);
      parent_ = value;
      bitField0_ |= 0x00000001;
      onChanged();
      return this;
    }
    private java.lang.Object repositoryId_ = "";
    /**
     *
     *
     * <pre>
     * Required. The repository id to use for this repository.
     * </pre>
     *
     * <code>string repository_id = 2 [(.google.api.field_behavior) = REQUIRED];</code>
     *
     * @return The repositoryId.
     */
    public java.lang.String getRepositoryId() {
      java.lang.Object ref = repositoryId_;
      if (!(ref instanceof java.lang.String)) {
        com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        repositoryId_ = s;
        return s;
      } else {
        return (java.lang.String) ref;
      }
    }
    /**
     *
     *
     * <pre>
     * Required. The repository id to use for this repository.
     * </pre>
     *
     * <code>string repository_id = 2 [(.google.api.field_behavior) = REQUIRED];</code>
     *
     * @return The bytes for repositoryId.
     */
    public com.google.protobuf.ByteString getRepositoryIdBytes() {
      java.lang.Object ref = repositoryId_;
      if (ref instanceof String) {
        com.google.protobuf.ByteString b =
            com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
        repositoryId_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }
    /**
     *
     *
     * <pre>
     * Required. The repository id to use for this repository.
     * </pre>
     *
     * <code>string repository_id = 2 [(.google.api.field_behavior) = REQUIRED];</code>
     *
     * @param value The repositoryId to set.
     * @return This builder for chaining.
     */
    public Builder setRepositoryId(java.lang.String value) {
      if (value == null) {
        throw new NullPointerException();
      }
      repositoryId_ = value;
      bitField0_ |= 0x00000002;
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Required. The repository id to use for this repository.
     * </pre>
     *
     * <code>string repository_id = 2 [(.google.api.field_behavior) = REQUIRED];</code>
     *
     * @return This builder for chaining.
     */
    public Builder clearRepositoryId() {
      repositoryId_ = getDefaultInstance().getRepositoryId();
      bitField0_ = (bitField0_ & ~0x00000002);
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Required. The repository id to use for this repository.
     * </pre>
     *
     * <code>string repository_id = 2 [(.google.api.field_behavior) = REQUIRED];</code>
     *
     * @param value The bytes for repositoryId to set.
     * @return This builder for chaining.
     */
    public Builder setRepositoryIdBytes(com.google.protobuf.ByteString value) {
      if (value == null) {
        throw new NullPointerException();
      }
      checkByteStringIsUtf8(value);
      repositoryId_ = value;
      bitField0_ |= 0x00000002;
      onChanged();
      return this;
    }
    // The message field is stored either directly (repository_) or via the nested
    // field builder (repositoryBuilder_); at most one of the two is active at a time.
    private com.google.devtools.artifactregistry.v1.Repository repository_;
    private com.google.protobuf.SingleFieldBuilderV3<
            com.google.devtools.artifactregistry.v1.Repository,
            com.google.devtools.artifactregistry.v1.Repository.Builder,
            com.google.devtools.artifactregistry.v1.RepositoryOrBuilder>
        repositoryBuilder_;
    /**
     *
     *
     * <pre>
     * Required. The repository to be created.
     * </pre>
     *
     * <code>
     * .google.devtools.artifactregistry.v1.Repository repository = 3 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     *
     * @return Whether the repository field is set.
     */
    public boolean hasRepository() {
      return ((bitField0_ & 0x00000004) != 0);
    }
    /**
     *
     *
     * <pre>
     * Required. The repository to be created.
     * </pre>
     *
     * <code>
     * .google.devtools.artifactregistry.v1.Repository repository = 3 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     *
     * @return The repository.
     */
    public com.google.devtools.artifactregistry.v1.Repository getRepository() {
      if (repositoryBuilder_ == null) {
        return repository_ == null
            ? com.google.devtools.artifactregistry.v1.Repository.getDefaultInstance()
            : repository_;
      } else {
        return repositoryBuilder_.getMessage();
      }
    }
    /**
     *
     *
     * <pre>
     * Required. The repository to be created.
     * </pre>
     *
     * <code>
     * .google.devtools.artifactregistry.v1.Repository repository = 3 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     */
    public Builder setRepository(com.google.devtools.artifactregistry.v1.Repository value) {
      if (repositoryBuilder_ == null) {
        if (value == null) {
          throw new NullPointerException();
        }
        repository_ = value;
      } else {
        repositoryBuilder_.setMessage(value);
      }
      bitField0_ |= 0x00000004;
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Required. The repository to be created.
     * </pre>
     *
     * <code>
     * .google.devtools.artifactregistry.v1.Repository repository = 3 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     */
    public Builder setRepository(
        com.google.devtools.artifactregistry.v1.Repository.Builder builderForValue) {
      if (repositoryBuilder_ == null) {
        repository_ = builderForValue.build();
      } else {
        repositoryBuilder_.setMessage(builderForValue.build());
      }
      bitField0_ |= 0x00000004;
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Required. The repository to be created.
     * </pre>
     *
     * <code>
     * .google.devtools.artifactregistry.v1.Repository repository = 3 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     */
    public Builder mergeRepository(com.google.devtools.artifactregistry.v1.Repository value) {
      if (repositoryBuilder_ == null) {
        // Merge into an existing non-default value; otherwise just adopt the incoming value.
        if (((bitField0_ & 0x00000004) != 0)
            && repository_ != null
            && repository_
                != com.google.devtools.artifactregistry.v1.Repository.getDefaultInstance()) {
          getRepositoryBuilder().mergeFrom(value);
        } else {
          repository_ = value;
        }
      } else {
        repositoryBuilder_.mergeFrom(value);
      }
      if (repository_ != null) {
        bitField0_ |= 0x00000004;
        onChanged();
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * Required. The repository to be created.
     * </pre>
     *
     * <code>
     * .google.devtools.artifactregistry.v1.Repository repository = 3 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     */
    public Builder clearRepository() {
      bitField0_ = (bitField0_ & ~0x00000004);
      repository_ = null;
      if (repositoryBuilder_ != null) {
        repositoryBuilder_.dispose();
        repositoryBuilder_ = null;
      }
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Required. The repository to be created.
     * </pre>
     *
     * <code>
     * .google.devtools.artifactregistry.v1.Repository repository = 3 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     */
    public com.google.devtools.artifactregistry.v1.Repository.Builder getRepositoryBuilder() {
      bitField0_ |= 0x00000004;
      onChanged();
      return getRepositoryFieldBuilder().getBuilder();
    }
    /**
     *
     *
     * <pre>
     * Required. The repository to be created.
     * </pre>
     *
     * <code>
     * .google.devtools.artifactregistry.v1.Repository repository = 3 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     */
    public com.google.devtools.artifactregistry.v1.RepositoryOrBuilder getRepositoryOrBuilder() {
      if (repositoryBuilder_ != null) {
        return repositoryBuilder_.getMessageOrBuilder();
      } else {
        return repository_ == null
            ? com.google.devtools.artifactregistry.v1.Repository.getDefaultInstance()
            : repository_;
      }
    }
    /**
     *
     *
     * <pre>
     * Required. The repository to be created.
     * </pre>
     *
     * <code>
     * .google.devtools.artifactregistry.v1.Repository repository = 3 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     */
    // Lazily creates the nested field builder and hands ownership of the current
    // repository_ value to it (repository_ is nulled out afterwards).
    private com.google.protobuf.SingleFieldBuilderV3<
            com.google.devtools.artifactregistry.v1.Repository,
            com.google.devtools.artifactregistry.v1.Repository.Builder,
            com.google.devtools.artifactregistry.v1.RepositoryOrBuilder>
        getRepositoryFieldBuilder() {
      if (repositoryBuilder_ == null) {
        repositoryBuilder_ =
            new com.google.protobuf.SingleFieldBuilderV3<
                com.google.devtools.artifactregistry.v1.Repository,
                com.google.devtools.artifactregistry.v1.Repository.Builder,
                com.google.devtools.artifactregistry.v1.RepositoryOrBuilder>(
                getRepository(), getParentForChildren(), isClean());
        repository_ = null;
      }
      return repositoryBuilder_;
    }
    @java.lang.Override
    public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.setUnknownFields(unknownFields);
    }
    @java.lang.Override
    public final Builder mergeUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.mergeUnknownFields(unknownFields);
    }
    // @@protoc_insertion_point(builder_scope:google.devtools.artifactregistry.v1.CreateRepositoryRequest)
  }
  // @@protoc_insertion_point(class_scope:google.devtools.artifactregistry.v1.CreateRepositoryRequest)
  // Singleton default instance shared by getDefaultInstance()/getDefaultInstanceForType().
  private static final com.google.devtools.artifactregistry.v1.CreateRepositoryRequest
      DEFAULT_INSTANCE;
  static {
    DEFAULT_INSTANCE = new com.google.devtools.artifactregistry.v1.CreateRepositoryRequest();
  }
  public static com.google.devtools.artifactregistry.v1.CreateRepositoryRequest
      getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }
  // Parser implementation: delegates to Builder.mergeFrom and attaches the partially
  // built message to any parse exception so callers can inspect what was read.
  private static final com.google.protobuf.Parser<CreateRepositoryRequest> PARSER =
      new com.google.protobuf.AbstractParser<CreateRepositoryRequest>() {
        @java.lang.Override
        public CreateRepositoryRequest parsePartialFrom(
            com.google.protobuf.CodedInputStream input,
            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
            throws com.google.protobuf.InvalidProtocolBufferException {
          Builder builder = newBuilder();
          try {
            builder.mergeFrom(input, extensionRegistry);
          } catch (com.google.protobuf.InvalidProtocolBufferException e) {
            throw e.setUnfinishedMessage(builder.buildPartial());
          } catch (com.google.protobuf.UninitializedMessageException e) {
            throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
          } catch (java.io.IOException e) {
            throw new com.google.protobuf.InvalidProtocolBufferException(e)
                .setUnfinishedMessage(builder.buildPartial());
          }
          return builder.buildPartial();
        }
      };
  public static com.google.protobuf.Parser<CreateRepositoryRequest> parser() {
    return PARSER;
  }
  @java.lang.Override
  public com.google.protobuf.Parser<CreateRepositoryRequest> getParserForType() {
    return PARSER;
  }
  @java.lang.Override
  public com.google.devtools.artifactregistry.v1.CreateRepositoryRequest
      getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }
}
|
apache/flink | 36,942 | flink-libraries/flink-cep/src/test/java/org/apache/flink/cep/nfa/AfterMatchSkipITCase.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.cep.nfa;
import org.apache.flink.cep.Event;
import org.apache.flink.cep.nfa.aftermatch.AfterMatchSkipStrategy;
import org.apache.flink.cep.nfa.aftermatch.SkipPastLastStrategy;
import org.apache.flink.cep.nfa.sharedbuffer.SharedBuffer;
import org.apache.flink.cep.pattern.Pattern;
import org.apache.flink.cep.pattern.conditions.IterativeCondition;
import org.apache.flink.cep.pattern.conditions.SimpleCondition;
import org.apache.flink.cep.utils.NFATestHarness;
import org.apache.flink.cep.utils.TestSharedBuffer;
import org.apache.flink.streaming.runtime.streamrecord.StreamRecord;
import org.apache.flink.util.FlinkRuntimeException;
import org.apache.flink.util.TestLogger;
import org.apache.flink.shaded.guava33.com.google.common.collect.Lists;
import org.hamcrest.Matchers;
import org.junit.Test;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import static org.apache.flink.cep.utils.NFATestUtilities.comparePatterns;
import static org.junit.Assert.assertThat;
/** IT tests covering {@link AfterMatchSkipStrategy}. */
public class AfterMatchSkipITCase extends TestLogger {
@Test
public void testNoSkip() throws Exception {
List<StreamRecord<Event>> streamEvents = new ArrayList<>();
Event a1 = new Event(1, "a", 0.0);
Event a2 = new Event(2, "a", 0.0);
Event a3 = new Event(3, "a", 0.0);
Event a4 = new Event(4, "a", 0.0);
Event a5 = new Event(5, "a", 0.0);
Event a6 = new Event(6, "a", 0.0);
streamEvents.add(new StreamRecord<Event>(a1));
streamEvents.add(new StreamRecord<Event>(a2));
streamEvents.add(new StreamRecord<Event>(a3));
streamEvents.add(new StreamRecord<Event>(a4));
streamEvents.add(new StreamRecord<Event>(a5));
streamEvents.add(new StreamRecord<Event>(a6));
Pattern<Event, ?> pattern =
Pattern.<Event>begin("start", AfterMatchSkipStrategy.noSkip())
.where(SimpleCondition.of(value -> value.getName().equals("a")))
.times(3);
NFATestHarness nfaTestHarness = NFATestHarness.forPattern(pattern).build();
List<List<Event>> resultingPatterns = nfaTestHarness.feedRecords(streamEvents);
comparePatterns(
resultingPatterns,
Lists.newArrayList(
Lists.newArrayList(a1, a2, a3),
Lists.newArrayList(a2, a3, a4),
Lists.newArrayList(a3, a4, a5),
Lists.newArrayList(a4, a5, a6)));
}
@Test
public void testNoSkipWithFollowedByAny() throws Exception {
List<List<Event>> resultingPatterns =
TwoVariablesFollowedByAny.compute(AfterMatchSkipStrategy.noSkip());
comparePatterns(
resultingPatterns,
Lists.newArrayList(
Lists.newArrayList(
TwoVariablesFollowedByAny.a1, TwoVariablesFollowedByAny.b1),
Lists.newArrayList(
TwoVariablesFollowedByAny.a1, TwoVariablesFollowedByAny.b2),
Lists.newArrayList(
TwoVariablesFollowedByAny.a2, TwoVariablesFollowedByAny.b2)));
}
@Test
public void testSkipToNextWithFollowedByAny() throws Exception {
List<List<Event>> resultingPatterns =
TwoVariablesFollowedByAny.compute(AfterMatchSkipStrategy.skipToNext());
comparePatterns(
resultingPatterns,
Lists.newArrayList(
Lists.newArrayList(
TwoVariablesFollowedByAny.a1, TwoVariablesFollowedByAny.b1),
Lists.newArrayList(
TwoVariablesFollowedByAny.a2, TwoVariablesFollowedByAny.b2)));
}
static class TwoVariablesFollowedByAny {
static Event a1 = new Event(1, "a", 0.0);
static Event b1 = new Event(2, "b", 0.0);
static Event a2 = new Event(4, "a", 0.0);
static Event b2 = new Event(5, "b", 0.0);
private static List<List<Event>> compute(AfterMatchSkipStrategy skipStrategy)
throws Exception {
List<StreamRecord<Event>> streamEvents = new ArrayList<>();
streamEvents.add(new StreamRecord<>(a1));
streamEvents.add(new StreamRecord<>(b1));
streamEvents.add(new StreamRecord<>(a2));
streamEvents.add(new StreamRecord<>(b2));
Pattern<Event, ?> pattern =
Pattern.<Event>begin("start")
.where(SimpleCondition.of(value -> value.getName().equals("a")))
.followedByAny("end")
.where(SimpleCondition.of(value -> value.getName().equals("b")));
NFATestHarness nfaTestHarness =
NFATestHarness.forPattern(pattern)
.withAfterMatchSkipStrategy(skipStrategy)
.build();
return nfaTestHarness.feedRecords(streamEvents);
}
}
@Test
public void testNoSkipWithQuantifierAtTheEnd() throws Exception {
List<List<Event>> resultingPatterns =
QuantifierAtEndOfPattern.compute(AfterMatchSkipStrategy.noSkip());
comparePatterns(
resultingPatterns,
Lists.newArrayList(
Lists.newArrayList(
QuantifierAtEndOfPattern.a1,
QuantifierAtEndOfPattern.b1,
QuantifierAtEndOfPattern.b2,
QuantifierAtEndOfPattern.b3),
Lists.newArrayList(
QuantifierAtEndOfPattern.a1,
QuantifierAtEndOfPattern.b1,
QuantifierAtEndOfPattern.b2),
Lists.newArrayList(
QuantifierAtEndOfPattern.a1, QuantifierAtEndOfPattern.b1)));
}
@Test
public void testSkipToNextWithQuantifierAtTheEnd() throws Exception {
List<List<Event>> resultingPatterns =
QuantifierAtEndOfPattern.compute(AfterMatchSkipStrategy.skipToNext());
comparePatterns(
resultingPatterns,
Lists.<List<Event>>newArrayList(
Lists.newArrayList(
QuantifierAtEndOfPattern.a1, QuantifierAtEndOfPattern.b1)));
}
    /**
     * Shared fixture: "a" followed strictly by one-or-more "b"s, fed with a1 b1 b2 b3 and
     * evaluated under the {@link AfterMatchSkipStrategy} supplied by each test.
     */
    static class QuantifierAtEndOfPattern {
        static Event a1 = new Event(1, "a", 0.0);
        static Event b1 = new Event(2, "b", 0.0);
        static Event b2 = new Event(4, "b", 0.0);
        static Event b3 = new Event(5, "b", 0.0);
        /** Runs the shared pattern with the given skip strategy and returns all matches. */
        private static List<List<Event>> compute(AfterMatchSkipStrategy skipStrategy)
                throws Exception {
            List<StreamRecord<Event>> streamEvents = new ArrayList<>();
            streamEvents.add(new StreamRecord<>(a1));
            streamEvents.add(new StreamRecord<>(b1));
            streamEvents.add(new StreamRecord<>(b2));
            streamEvents.add(new StreamRecord<>(b3));
            Pattern<Event, ?> pattern =
                    Pattern.<Event>begin("start")
                            .where(SimpleCondition.of(value -> value.getName().equals("a")))
                            .next("end")
                            .where(SimpleCondition.of(value -> value.getName().equals("b")))
                            .oneOrMore();
            NFATestHarness nfaTestHarness =
                    NFATestHarness.forPattern(pattern)
                            .withAfterMatchSkipStrategy(skipStrategy)
                            .build();
            return nfaTestHarness.feedRecords(streamEvents);
        }
    }
@Test
public void testSkipPastLast() throws Exception {
List<StreamRecord<Event>> streamEvents = new ArrayList<>();
Event a1 = new Event(1, "a", 0.0);
Event a2 = new Event(2, "a", 0.0);
Event a3 = new Event(3, "a", 0.0);
Event a4 = new Event(4, "a", 0.0);
Event a5 = new Event(5, "a", 0.0);
Event a6 = new Event(6, "a", 0.0);
streamEvents.add(new StreamRecord<Event>(a1));
streamEvents.add(new StreamRecord<Event>(a2));
streamEvents.add(new StreamRecord<Event>(a3));
streamEvents.add(new StreamRecord<Event>(a4));
streamEvents.add(new StreamRecord<Event>(a5));
streamEvents.add(new StreamRecord<Event>(a6));
Pattern<Event, ?> pattern =
Pattern.<Event>begin("start", AfterMatchSkipStrategy.skipPastLastEvent())
.where(SimpleCondition.of(value -> value.getName().equals("a")))
.times(3);
NFATestHarness nfaTestHarness = NFATestHarness.forPattern(pattern).build();
List<List<Event>> resultingPatterns = nfaTestHarness.feedRecords(streamEvents);
comparePatterns(
resultingPatterns,
Lists.newArrayList(Lists.newArrayList(a1, a2, a3), Lists.newArrayList(a4, a5, a6)));
}
    @Test
    public void testSkipToFirst() throws Exception {
        // After the first match (ab1..ab4), skipToFirst("end") resumes at ab3 — the first
        // event mapped to "end" — so the second match overlaps the first.
        List<StreamRecord<Event>> streamEvents = new ArrayList<>();
        Event ab1 = new Event(1, "ab", 0.0);
        Event ab2 = new Event(2, "ab", 0.0);
        Event ab3 = new Event(3, "ab", 0.0);
        Event ab4 = new Event(4, "ab", 0.0);
        Event ab5 = new Event(5, "ab", 0.0);
        Event ab6 = new Event(6, "ab", 0.0);
        streamEvents.add(new StreamRecord<Event>(ab1));
        streamEvents.add(new StreamRecord<Event>(ab2));
        streamEvents.add(new StreamRecord<Event>(ab3));
        streamEvents.add(new StreamRecord<Event>(ab4));
        streamEvents.add(new StreamRecord<Event>(ab5));
        streamEvents.add(new StreamRecord<Event>(ab6));
        Pattern<Event, ?> pattern =
                Pattern.<Event>begin("start", AfterMatchSkipStrategy.skipToFirst("end"))
                        .where(SimpleCondition.of(value -> value.getName().contains("a")))
                        .times(2)
                        .next("end")
                        .where(SimpleCondition.of(value -> value.getName().contains("b")))
                        .times(2);
        NFATestHarness nfaTestHarness = NFATestHarness.forPattern(pattern).build();
        List<List<Event>> resultingPatterns = nfaTestHarness.feedRecords(streamEvents);
        comparePatterns(
                resultingPatterns,
                Lists.newArrayList(
                        Lists.newArrayList(ab1, ab2, ab3, ab4),
                        Lists.newArrayList(ab3, ab4, ab5, ab6)));
    }
    @Test
    public void testSkipToLast() throws Exception {
        // After the first match (ab1..ab4), skipToLast("end") resumes at ab4 — the last
        // event mapped to "end" — so the second match starts one event later than skipToFirst.
        List<StreamRecord<Event>> streamEvents = new ArrayList<>();
        Event ab1 = new Event(1, "ab", 0.0);
        Event ab2 = new Event(2, "ab", 0.0);
        Event ab3 = new Event(3, "ab", 0.0);
        Event ab4 = new Event(4, "ab", 0.0);
        Event ab5 = new Event(5, "ab", 0.0);
        Event ab6 = new Event(6, "ab", 0.0);
        Event ab7 = new Event(7, "ab", 0.0);
        streamEvents.add(new StreamRecord<Event>(ab1));
        streamEvents.add(new StreamRecord<Event>(ab2));
        streamEvents.add(new StreamRecord<Event>(ab3));
        streamEvents.add(new StreamRecord<Event>(ab4));
        streamEvents.add(new StreamRecord<Event>(ab5));
        streamEvents.add(new StreamRecord<Event>(ab6));
        streamEvents.add(new StreamRecord<Event>(ab7));
        Pattern<Event, ?> pattern =
                Pattern.<Event>begin("start", AfterMatchSkipStrategy.skipToLast("end"))
                        .where(SimpleCondition.of(value -> value.getName().contains("a")))
                        .times(2)
                        .next("end")
                        .where(SimpleCondition.of(value -> value.getName().contains("b")))
                        .times(2);
        NFATestHarness nfaTestHarness = NFATestHarness.forPattern(pattern).build();
        List<List<Event>> resultingPatterns = nfaTestHarness.feedRecords(streamEvents);
        comparePatterns(
                resultingPatterns,
                Lists.newArrayList(
                        Lists.newArrayList(ab1, ab2, ab3, ab4),
                        Lists.newArrayList(ab4, ab5, ab6, ab7)));
    }
    @Test
    public void testSkipPastLast2() throws Exception {
        // Only the earliest complete match (a1, b1, c1, d1) is expected;
        // SKIP_PAST_LAST_EVENT discards the partial matches that overlap it.
        List<StreamRecord<Event>> streamEvents = new ArrayList<>();
        Event a1 = new Event(1, "a1", 0.0);
        Event a2 = new Event(2, "a2", 0.0);
        Event b1 = new Event(3, "b1", 0.0);
        Event b2 = new Event(4, "b2", 0.0);
        Event c1 = new Event(5, "c1", 0.0);
        Event c2 = new Event(6, "c2", 0.0);
        Event d1 = new Event(7, "d1", 0.0);
        Event d2 = new Event(7, "d2", 0.0);
        streamEvents.add(new StreamRecord<>(a1));
        streamEvents.add(new StreamRecord<>(a2));
        streamEvents.add(new StreamRecord<>(b1));
        streamEvents.add(new StreamRecord<>(b2));
        streamEvents.add(new StreamRecord<>(c1));
        streamEvents.add(new StreamRecord<>(c2));
        streamEvents.add(new StreamRecord<>(d1));
        streamEvents.add(new StreamRecord<>(d2));
        Pattern<Event, ?> pattern =
                Pattern.<Event>begin("a", AfterMatchSkipStrategy.skipPastLastEvent())
                        .where(SimpleCondition.of(value -> value.getName().contains("a")))
                        .followedByAny("b")
                        .where(SimpleCondition.of(value -> value.getName().contains("b")))
                        .followedByAny("c")
                        .where(SimpleCondition.of(value -> value.getName().contains("c")))
                        .followedBy("d")
                        .where(SimpleCondition.of(value -> value.getName().contains("d")));
        NFATestHarness nfaTestHarness = NFATestHarness.forPattern(pattern).build();
        List<List<Event>> resultingPatterns = nfaTestHarness.feedRecords(streamEvents);
        comparePatterns(
                resultingPatterns, Collections.singletonList(Lists.newArrayList(a1, b1, c1, d1)));
    }
@Test
public void testSkipPastLast3() throws Exception {
List<StreamRecord<Event>> streamEvents = new ArrayList<>();
Event a1 = new Event(1, "a1", 0.0);
Event c = new Event(2, "c", 0.0);
Event a2 = new Event(3, "a2", 0.0);
Event b2 = new Event(4, "b2", 0.0);
streamEvents.add(new StreamRecord<Event>(a1));
streamEvents.add(new StreamRecord<Event>(c));
streamEvents.add(new StreamRecord<Event>(a2));
streamEvents.add(new StreamRecord<Event>(b2));
Pattern<Event, ?> pattern =
Pattern.<Event>begin("a", AfterMatchSkipStrategy.skipPastLastEvent())
.where(SimpleCondition.of(value -> value.getName().contains("a")))
.next("b")
.where(SimpleCondition.of(value -> value.getName().contains("b")));
NFATestHarness nfaTestHarness = NFATestHarness.forPattern(pattern).build();
List<List<Event>> resultingPatterns = nfaTestHarness.feedRecords(streamEvents);
comparePatterns(
resultingPatterns, Lists.<List<Event>>newArrayList(Lists.newArrayList(a2, b2)));
}
    @Test
    public void testSkipToFirstWithOptionalMatch() throws Exception {
        // The optional "x" head matches nothing here; both (ab, c) pairs are expected.
        List<StreamRecord<Event>> streamEvents = new ArrayList<>();
        Event ab1 = new Event(1, "ab1", 0.0);
        Event c1 = new Event(2, "c1", 0.0);
        Event ab2 = new Event(3, "ab2", 0.0);
        Event c2 = new Event(4, "c2", 0.0);
        streamEvents.add(new StreamRecord<Event>(ab1));
        streamEvents.add(new StreamRecord<Event>(c1));
        streamEvents.add(new StreamRecord<Event>(ab2));
        streamEvents.add(new StreamRecord<Event>(c2));
        Pattern<Event, ?> pattern =
                Pattern.<Event>begin("x", AfterMatchSkipStrategy.skipToFirst("b"))
                        .where(SimpleCondition.of(value -> value.getName().contains("x")))
                        .oneOrMore()
                        .optional()
                        .next("b")
                        .where(SimpleCondition.of(value -> value.getName().contains("b")))
                        .next("c")
                        .where(SimpleCondition.of(value -> value.getName().contains("c")));
        NFATestHarness nfaTestHarness = NFATestHarness.forPattern(pattern).build();
        List<List<Event>> resultingPatterns = nfaTestHarness.feedRecords(streamEvents);
        comparePatterns(
                resultingPatterns,
                Lists.newArrayList(Lists.newArrayList(ab1, c1), Lists.newArrayList(ab2, c2)));
    }
@Test
public void testSkipToFirstAtStartPosition() throws Exception {
List<StreamRecord<Event>> streamEvents = new ArrayList<>();
Event ab1 = new Event(1, "ab1", 0.0);
Event c1 = new Event(2, "c1", 0.0);
Event ab2 = new Event(3, "ab2", 0.0);
Event c2 = new Event(4, "c2", 0.0);
streamEvents.add(new StreamRecord<Event>(ab1));
streamEvents.add(new StreamRecord<Event>(c1));
streamEvents.add(new StreamRecord<Event>(ab2));
streamEvents.add(new StreamRecord<Event>(c2));
Pattern<Event, ?> pattern =
Pattern.<Event>begin("b", AfterMatchSkipStrategy.skipToFirst("b"))
.where(SimpleCondition.of(value -> value.getName().contains("b")))
.next("c")
.where(SimpleCondition.of(value -> value.getName().contains("c")));
NFATestHarness nfaTestHarness = NFATestHarness.forPattern(pattern).build();
List<List<Event>> resultingPatterns = nfaTestHarness.feedRecords(streamEvents);
comparePatterns(
resultingPatterns,
Lists.newArrayList(Lists.newArrayList(ab1, c1), Lists.newArrayList(ab2, c2)));
}
    @Test
    public void testSkipToFirstWithOneOrMore() throws Exception {
        // Expected matches: (a1, b1), (a2, b2), (a3, b4). Note b3 belongs to no match.
        List<StreamRecord<Event>> streamEvents = new ArrayList<>();
        Event a1 = new Event(1, "a1", 0.0);
        Event b1 = new Event(2, "b1", 0.0);
        Event a2 = new Event(3, "a2", 0.0);
        Event b2 = new Event(4, "b2", 0.0);
        Event b3 = new Event(5, "b3", 0.0);
        Event a3 = new Event(3, "a3", 0.0);
        Event b4 = new Event(4, "b4", 0.0);
        streamEvents.add(new StreamRecord<Event>(a1));
        streamEvents.add(new StreamRecord<Event>(b1));
        streamEvents.add(new StreamRecord<Event>(a2));
        streamEvents.add(new StreamRecord<Event>(b2));
        streamEvents.add(new StreamRecord<Event>(b3));
        streamEvents.add(new StreamRecord<Event>(a3));
        streamEvents.add(new StreamRecord<Event>(b4));
        Pattern<Event, ?> pattern =
                Pattern.<Event>begin("a", AfterMatchSkipStrategy.skipToFirst("b"))
                        .where(SimpleCondition.of(value -> value.getName().contains("a")))
                        .next("b")
                        .where(SimpleCondition.of(value -> value.getName().contains("b")))
                        .oneOrMore()
                        .consecutive();
        NFATestHarness nfaTestHarness = NFATestHarness.forPattern(pattern).build();
        List<List<Event>> resultingPatterns = nfaTestHarness.feedRecords(streamEvents);
        comparePatterns(
                resultingPatterns,
                Lists.newArrayList(
                        Lists.newArrayList(a1, b1),
                        Lists.newArrayList(a2, b2),
                        Lists.newArrayList(a3, b4)));
    }
@Test(expected = FlinkRuntimeException.class)
public void testSkipToFirstElementOfMatch() throws Exception {
List<StreamRecord<Event>> streamEvents = new ArrayList<>();
Event a1 = new Event(1, "a1", 0.0);
streamEvents.add(new StreamRecord<Event>(a1));
Pattern<Event, ?> pattern =
Pattern.<Event>begin(
"a", AfterMatchSkipStrategy.skipToFirst("a").throwExceptionOnMiss())
.where(SimpleCondition.of(value -> value.getName().contains("a")));
NFATestHarness nfaTestHarness = NFATestHarness.forPattern(pattern).build();
List<List<Event>> resultingPatterns = nfaTestHarness.feedRecords(streamEvents);
// skip to first element of a match should throw exception if they are enabled,
// this mode is used in MATCH RECOGNIZE which assumes that skipping to first element
// would result in infinite loop. In CEP by default(with exceptions disabled), we use no
// skip
// strategy in this case.
}
    @Test(expected = FlinkRuntimeException.class)
    public void testSkipToFirstNonExistentPosition() throws Exception {
        // "b" never appears in the match, so skipToFirst("b") misses; with
        // throwExceptionOnMiss() enabled this must raise a FlinkRuntimeException.
        MissedSkipTo.compute(AfterMatchSkipStrategy.skipToFirst("b").throwExceptionOnMiss());
        // exception should be thrown
    }
    @Test
    public void testSkipToFirstNonExistentPositionWithoutException() throws Exception {
        // Without throwExceptionOnMiss(), the miss on "b" is ignored and the single
        // match (a, c) is still emitted.
        List<List<Event>> resultingPatterns =
                MissedSkipTo.compute(AfterMatchSkipStrategy.skipToFirst("b"));
        comparePatterns(
                resultingPatterns,
                Collections.singletonList(Lists.newArrayList(MissedSkipTo.a, MissedSkipTo.c)));
    }
    @Test(expected = FlinkRuntimeException.class)
    public void testSkipToLastNonExistentPosition() throws Exception {
        // Same as the skipToFirst variant above: a miss on absent "b" with
        // throwExceptionOnMiss() enabled must raise a FlinkRuntimeException.
        MissedSkipTo.compute(AfterMatchSkipStrategy.skipToLast("b").throwExceptionOnMiss());
        // exception should be thrown
    }
@Test
public void testSkipToLastNonExistentPositionWithoutException() throws Exception {
List<List<Event>> resultingPatterns =
MissedSkipTo.compute(AfterMatchSkipStrategy.skipToFirst("b"));
comparePatterns(
resultingPatterns,
Collections.singletonList(Lists.newArrayList(MissedSkipTo.a, MissedSkipTo.c)));
}
    /**
     * Shared fixture whose pattern has an optional middle "b" that never matches, so
     * skip-to-first/last("b") has no position to skip to (a "miss").
     */
    static class MissedSkipTo {
        static Event a = new Event(1, "a", 0.0);
        static Event c = new Event(4, "c", 0.0);
        /** Runs the pattern over events a, c with the given skip strategy. */
        static List<List<Event>> compute(AfterMatchSkipStrategy skipStrategy) throws Exception {
            List<StreamRecord<Event>> streamEvents = new ArrayList<>();
            streamEvents.add(new StreamRecord<>(a));
            streamEvents.add(new StreamRecord<>(c));
            Pattern<Event, ?> pattern =
                    Pattern.<Event>begin("a")
                            .where(SimpleCondition.of(value -> value.getName().contains("a")))
                            .next("b")
                            .where(SimpleCondition.of(value -> value.getName().contains("b")))
                            .oneOrMore()
                            .optional()
                            .consecutive()
                            .next("c")
                            .where(SimpleCondition.of(value -> value.getName().contains("c")))
            NFATestHarness nfaTestHarness =
                    NFATestHarness.forPattern(pattern)
                            .withAfterMatchSkipStrategy(skipStrategy)
                            .build();
            return nfaTestHarness.feedRecords(streamEvents);
        }
    }
    @Test
    public void testSkipToLastWithOneOrMore() throws Exception {
        // Expected matches mirror testSkipToFirstWithOneOrMore: (a1, b1), (a2, b2), (a3, b4).
        List<StreamRecord<Event>> streamEvents = new ArrayList<>();
        Event a1 = new Event(1, "a1", 0.0);
        Event b1 = new Event(2, "b1", 0.0);
        Event a2 = new Event(3, "a2", 0.0);
        Event b2 = new Event(4, "b2", 0.0);
        Event b3 = new Event(5, "b3", 0.0);
        Event a3 = new Event(3, "a3", 0.0);
        Event b4 = new Event(4, "b4", 0.0);
        streamEvents.add(new StreamRecord<Event>(a1));
        streamEvents.add(new StreamRecord<Event>(b1));
        streamEvents.add(new StreamRecord<Event>(a2));
        streamEvents.add(new StreamRecord<Event>(b2));
        streamEvents.add(new StreamRecord<Event>(b3));
        streamEvents.add(new StreamRecord<Event>(a3));
        streamEvents.add(new StreamRecord<Event>(b4));
        Pattern<Event, ?> pattern =
                Pattern.<Event>begin("a", AfterMatchSkipStrategy.skipToLast("b"))
                        .where(SimpleCondition.of(value -> value.getName().contains("a")))
                        .next("b")
                        .where(SimpleCondition.of(value -> value.getName().contains("b")))
                        .oneOrMore()
                        .consecutive();
        NFATestHarness nfaTestHarness = NFATestHarness.forPattern(pattern).build();
        List<List<Event>> resultingPatterns = nfaTestHarness.feedRecords(streamEvents);
        comparePatterns(
                resultingPatterns,
                Lists.newArrayList(
                        Lists.newArrayList(a1, b1),
                        Lists.newArrayList(a2, b2),
                        Lists.newArrayList(a3, b4)));
    }
/** Example from docs. */
    @Test
    public void testSkipPastLastWithOneOrMoreAtBeginning() throws Exception {
        // The greedy a+ consumes a1..a3; SKIP_PAST_LAST_EVENT leaves no overlapping match,
        // so only the single full match is expected.
        List<StreamRecord<Event>> streamEvents = new ArrayList<>();
        Event a1 = new Event(1, "a1", 0.0);
        Event a2 = new Event(2, "a2", 0.0);
        Event a3 = new Event(3, "a3", 0.0);
        Event b1 = new Event(4, "b1", 0.0);
        streamEvents.add(new StreamRecord<>(a1));
        streamEvents.add(new StreamRecord<>(a2));
        streamEvents.add(new StreamRecord<>(a3));
        streamEvents.add(new StreamRecord<>(b1));
        Pattern<Event, ?> pattern =
                Pattern.<Event>begin("a", AfterMatchSkipStrategy.skipPastLastEvent())
                        .where(SimpleCondition.of(value -> value.getName().contains("a")))
                        .oneOrMore()
                        .consecutive()
                        .greedy()
                        .next("b")
                        .where(SimpleCondition.of(value -> value.getName().contains("b")));
        NFATestHarness nfaTestHarness = NFATestHarness.forPattern(pattern).build();
        List<List<Event>> resultingPatterns = nfaTestHarness.feedRecords(streamEvents);
        comparePatterns(
                resultingPatterns, Collections.singletonList(Lists.newArrayList(a1, a2, a3, b1)));
    }
/** Example from docs. */
    @Test
    public void testSkipToLastWithOneOrMoreAtBeginning() throws Exception {
        // skipToLast("a") resumes at a3, the last "a" of the first match,
        // producing one additional overlapping match (a3, b1).
        List<StreamRecord<Event>> streamEvents = new ArrayList<>();
        Event a1 = new Event(1, "a1", 0.0);
        Event a2 = new Event(2, "a2", 0.0);
        Event a3 = new Event(3, "a3", 0.0);
        Event b1 = new Event(4, "b1", 0.0);
        streamEvents.add(new StreamRecord<>(a1));
        streamEvents.add(new StreamRecord<>(a2));
        streamEvents.add(new StreamRecord<>(a3));
        streamEvents.add(new StreamRecord<>(b1));
        Pattern<Event, ?> pattern =
                Pattern.<Event>begin("a", AfterMatchSkipStrategy.skipToLast("a"))
                        .where(SimpleCondition.of(value -> value.getName().contains("a")))
                        .oneOrMore()
                        .consecutive()
                        .greedy()
                        .next("b")
                        .where(SimpleCondition.of(value -> value.getName().contains("b")));
        NFATestHarness nfaTestHarness = NFATestHarness.forPattern(pattern).build();
        List<List<Event>> resultingPatterns = nfaTestHarness.feedRecords(streamEvents);
        comparePatterns(
                resultingPatterns,
                Lists.newArrayList(Lists.newArrayList(a1, a2, a3, b1), Lists.newArrayList(a3, b1)));
    }
/** Example from docs. */
@Test
public void testSkipToFirstWithOneOrMoreAtBeginning() throws Exception {
List<StreamRecord<Event>> streamEvents = new ArrayList<>();
Event a1 = new Event(1, "a1", 0.0);
Event a2 = new Event(2, "a2", 0.0);
Event a3 = new Event(3, "a3", 0.0);
Event b1 = new Event(4, "b1", 0.0);
streamEvents.add(new StreamRecord<>(a1));
streamEvents.add(new StreamRecord<>(a2));
streamEvents.add(new StreamRecord<>(a3));
streamEvents.add(new StreamRecord<>(b1));
Pattern<Event, ?> pattern =
Pattern.<Event>begin("a", AfterMatchSkipStrategy.skipToFirst("a"))
.where(SimpleCondition.of(value -> value.getName().contains("a")))
.oneOrMore()
.consecutive()
.greedy()
.next("b")
.where(SimpleCondition.of(value -> value.getName().contains("b")));
NFATestHarness nfaTestHarness = NFATestHarness.forPattern(pattern).build();
List<List<Event>> resultingPatterns = nfaTestHarness.feedRecords(streamEvents);
comparePatterns(
resultingPatterns,
Lists.newArrayList(
Lists.newArrayList(a1, a2, a3, b1),
Lists.newArrayList(a2, a3, b1),
Lists.newArrayList(a3, b1)));
}
/** Example from docs. */
    @Test
    public void testNoSkipWithOneOrMoreAtBeginning() throws Exception {
        // NO_SKIP emits a match for every start position inside the greedy "a" run.
        List<StreamRecord<Event>> streamEvents = new ArrayList<>();
        Event a1 = new Event(1, "a1", 0.0);
        Event a2 = new Event(2, "a2", 0.0);
        Event a3 = new Event(3, "a3", 0.0);
        Event b1 = new Event(4, "b1", 0.0);
        streamEvents.add(new StreamRecord<>(a1));
        streamEvents.add(new StreamRecord<>(a2));
        streamEvents.add(new StreamRecord<>(a3));
        streamEvents.add(new StreamRecord<>(b1));
        Pattern<Event, ?> pattern =
                Pattern.<Event>begin("a", AfterMatchSkipStrategy.noSkip())
                        .where(SimpleCondition.of(value -> value.getName().contains("a")))
                        .oneOrMore()
                        .consecutive()
                        .greedy()
                        .next("b")
                        .where(SimpleCondition.of(value -> value.getName().contains("b")));
        NFATestHarness nfaTestHarness = NFATestHarness.forPattern(pattern).build();
        List<List<Event>> resultingPatterns = nfaTestHarness.feedRecords(streamEvents);
        comparePatterns(
                resultingPatterns,
                Lists.newArrayList(
                        Lists.newArrayList(a1, a2, a3, b1),
                        Lists.newArrayList(a2, a3, b1),
                        Lists.newArrayList(a3, b1)));
    }
/** Example from docs. */
    @Test
    public void testSkipToFirstDiscarding() throws Exception {
        // Expected: the full match plus a second match resuming at c1, the first
        // event mapped to the "c*" group.
        List<StreamRecord<Event>> streamEvents = new ArrayList<>();
        Event a = new Event(1, "a", 0.0);
        Event b = new Event(2, "b", 0.0);
        Event c1 = new Event(3, "c1", 0.0);
        Event c2 = new Event(4, "c2", 0.0);
        Event c3 = new Event(5, "c3", 0.0);
        Event d = new Event(6, "d", 0.0);
        streamEvents.add(new StreamRecord<>(a));
        streamEvents.add(new StreamRecord<>(b));
        streamEvents.add(new StreamRecord<>(c1));
        streamEvents.add(new StreamRecord<>(c2));
        streamEvents.add(new StreamRecord<>(c3));
        streamEvents.add(new StreamRecord<>(d));
        Pattern<Event, ?> pattern =
                Pattern.<Event>begin("a or c", AfterMatchSkipStrategy.skipToFirst("c*"))
                        .where(
                                SimpleCondition.of(
                                        value ->
                                                value.getName().contains("a")
                                                        || value.getName().contains("c")))
                        .followedBy("b or c")
                        .where(
                                SimpleCondition.of(
                                        value ->
                                                value.getName().contains("b")
                                                        || value.getName().contains("c")))
                        .followedBy("c*")
                        .where(SimpleCondition.of(value -> value.getName().contains("c")))
                        .oneOrMore()
                        .greedy()
                        .followedBy("d")
                        .where(SimpleCondition.of(value -> value.getName().contains("d")));
        NFATestHarness nfaTestHarness = NFATestHarness.forPattern(pattern).build();
        List<List<Event>> resultingPatterns = nfaTestHarness.feedRecords(streamEvents);
        comparePatterns(
                resultingPatterns,
                Lists.newArrayList(
                        Lists.newArrayList(a, b, c1, c2, c3, d),
                        Lists.newArrayList(c1, c2, c3, d)));
    }
    @Test
    public void testSkipBeforeOtherAlreadyCompleted() throws Exception {
        // Two interleaved matches tied together by price: (a1, c1, b2) and (a2, c2, b1).
        // The skip triggered by one completed match must not drop the other in-flight one.
        List<StreamRecord<Event>> streamEvents = new ArrayList<>();
        Event a1 = new Event(1, "a1", 0.0);
        Event c1 = new Event(2, "c1", 0.0);
        Event a2 = new Event(3, "a2", 1.0);
        Event c2 = new Event(4, "c2", 0.0);
        Event b1 = new Event(5, "b1", 1.0);
        Event b2 = new Event(6, "b2", 0.0);
        streamEvents.add(new StreamRecord<>(a1));
        streamEvents.add(new StreamRecord<>(c1));
        streamEvents.add(new StreamRecord<>(a2));
        streamEvents.add(new StreamRecord<>(c2));
        streamEvents.add(new StreamRecord<>(b1));
        streamEvents.add(new StreamRecord<>(b2));
        Pattern<Event, ?> pattern =
                Pattern.<Event>begin("a", AfterMatchSkipStrategy.skipToFirst("c"))
                        .where(SimpleCondition.of(value -> value.getName().contains("a")))
                        .followedBy("c")
                        .where(SimpleCondition.of(value -> value.getName().contains("c")))
                        .followedBy("b")
                        .where(
                                new IterativeCondition<Event>() {
                                    @Override
                                    public boolean filter(Event value, Context<Event> ctx)
                                            throws Exception {
                                        // "b" must also share its price with the "a" event
                                        // of its own partial match.
                                        return value.getName().contains("b")
                                                && ctx.getEventsForPattern("a")
                                                                .iterator()
                                                                .next()
                                                                .getPrice()
                                                        == value.getPrice();
                                    }
                                });
        NFATestHarness nfaTestHarness = NFATestHarness.forPattern(pattern).build();
        List<List<Event>> resultingPatterns = nfaTestHarness.feedRecords(streamEvents);
        comparePatterns(
                resultingPatterns,
                Lists.newArrayList(Lists.newArrayList(a1, c1, b2), Lists.newArrayList(a2, c2, b1)));
    }
    @Test
    public void testSharedBufferIsProperlyCleared() throws Exception {
        // Feeds four identical events through a times(2) pattern with SKIP_PAST_LAST_EVENT
        // and verifies the shared buffer holds no leftover entries afterwards.
        List<StreamRecord<Event>> inputEvents = new ArrayList<>();
        for (int i = 0; i < 4; i++) {
            inputEvents.add(new StreamRecord<>(new Event(1, "a", 1.0), i));
        }
        SkipPastLastStrategy matchSkipStrategy = AfterMatchSkipStrategy.skipPastLastEvent();
        Pattern<Event, ?> pattern =
                Pattern.<Event>begin("start", matchSkipStrategy)
                        .where(SimpleCondition.of(value -> true))
                        .times(2);
        SharedBuffer<Event> sharedBuffer =
                TestSharedBuffer.createTestBuffer(Event.createTypeSerializer());
        NFATestHarness nfaTestHarness =
                NFATestHarness.forPattern(pattern).withSharedBuffer(sharedBuffer).build();
        nfaTestHarness.feedRecords(inputEvents);
        assertThat(sharedBuffer.isEmpty(), Matchers.is(true));
    }
}
/*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
* in compliance with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and limitations under
* the License.
*/
/*
* This code was generated by https://github.com/googleapis/google-api-java-client-services/
* Modify at your own risk.
*/
package com.google.api.services.compute.model;
/**
* Represents a Firewall Rule resource.
*
 * Firewall rules allow or deny ingress traffic to, and egress traffic from your instances. For
 * more information, read Firewall rules.
*
* <p> This is the Java data model class that specifies how to parse/serialize into the JSON that is
* transmitted over HTTP when working with the Compute Engine API. For a detailed explanation see:
* <a href="https://developers.google.com/api-client-library/java/google-http-java-client/json">https://developers.google.com/api-client-library/java/google-http-java-client/json</a>
* </p>
*
* @author Google, Inc.
*/
@SuppressWarnings("javadoc")
public final class Firewall extends com.google.api.client.json.GenericJson {
/**
* The list of ALLOW rules specified by this firewall. Each rule specifies a protocol and port-
* range tuple that describes a permitted connection.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private java.util.List<Allowed> allowed;
static {
// hack to force ProGuard to consider Allowed used, since otherwise it would be stripped out
// see https://github.com/google/google-api-java-client/issues/543
com.google.api.client.util.Data.nullOf(Allowed.class);
}
/**
   * [Output Only] Creation timestamp in RFC3339 text format.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private java.lang.String creationTimestamp;
/**
* The list of DENY rules specified by this firewall. Each rule specifies a protocol and port-
* range tuple that describes a denied connection.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private java.util.List<Denied> denied;
static {
// hack to force ProGuard to consider Denied used, since otherwise it would be stripped out
// see https://github.com/google/google-api-java-client/issues/543
com.google.api.client.util.Data.nullOf(Denied.class);
}
/**
* An optional description of this resource. Provide this field when you create the resource.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private java.lang.String description;
/**
* If destination ranges are specified, the firewall rule applies only to traffic that has
* destination IP address in these ranges. These ranges must be expressed inCIDR format. Both IPv4
* and IPv6 are supported.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private java.util.List<java.lang.String> destinationRanges;
/**
* Direction of traffic to which this firewall applies, either `INGRESS` or `EGRESS`. The default
* is `INGRESS`. For `EGRESS` traffic, you cannot specify the sourceTags fields.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private java.lang.String direction;
/**
* Denotes whether the firewall rule is disabled. When set to true, the firewall rule is not
* enforced and the network behaves as if it did not exist. If this is unspecified, the firewall
* rule will be enabled.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private java.lang.Boolean disabled;
/**
* Deprecated in favor of enable in LogConfig. This field denotes whether to enable logging for a
   * particular firewall rule. If logging is enabled, logs will be exported to Cloud Logging.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private java.lang.Boolean enableLogging;
/**
* [Output Only] The unique identifier for the resource. This identifier is defined by the server.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key @com.google.api.client.json.JsonString
private java.math.BigInteger id;
/**
* [Output Only] Type of the resource. Always compute#firewall for firewall rules.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private java.lang.String kind;
/**
* This field denotes the logging options for a particular firewall rule. If logging is enabled,
* logs will be exported to Cloud Logging.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private FirewallLogConfig logConfig;
/**
* Name of the resource; provided by the client when the resource is created. The name must be
* 1-63 characters long, and comply withRFC1035. Specifically, the name must be 1-63 characters
* long and match the regular expression `[a-z]([-a-z0-9]*[a-z0-9])?`. The first character must be
* a lowercase letter, and all following characters (except for the last character) must be a
* dash, lowercase letter, or digit. The last character must be a lowercase letter or digit.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private java.lang.String name;
/**
* URL of the network resource for this firewall rule. If not specified when creating a firewall
* rule, the default network is used:
*
* global/networks/default
*
* If you choose to specify this field, you can specify the network as a full or partial URL. For
* example, the following are all valid URLs: -
* https://www.googleapis.com/compute/v1/projects/myproject/global/networks/my-network -
* projects/myproject/global/networks/my-network - global/networks/default
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private java.lang.String network;
/**
* Input only. [Input Only] Additional params passed with the request, but not persisted as part
* of resource payload.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private FirewallParams params;
/**
* Priority for this rule. This is an integer between `0` and `65535`, both inclusive. The default
* value is `1000`. Relative priorities determine which rule takes effect if multiple rules apply.
* Lower values indicate higher priority. For example, a rule with priority `0` has higher
* precedence than a rule with priority `1`. DENY rules take precedence over ALLOW rules if they
* have equal priority. Note that VPC networks have implied rules with a priority of `65535`. To
* avoid conflicts with the implied rules, use a priority number less than `65535`.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private java.lang.Integer priority;
/**
* [Output Only] Server-defined URL for the resource.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private java.lang.String selfLink;
/**
* [Output Only] Server-defined URL for this resource with the resource id.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private java.lang.String selfLinkWithId;
/**
* If source ranges are specified, the firewall rule applies only to traffic that has a source IP
* address in these ranges. These ranges must be expressed inCIDR format. One or both of
* sourceRanges and sourceTags may be set. If both fields are set, the rule applies to traffic
* that has a source IP address within sourceRanges OR a source IP from a resource with a matching
* tag listed in thesourceTags field. The connection does not need to match both fields for the
* rule to apply. Both IPv4 and IPv6 are supported.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private java.util.List<java.lang.String> sourceRanges;
/**
* If source service accounts are specified, the firewall rules apply only to traffic originating
* from an instance with a service account in this list. Source service accounts cannot be used to
* control traffic to an instance's external IP address because service accounts are associated
* with an instance, not an IP address.sourceRanges can be set at the same time
* assourceServiceAccounts. If both are set, the firewall applies to traffic that has a source IP
* address within the sourceRanges OR a source IP that belongs to an instance with service account
* listed insourceServiceAccount. The connection does not need to match both fields for the
* firewall to apply.sourceServiceAccounts cannot be used at the same time assourceTags or
* targetTags.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private java.util.List<java.lang.String> sourceServiceAccounts;
/**
* If source tags are specified, the firewall rule applies only to traffic with source IPs that
* match the primary network interfaces of VM instances that have the tag and are in the same VPC
* network. Source tags cannot be used to control traffic to an instance's external IP address, it
* only applies to traffic between instances in the same virtual network. Because tags are
* associated with instances, not IP addresses. One or both of sourceRanges and sourceTags may be
* set. If both fields are set, the firewall applies to traffic that has a source IP address
* within sourceRanges OR a source IP from a resource with a matching tag listed in the sourceTags
* field. The connection does not need to match both fields for the firewall to apply.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private java.util.List<java.lang.String> sourceTags;
/**
* A list of service accounts indicating sets of instances located in the network that may make
* network connections as specified inallowed[].targetServiceAccounts cannot be used at the same
* time astargetTags or sourceTags. If neither targetServiceAccounts nor targetTags are specified,
* the firewall rule applies to all instances on the specified network.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private java.util.List<java.lang.String> targetServiceAccounts;
/**
* A list of tags that controls which instances the firewall rule applies to. If targetTags are
* specified, then the firewall rule applies only to instances in the VPC network that have one of
* those tags. If no targetTags are specified, the firewall rule applies to all instances on the
* specified network.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private java.util.List<java.lang.String> targetTags;
/**
 * The list of ALLOW rules specified by this firewall. Each rule specifies a protocol and
 * port-range tuple that describes a permitted connection.
 * @return value or {@code null} for none
 */
public java.util.List<Allowed> getAllowed() {
  return allowed;
}

/**
 * The list of ALLOW rules specified by this firewall. Each rule specifies a protocol and
 * port-range tuple that describes a permitted connection.
 * @param allowed allowed or {@code null} for none
 * @return this {@code Firewall}, for call chaining
 */
public Firewall setAllowed(java.util.List<Allowed> allowed) {
  this.allowed = allowed;
  return this;
}
/**
 * [Output Only] Creation timestamp in RFC 3339 text format.
 * @return value or {@code null} for none
 */
public java.lang.String getCreationTimestamp() {
  return creationTimestamp;
}

/**
 * [Output Only] Creation timestamp in RFC 3339 text format.
 * @param creationTimestamp creationTimestamp or {@code null} for none
 * @return this {@code Firewall}, for call chaining
 */
public Firewall setCreationTimestamp(java.lang.String creationTimestamp) {
  this.creationTimestamp = creationTimestamp;
  return this;
}
/**
 * The list of DENY rules specified by this firewall. Each rule specifies a protocol and
 * port-range tuple that describes a denied connection.
 * @return value or {@code null} for none
 */
public java.util.List<Denied> getDenied() {
  return denied;
}

/**
 * The list of DENY rules specified by this firewall. Each rule specifies a protocol and
 * port-range tuple that describes a denied connection.
 * @param denied denied or {@code null} for none
 * @return this {@code Firewall}, for call chaining
 */
public Firewall setDenied(java.util.List<Denied> denied) {
  this.denied = denied;
  return this;
}
/**
 * An optional description of this resource. Provide this field when you create the resource.
 * @return value or {@code null} for none
 */
public java.lang.String getDescription() {
  return description;
}

/**
 * An optional description of this resource. Provide this field when you create the resource.
 * @param description description or {@code null} for none
 * @return this {@code Firewall}, for call chaining
 */
public Firewall setDescription(java.lang.String description) {
  this.description = description;
  return this;
}
/**
 * If destination ranges are specified, the firewall rule applies only to traffic that has
 * destination IP address in these ranges. These ranges must be expressed in CIDR format. Both
 * IPv4 and IPv6 are supported.
 * @return value or {@code null} for none
 */
public java.util.List<java.lang.String> getDestinationRanges() {
  return destinationRanges;
}

/**
 * If destination ranges are specified, the firewall rule applies only to traffic that has
 * destination IP address in these ranges. These ranges must be expressed in CIDR format. Both
 * IPv4 and IPv6 are supported.
 * @param destinationRanges destinationRanges or {@code null} for none
 * @return this {@code Firewall}, for call chaining
 */
public Firewall setDestinationRanges(java.util.List<java.lang.String> destinationRanges) {
  this.destinationRanges = destinationRanges;
  return this;
}
/**
 * Direction of traffic to which this firewall applies, either `INGRESS` or `EGRESS`. The default
 * is `INGRESS`. For `EGRESS` traffic, you cannot specify the sourceTags fields.
 * @return value or {@code null} for none
 */
public java.lang.String getDirection() {
  return direction;
}

/**
 * Direction of traffic to which this firewall applies, either `INGRESS` or `EGRESS`. The default
 * is `INGRESS`. For `EGRESS` traffic, you cannot specify the sourceTags fields.
 * @param direction direction or {@code null} for none
 * @return this {@code Firewall}, for call chaining
 */
public Firewall setDirection(java.lang.String direction) {
  this.direction = direction;
  return this;
}
/**
 * Denotes whether the firewall rule is disabled. When set to true, the firewall rule is not
 * enforced and the network behaves as if it did not exist. If this is unspecified, the firewall
 * rule will be enabled.
 * @return value or {@code null} for none
 */
public java.lang.Boolean getDisabled() {
  return disabled;
}

/**
 * Denotes whether the firewall rule is disabled. When set to true, the firewall rule is not
 * enforced and the network behaves as if it did not exist. If this is unspecified, the firewall
 * rule will be enabled.
 * @param disabled disabled or {@code null} for none
 * @return this {@code Firewall}, for call chaining
 */
public Firewall setDisabled(java.lang.Boolean disabled) {
  this.disabled = disabled;
  return this;
}
/**
 * Deprecated in favor of enable in LogConfig. This field denotes whether to enable logging for a
 * particular firewall rule. If logging is enabled, logs will be exported to Cloud Logging.
 * @return value or {@code null} for none
 */
public java.lang.Boolean getEnableLogging() {
  return enableLogging;
}

/**
 * Deprecated in favor of enable in LogConfig. This field denotes whether to enable logging for a
 * particular firewall rule. If logging is enabled, logs will be exported to Cloud Logging.
 * @param enableLogging enableLogging or {@code null} for none
 * @return this {@code Firewall}, for call chaining
 */
public Firewall setEnableLogging(java.lang.Boolean enableLogging) {
  this.enableLogging = enableLogging;
  return this;
}
/**
 * [Output Only] The unique identifier for the resource. This identifier is defined by the server.
 * @return value or {@code null} for none
 */
public java.math.BigInteger getId() {
  return id;
}

/**
 * [Output Only] The unique identifier for the resource. This identifier is defined by the server.
 * @param id id or {@code null} for none
 * @return this {@code Firewall}, for call chaining
 */
public Firewall setId(java.math.BigInteger id) {
  this.id = id;
  return this;
}
/**
 * [Output Only] Type of the resource. Always compute#firewall for firewall rules.
 * @return value or {@code null} for none
 */
public java.lang.String getKind() {
  return kind;
}

/**
 * [Output Only] Type of the resource. Always compute#firewall for firewall rules.
 * @param kind kind or {@code null} for none
 * @return this {@code Firewall}, for call chaining
 */
public Firewall setKind(java.lang.String kind) {
  this.kind = kind;
  return this;
}
/**
 * This field denotes the logging options for a particular firewall rule. If logging is enabled,
 * logs will be exported to Cloud Logging.
 * @return value or {@code null} for none
 */
public FirewallLogConfig getLogConfig() {
  return logConfig;
}

/**
 * This field denotes the logging options for a particular firewall rule. If logging is enabled,
 * logs will be exported to Cloud Logging.
 * @param logConfig logConfig or {@code null} for none
 * @return this {@code Firewall}, for call chaining
 */
public Firewall setLogConfig(FirewallLogConfig logConfig) {
  this.logConfig = logConfig;
  return this;
}
/**
 * Name of the resource; provided by the client when the resource is created. The name must be
 * 1-63 characters long, and comply with RFC 1035. Specifically, the name must be 1-63 characters
 * long and match the regular expression `[a-z]([-a-z0-9]*[a-z0-9])?`. The first character must be
 * a lowercase letter, and all following characters (except for the last character) must be a
 * dash, lowercase letter, or digit. The last character must be a lowercase letter or digit.
 * @return value or {@code null} for none
 */
public java.lang.String getName() {
  return name;
}

/**
 * Name of the resource; provided by the client when the resource is created. The name must be
 * 1-63 characters long, and comply with RFC 1035. Specifically, the name must be 1-63 characters
 * long and match the regular expression `[a-z]([-a-z0-9]*[a-z0-9])?`. The first character must be
 * a lowercase letter, and all following characters (except for the last character) must be a
 * dash, lowercase letter, or digit. The last character must be a lowercase letter or digit.
 * @param name name or {@code null} for none
 * @return this {@code Firewall}, for call chaining
 */
public Firewall setName(java.lang.String name) {
  this.name = name;
  return this;
}
/**
 * URL of the network resource for this firewall rule. If not specified when creating a firewall
 * rule, the default network is used:
 *
 * global/networks/default
 *
 * If you choose to specify this field, you can specify the network as a full or partial URL. For
 * example, the following are all valid URLs: -
 * https://www.googleapis.com/compute/v1/projects/myproject/global/networks/my-network -
 * projects/myproject/global/networks/my-network - global/networks/default
 * @return value or {@code null} for none
 */
public java.lang.String getNetwork() {
  return network;
}

/**
 * URL of the network resource for this firewall rule. If not specified when creating a firewall
 * rule, the default network is used:
 *
 * global/networks/default
 *
 * If you choose to specify this field, you can specify the network as a full or partial URL. For
 * example, the following are all valid URLs: -
 * https://www.googleapis.com/compute/v1/projects/myproject/global/networks/my-network -
 * projects/myproject/global/networks/my-network - global/networks/default
 * @param network network or {@code null} for none
 * @return this {@code Firewall}, for call chaining
 */
public Firewall setNetwork(java.lang.String network) {
  this.network = network;
  return this;
}
/**
 * Input only. [Input Only] Additional params passed with the request, but not persisted as part
 * of resource payload.
 * @return value or {@code null} for none
 */
public FirewallParams getParams() {
  return params;
}

/**
 * Input only. [Input Only] Additional params passed with the request, but not persisted as part
 * of resource payload.
 * @param params params or {@code null} for none
 * @return this {@code Firewall}, for call chaining
 */
public Firewall setParams(FirewallParams params) {
  this.params = params;
  return this;
}
/**
 * Priority for this rule. This is an integer between `0` and `65535`, both inclusive. The default
 * value is `1000`. Relative priorities determine which rule takes effect if multiple rules apply.
 * Lower values indicate higher priority. For example, a rule with priority `0` has higher
 * precedence than a rule with priority `1`. DENY rules take precedence over ALLOW rules if they
 * have equal priority. Note that VPC networks have implied rules with a priority of `65535`. To
 * avoid conflicts with the implied rules, use a priority number less than `65535`.
 * @return value or {@code null} for none
 */
public java.lang.Integer getPriority() {
  return priority;
}

/**
 * Priority for this rule. This is an integer between `0` and `65535`, both inclusive. The default
 * value is `1000`. Relative priorities determine which rule takes effect if multiple rules apply.
 * Lower values indicate higher priority. For example, a rule with priority `0` has higher
 * precedence than a rule with priority `1`. DENY rules take precedence over ALLOW rules if they
 * have equal priority. Note that VPC networks have implied rules with a priority of `65535`. To
 * avoid conflicts with the implied rules, use a priority number less than `65535`.
 * @param priority priority or {@code null} for none
 * @return this {@code Firewall}, for call chaining
 */
public Firewall setPriority(java.lang.Integer priority) {
  this.priority = priority;
  return this;
}
/**
 * [Output Only] Server-defined URL for the resource.
 * @return value or {@code null} for none
 */
public java.lang.String getSelfLink() {
  return selfLink;
}

/**
 * [Output Only] Server-defined URL for the resource.
 * @param selfLink selfLink or {@code null} for none
 * @return this {@code Firewall}, for call chaining
 */
public Firewall setSelfLink(java.lang.String selfLink) {
  this.selfLink = selfLink;
  return this;
}
/**
 * [Output Only] Server-defined URL for this resource with the resource id.
 * @return value or {@code null} for none
 */
public java.lang.String getSelfLinkWithId() {
  return selfLinkWithId;
}

/**
 * [Output Only] Server-defined URL for this resource with the resource id.
 * @param selfLinkWithId selfLinkWithId or {@code null} for none
 * @return this {@code Firewall}, for call chaining
 */
public Firewall setSelfLinkWithId(java.lang.String selfLinkWithId) {
  this.selfLinkWithId = selfLinkWithId;
  return this;
}
/**
 * If source ranges are specified, the firewall rule applies only to traffic that has a source IP
 * address in these ranges. These ranges must be expressed in CIDR format. One or both of
 * sourceRanges and sourceTags may be set. If both fields are set, the rule applies to traffic
 * that has a source IP address within sourceRanges OR a source IP from a resource with a matching
 * tag listed in the sourceTags field. The connection does not need to match both fields for the
 * rule to apply. Both IPv4 and IPv6 are supported.
 * @return value or {@code null} for none
 */
public java.util.List<java.lang.String> getSourceRanges() {
  return sourceRanges;
}

/**
 * If source ranges are specified, the firewall rule applies only to traffic that has a source IP
 * address in these ranges. These ranges must be expressed in CIDR format. One or both of
 * sourceRanges and sourceTags may be set. If both fields are set, the rule applies to traffic
 * that has a source IP address within sourceRanges OR a source IP from a resource with a matching
 * tag listed in the sourceTags field. The connection does not need to match both fields for the
 * rule to apply. Both IPv4 and IPv6 are supported.
 * @param sourceRanges sourceRanges or {@code null} for none
 * @return this {@code Firewall}, for call chaining
 */
public Firewall setSourceRanges(java.util.List<java.lang.String> sourceRanges) {
  this.sourceRanges = sourceRanges;
  return this;
}
/**
 * If source service accounts are specified, the firewall rules apply only to traffic originating
 * from an instance with a service account in this list. Source service accounts cannot be used to
 * control traffic to an instance's external IP address because service accounts are associated
 * with an instance, not an IP address. sourceRanges can be set at the same time as
 * sourceServiceAccounts. If both are set, the firewall applies to traffic that has a source IP
 * address within the sourceRanges OR a source IP that belongs to an instance with a service
 * account listed in sourceServiceAccounts. The connection does not need to match both fields for
 * the firewall to apply. sourceServiceAccounts cannot be used at the same time as sourceTags or
 * targetTags.
 * @return value or {@code null} for none
 */
public java.util.List<java.lang.String> getSourceServiceAccounts() {
  return sourceServiceAccounts;
}

/**
 * If source service accounts are specified, the firewall rules apply only to traffic originating
 * from an instance with a service account in this list. Source service accounts cannot be used to
 * control traffic to an instance's external IP address because service accounts are associated
 * with an instance, not an IP address. sourceRanges can be set at the same time as
 * sourceServiceAccounts. If both are set, the firewall applies to traffic that has a source IP
 * address within the sourceRanges OR a source IP that belongs to an instance with a service
 * account listed in sourceServiceAccounts. The connection does not need to match both fields for
 * the firewall to apply. sourceServiceAccounts cannot be used at the same time as sourceTags or
 * targetTags.
 * @param sourceServiceAccounts sourceServiceAccounts or {@code null} for none
 * @return this {@code Firewall}, for call chaining
 */
public Firewall setSourceServiceAccounts(java.util.List<java.lang.String> sourceServiceAccounts) {
  this.sourceServiceAccounts = sourceServiceAccounts;
  return this;
}
/**
 * If source tags are specified, the firewall rule applies only to traffic with source IPs that
 * match the primary network interfaces of VM instances that have the tag and are in the same VPC
 * network. Source tags cannot be used to control traffic to an instance's external IP address;
 * they only apply to traffic between instances in the same virtual network, because tags are
 * associated with instances, not IP addresses. One or both of sourceRanges and sourceTags may be
 * set. If both fields are set, the firewall applies to traffic that has a source IP address
 * within sourceRanges OR a source IP from a resource with a matching tag listed in the sourceTags
 * field. The connection does not need to match both fields for the firewall to apply.
 * @return value or {@code null} for none
 */
public java.util.List<java.lang.String> getSourceTags() {
  return sourceTags;
}

/**
 * If source tags are specified, the firewall rule applies only to traffic with source IPs that
 * match the primary network interfaces of VM instances that have the tag and are in the same VPC
 * network. Source tags cannot be used to control traffic to an instance's external IP address;
 * they only apply to traffic between instances in the same virtual network, because tags are
 * associated with instances, not IP addresses. One or both of sourceRanges and sourceTags may be
 * set. If both fields are set, the firewall applies to traffic that has a source IP address
 * within sourceRanges OR a source IP from a resource with a matching tag listed in the sourceTags
 * field. The connection does not need to match both fields for the firewall to apply.
 * @param sourceTags sourceTags or {@code null} for none
 * @return this {@code Firewall}, for call chaining
 */
public Firewall setSourceTags(java.util.List<java.lang.String> sourceTags) {
  this.sourceTags = sourceTags;
  return this;
}
/**
 * A list of service accounts indicating sets of instances located in the network that may make
 * network connections as specified in allowed[]. targetServiceAccounts cannot be used at the same
 * time as targetTags or sourceTags. If neither targetServiceAccounts nor targetTags are
 * specified, the firewall rule applies to all instances on the specified network.
 * @return value or {@code null} for none
 */
public java.util.List<java.lang.String> getTargetServiceAccounts() {
  return targetServiceAccounts;
}

/**
 * A list of service accounts indicating sets of instances located in the network that may make
 * network connections as specified in allowed[]. targetServiceAccounts cannot be used at the same
 * time as targetTags or sourceTags. If neither targetServiceAccounts nor targetTags are
 * specified, the firewall rule applies to all instances on the specified network.
 * @param targetServiceAccounts targetServiceAccounts or {@code null} for none
 * @return this {@code Firewall}, for call chaining
 */
public Firewall setTargetServiceAccounts(java.util.List<java.lang.String> targetServiceAccounts) {
  this.targetServiceAccounts = targetServiceAccounts;
  return this;
}
/**
 * A list of tags that controls which instances the firewall rule applies to. If targetTags are
 * specified, then the firewall rule applies only to instances in the VPC network that have one of
 * those tags. If no targetTags are specified, the firewall rule applies to all instances on the
 * specified network.
 * @return value or {@code null} for none
 */
public java.util.List<java.lang.String> getTargetTags() {
  return targetTags;
}

/**
 * A list of tags that controls which instances the firewall rule applies to. If targetTags are
 * specified, then the firewall rule applies only to instances in the VPC network that have one of
 * those tags. If no targetTags are specified, the firewall rule applies to all instances on the
 * specified network.
 * @param targetTags targetTags or {@code null} for none
 * @return this {@code Firewall}, for call chaining
 */
public Firewall setTargetTags(java.util.List<java.lang.String> targetTags) {
  this.targetTags = targetTags;
  return this;
}
/** Sets a field by name in the generic JSON map, narrowing the return type for fluent chaining. */
@Override
public Firewall set(String fieldName, Object value) {
  return (Firewall) super.set(fieldName, value);
}

/** Returns a copy of this Firewall; copy semantics follow {@code GenericJson#clone()}. */
@Override
public Firewall clone() {
  return (Firewall) super.clone();
}
/**
 * Model definition for FirewallAllowed: one ALLOW rule (protocol plus optional port list).
 */
public static final class Allowed extends com.google.api.client.json.GenericJson {
  /**
   * The IP protocol to which this rule applies. The protocol type is required when creating a
   * firewall rule. This value can either be one of the following well known protocol strings
   * (tcp, udp, icmp, esp, ah, ipip, sctp) or the IP protocol number.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key("IPProtocol")
  private java.lang.String iPProtocol;

  /**
   * An optional list of ports to which this rule applies. This field is only applicable for the
   * UDP or TCP protocol. Each entry must be either an integer or a range. If not specified, this
   * rule applies to connections through any port.
   *
   * Example inputs include: ["22"], ["80","443"], and ["12345-12349"].
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private java.util.List<java.lang.String> ports;

  /**
   * The IP protocol to which this rule applies. The protocol type is required when creating a
   * firewall rule. This value can either be one of the following well known protocol strings
   * (tcp, udp, icmp, esp, ah, ipip, sctp) or the IP protocol number.
   * @return value or {@code null} for none
   */
  public java.lang.String getIPProtocol() {
    return iPProtocol;
  }

  /**
   * The IP protocol to which this rule applies. The protocol type is required when creating a
   * firewall rule. This value can either be one of the following well known protocol strings
   * (tcp, udp, icmp, esp, ah, ipip, sctp) or the IP protocol number.
   * @param iPProtocol iPProtocol or {@code null} for none
   * @return this {@code Allowed}, for call chaining
   */
  public Allowed setIPProtocol(java.lang.String iPProtocol) {
    this.iPProtocol = iPProtocol;
    return this;
  }

  /**
   * An optional list of ports to which this rule applies. This field is only applicable for the
   * UDP or TCP protocol. Each entry must be either an integer or a range. If not specified, this
   * rule applies to connections through any port.
   *
   * Example inputs include: ["22"], ["80","443"], and ["12345-12349"].
   * @return value or {@code null} for none
   */
  public java.util.List<java.lang.String> getPorts() {
    return ports;
  }

  /**
   * An optional list of ports to which this rule applies. This field is only applicable for the
   * UDP or TCP protocol. Each entry must be either an integer or a range. If not specified, this
   * rule applies to connections through any port.
   *
   * Example inputs include: ["22"], ["80","443"], and ["12345-12349"].
   * @param ports ports or {@code null} for none
   * @return this {@code Allowed}, for call chaining
   */
  public Allowed setPorts(java.util.List<java.lang.String> ports) {
    this.ports = ports;
    return this;
  }

  /** Sets a field by name in the generic JSON map, narrowing the return type for chaining. */
  @Override
  public Allowed set(String fieldName, Object value) {
    return (Allowed) super.set(fieldName, value);
  }

  /** Returns a copy of this Allowed rule; copy semantics follow {@code GenericJson#clone()}. */
  @Override
  public Allowed clone() {
    return (Allowed) super.clone();
  }
}
/**
 * Model definition for FirewallDenied: one DENY rule (protocol plus optional port list).
 */
public static final class Denied extends com.google.api.client.json.GenericJson {
  /**
   * The IP protocol to which this rule applies. The protocol type is required when creating a
   * firewall rule. This value can either be one of the following well known protocol strings
   * (tcp, udp, icmp, esp, ah, ipip, sctp) or the IP protocol number.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key("IPProtocol")
  private java.lang.String iPProtocol;

  /**
   * An optional list of ports to which this rule applies. This field is only applicable for the
   * UDP or TCP protocol. Each entry must be either an integer or a range. If not specified, this
   * rule applies to connections through any port.
   *
   * Example inputs include: ["22"], ["80","443"], and ["12345-12349"].
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private java.util.List<java.lang.String> ports;

  /**
   * The IP protocol to which this rule applies. The protocol type is required when creating a
   * firewall rule. This value can either be one of the following well known protocol strings
   * (tcp, udp, icmp, esp, ah, ipip, sctp) or the IP protocol number.
   * @return value or {@code null} for none
   */
  public java.lang.String getIPProtocol() {
    return iPProtocol;
  }

  /**
   * The IP protocol to which this rule applies. The protocol type is required when creating a
   * firewall rule. This value can either be one of the following well known protocol strings
   * (tcp, udp, icmp, esp, ah, ipip, sctp) or the IP protocol number.
   * @param iPProtocol iPProtocol or {@code null} for none
   * @return this {@code Denied}, for call chaining
   */
  public Denied setIPProtocol(java.lang.String iPProtocol) {
    this.iPProtocol = iPProtocol;
    return this;
  }

  /**
   * An optional list of ports to which this rule applies. This field is only applicable for the
   * UDP or TCP protocol. Each entry must be either an integer or a range. If not specified, this
   * rule applies to connections through any port.
   *
   * Example inputs include: ["22"], ["80","443"], and ["12345-12349"].
   * @return value or {@code null} for none
   */
  public java.util.List<java.lang.String> getPorts() {
    return ports;
  }

  /**
   * An optional list of ports to which this rule applies. This field is only applicable for the
   * UDP or TCP protocol. Each entry must be either an integer or a range. If not specified, this
   * rule applies to connections through any port.
   *
   * Example inputs include: ["22"], ["80","443"], and ["12345-12349"].
   * @param ports ports or {@code null} for none
   * @return this {@code Denied}, for call chaining
   */
  public Denied setPorts(java.util.List<java.lang.String> ports) {
    this.ports = ports;
    return this;
  }

  /** Sets a field by name in the generic JSON map, narrowing the return type for chaining. */
  @Override
  public Denied set(String fieldName, Object value) {
    return (Denied) super.set(fieldName, value);
  }

  /** Returns a copy of this Denied rule; copy semantics follow {@code GenericJson#clone()}. */
  @Override
  public Denied clone() {
    return (Denied) super.clone();
  }
}
}
|
googleapis/google-cloud-java | 36,946 | java-retail/proto-google-cloud-retail-v2beta/src/main/java/com/google/cloud/retail/v2beta/PurgeUserEventsRequest.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/retail/v2beta/purge_config.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.retail.v2beta;
/**
*
*
* <pre>
* Request message for PurgeUserEvents method.
* </pre>
*
* Protobuf type {@code google.cloud.retail.v2beta.PurgeUserEventsRequest}
*/
public final class PurgeUserEventsRequest extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.retail.v2beta.PurgeUserEventsRequest)
PurgeUserEventsRequestOrBuilder {
private static final long serialVersionUID = 0L;
// Use PurgeUserEventsRequest.newBuilder() to construct.
private PurgeUserEventsRequest(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
  super(builder);
}

// Default constructor: string fields start at their proto3 default (empty string).
private PurgeUserEventsRequest() {
  parent_ = "";
  filter_ = "";
}

// Called reflectively by the protobuf runtime to create fresh instances.
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
  return new PurgeUserEventsRequest();
}
/** Returns the protobuf descriptor for this message type. */
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
  return com.google.cloud.retail.v2beta.PurgeConfigProto
      .internal_static_google_cloud_retail_v2beta_PurgeUserEventsRequest_descriptor;
}

/** Wires the generated reflective field accessors to this class and its Builder. */
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
    internalGetFieldAccessorTable() {
  return com.google.cloud.retail.v2beta.PurgeConfigProto
      .internal_static_google_cloud_retail_v2beta_PurgeUserEventsRequest_fieldAccessorTable
      .ensureFieldAccessorsInitialized(
          com.google.cloud.retail.v2beta.PurgeUserEventsRequest.class,
          com.google.cloud.retail.v2beta.PurgeUserEventsRequest.Builder.class);
}
public static final int PARENT_FIELD_NUMBER = 1;
@SuppressWarnings("serial")
private volatile java.lang.Object parent_ = "";
/**
*
*
* <pre>
* Required. The resource name of the catalog under which the events are
* created. The format is
* `projects/${projectId}/locations/global/catalogs/${catalogId}`
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return The parent.
*/
@java.lang.Override
public java.lang.String getParent() {
  // parent_ holds either a String or a ByteString (when freshly parsed from the wire).
  java.lang.Object ref = parent_;
  if (ref instanceof java.lang.String) {
    return (java.lang.String) ref;
  } else {
    // Decode the UTF-8 bytes once and cache the String so later calls skip the conversion.
    com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
    java.lang.String s = bs.toStringUtf8();
    parent_ = s;
    return s;
  }
}
/**
*
*
* <pre>
* Required. The resource name of the catalog under which the events are
* created. The format is
* `projects/${projectId}/locations/global/catalogs/${catalogId}`
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return The bytes for parent.
*/
@java.lang.Override
public com.google.protobuf.ByteString getParentBytes() {
  // parent_ holds either a String or a ByteString; cache the encoded form on first request.
  java.lang.Object ref = parent_;
  if (ref instanceof java.lang.String) {
    com.google.protobuf.ByteString b =
        com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
    parent_ = b;
    return b;
  } else {
    return (com.google.protobuf.ByteString) ref;
  }
}
public static final int FILTER_FIELD_NUMBER = 2;
@SuppressWarnings("serial")
private volatile java.lang.Object filter_ = "";
/**
*
*
* <pre>
* Required. The filter string to specify the events to be deleted with a
* length limit of 5,000 characters. Empty string filter is not allowed. The
* eligible fields for filtering are:
*
* * `eventType`: Double quoted
* [UserEvent.event_type][google.cloud.retail.v2beta.UserEvent.event_type]
* string.
* * `eventTime`: in ISO 8601 "zulu" format.
* * `visitorId`: Double quoted string. Specifying this will delete all
* events associated with a visitor.
* * `userId`: Double quoted string. Specifying this will delete all events
* associated with a user.
*
* Examples:
*
* * Deleting all events in a time range:
* `eventTime > "2012-04-23T18:25:43.511Z"
* eventTime < "2012-04-23T18:30:43.511Z"`
* * Deleting specific eventType in time range:
* `eventTime > "2012-04-23T18:25:43.511Z" eventType = "detail-page-view"`
* * Deleting all events for a specific visitor:
* `visitorId = "visitor1024"`
*
* The filtering fields are assumed to have an implicit AND.
* </pre>
*
* <code>string filter = 2 [(.google.api.field_behavior) = REQUIRED];</code>
*
* @return The filter.
*/
@java.lang.Override
public java.lang.String getFilter() {
  // filter_ holds either a String or a ByteString (when freshly parsed from the wire).
  java.lang.Object ref = filter_;
  if (ref instanceof java.lang.String) {
    return (java.lang.String) ref;
  } else {
    // Decode the UTF-8 bytes once and cache the String so later calls skip the conversion.
    com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
    java.lang.String s = bs.toStringUtf8();
    filter_ = s;
    return s;
  }
}
/**
*
*
* <pre>
* Required. The filter string to specify the events to be deleted with a
* length limit of 5,000 characters. Empty string filter is not allowed. The
* eligible fields for filtering are:
*
* * `eventType`: Double quoted
* [UserEvent.event_type][google.cloud.retail.v2beta.UserEvent.event_type]
* string.
* * `eventTime`: in ISO 8601 "zulu" format.
* * `visitorId`: Double quoted string. Specifying this will delete all
* events associated with a visitor.
* * `userId`: Double quoted string. Specifying this will delete all events
* associated with a user.
*
* Examples:
*
* * Deleting all events in a time range:
* `eventTime > "2012-04-23T18:25:43.511Z"
* eventTime < "2012-04-23T18:30:43.511Z"`
* * Deleting specific eventType in time range:
* `eventTime > "2012-04-23T18:25:43.511Z" eventType = "detail-page-view"`
* * Deleting all events for a specific visitor:
* `visitorId = "visitor1024"`
*
* The filtering fields are assumed to have an implicit AND.
* </pre>
*
* <code>string filter = 2 [(.google.api.field_behavior) = REQUIRED];</code>
*
* @return The bytes for filter.
*/
@java.lang.Override
public com.google.protobuf.ByteString getFilterBytes() {
  // filter_ holds either a String or a ByteString; cache the encoded form on first request.
  java.lang.Object ref = filter_;
  if (ref instanceof java.lang.String) {
    com.google.protobuf.ByteString b =
        com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
    filter_ = b;
    return b;
  } else {
    return (com.google.protobuf.ByteString) ref;
  }
}
public static final int FORCE_FIELD_NUMBER = 3;
private boolean force_ = false;
/**
*
*
* <pre>
* Actually perform the purge.
* If `force` is set to false, the method will return the expected purge count
* without deleting any user events.
* </pre>
*
* <code>bool force = 3;</code>
*
* @return The force.
*/
// Returns the force flag; per the field's doc, false means a dry run that only reports
// the expected purge count without deleting events.
@java.lang.Override
public boolean getForce() {
  return force_;
}
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
  // memoizedIsInitialized sentinel values: 1 = initialized, 0 = not, -1 = not yet computed.
  byte isInitialized = memoizedIsInitialized;
  if (isInitialized == 1) return true;
  if (isInitialized == 0) return false;
  // This message has no required fields, so it is always initialized; memoize the result.
  memoizedIsInitialized = 1;
  return true;
}
@java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(parent_)) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 1, parent_);
}
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(filter_)) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 2, filter_);
}
if (force_ != false) {
output.writeBool(3, force_);
}
getUnknownFields().writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(parent_)) {
size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, parent_);
}
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(filter_)) {
size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, filter_);
}
if (force_ != false) {
size += com.google.protobuf.CodedOutputStream.computeBoolSize(3, force_);
}
size += getUnknownFields().getSerializedSize();
memoizedSize = size;
return size;
}
  /**
   * Field-wise value equality over parent, filter, force, and unknown fields.
   * Falls back to {@code super.equals} (reference/Message equality) for
   * non-PurgeUserEventsRequest arguments.
   */
  @java.lang.Override
  public boolean equals(final java.lang.Object obj) {
    if (obj == this) {
      return true;
    }
    if (!(obj instanceof com.google.cloud.retail.v2beta.PurgeUserEventsRequest)) {
      return super.equals(obj);
    }
    com.google.cloud.retail.v2beta.PurgeUserEventsRequest other =
        (com.google.cloud.retail.v2beta.PurgeUserEventsRequest) obj;
    if (!getParent().equals(other.getParent())) return false;
    if (!getFilter().equals(other.getFilter())) return false;
    if (getForce() != other.getForce()) return false;
    if (!getUnknownFields().equals(other.getUnknownFields())) return false;
    return true;
  }
  /**
   * Memoized hash consistent with {@link #equals}: mixes the descriptor, each
   * field number/value pair, and the unknown field set using protoc's standard
   * (19/37/53/29) multiplier scheme.
   */
  @java.lang.Override
  public int hashCode() {
    if (memoizedHashCode != 0) {
      return memoizedHashCode;
    }
    int hash = 41;
    hash = (19 * hash) + getDescriptor().hashCode();
    hash = (37 * hash) + PARENT_FIELD_NUMBER;
    hash = (53 * hash) + getParent().hashCode();
    hash = (37 * hash) + FILTER_FIELD_NUMBER;
    hash = (53 * hash) + getFilter().hashCode();
    hash = (37 * hash) + FORCE_FIELD_NUMBER;
    hash = (53 * hash) + com.google.protobuf.Internal.hashBoolean(getForce());
    hash = (29 * hash) + getUnknownFields().hashCode();
    memoizedHashCode = hash;
    return hash;
  }
  // ---------------------------------------------------------------------------
  // Static parsing entry points. All overloads delegate to the singleton PARSER;
  // the stream-based overloads route through GeneratedMessageV3 helpers so that
  // raw IOExceptions propagate instead of being wrapped.
  // ---------------------------------------------------------------------------
  public static com.google.cloud.retail.v2beta.PurgeUserEventsRequest parseFrom(
      java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.cloud.retail.v2beta.PurgeUserEventsRequest parseFrom(
      java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.cloud.retail.v2beta.PurgeUserEventsRequest parseFrom(
      com.google.protobuf.ByteString data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.cloud.retail.v2beta.PurgeUserEventsRequest parseFrom(
      com.google.protobuf.ByteString data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.cloud.retail.v2beta.PurgeUserEventsRequest parseFrom(byte[] data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.cloud.retail.v2beta.PurgeUserEventsRequest parseFrom(
      byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.cloud.retail.v2beta.PurgeUserEventsRequest parseFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }
  public static com.google.cloud.retail.v2beta.PurgeUserEventsRequest parseFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }
  // Delimited variants read a leading varint length prefix before the message
  // body, allowing several messages to be read back-to-back from one stream.
  public static com.google.cloud.retail.v2beta.PurgeUserEventsRequest parseDelimitedFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
  }
  public static com.google.cloud.retail.v2beta.PurgeUserEventsRequest parseDelimitedFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
        PARSER, input, extensionRegistry);
  }
  public static com.google.cloud.retail.v2beta.PurgeUserEventsRequest parseFrom(
      com.google.protobuf.CodedInputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }
  public static com.google.cloud.retail.v2beta.PurgeUserEventsRequest parseFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }
  // Builder factory methods. newBuilder() always derives from DEFAULT_INSTANCE;
  // toBuilder() avoids a redundant mergeFrom when called on the default instance.
  @java.lang.Override
  public Builder newBuilderForType() {
    return newBuilder();
  }
  public static Builder newBuilder() {
    return DEFAULT_INSTANCE.toBuilder();
  }
  public static Builder newBuilder(
      com.google.cloud.retail.v2beta.PurgeUserEventsRequest prototype) {
    return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
  }
  @java.lang.Override
  public Builder toBuilder() {
    return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
  }
  @java.lang.Override
  protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
    Builder builder = new Builder(parent);
    return builder;
  }
  /**
   *
   *
   * <pre>
   * Request message for PurgeUserEvents method.
   * </pre>
   *
   * Protobuf type {@code google.cloud.retail.v2beta.PurgeUserEventsRequest}
   */
  public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
      implements
      // @@protoc_insertion_point(builder_implements:google.cloud.retail.v2beta.PurgeUserEventsRequest)
      com.google.cloud.retail.v2beta.PurgeUserEventsRequestOrBuilder {
    // Mutable builder for PurgeUserEventsRequest. Explicitly-set fields are
    // tracked in bitField0_ (bit 0 = parent, bit 1 = filter, bit 2 = force);
    // buildPartial0 copies only those fields into the immutable message.
    public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
      return com.google.cloud.retail.v2beta.PurgeConfigProto
          .internal_static_google_cloud_retail_v2beta_PurgeUserEventsRequest_descriptor;
    }
    @java.lang.Override
    protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return com.google.cloud.retail.v2beta.PurgeConfigProto
          .internal_static_google_cloud_retail_v2beta_PurgeUserEventsRequest_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              com.google.cloud.retail.v2beta.PurgeUserEventsRequest.class,
              com.google.cloud.retail.v2beta.PurgeUserEventsRequest.Builder.class);
    }
    // Construct using com.google.cloud.retail.v2beta.PurgeUserEventsRequest.newBuilder()
    private Builder() {}
    private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
      super(parent);
    }
    // Resets every field to its proto3 default and clears the set-field bits.
    @java.lang.Override
    public Builder clear() {
      super.clear();
      bitField0_ = 0;
      parent_ = "";
      filter_ = "";
      force_ = false;
      return this;
    }
    @java.lang.Override
    public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
      return com.google.cloud.retail.v2beta.PurgeConfigProto
          .internal_static_google_cloud_retail_v2beta_PurgeUserEventsRequest_descriptor;
    }
    @java.lang.Override
    public com.google.cloud.retail.v2beta.PurgeUserEventsRequest getDefaultInstanceForType() {
      return com.google.cloud.retail.v2beta.PurgeUserEventsRequest.getDefaultInstance();
    }
    @java.lang.Override
    public com.google.cloud.retail.v2beta.PurgeUserEventsRequest build() {
      com.google.cloud.retail.v2beta.PurgeUserEventsRequest result = buildPartial();
      if (!result.isInitialized()) {
        throw newUninitializedMessageException(result);
      }
      return result;
    }
    @java.lang.Override
    public com.google.cloud.retail.v2beta.PurgeUserEventsRequest buildPartial() {
      com.google.cloud.retail.v2beta.PurgeUserEventsRequest result =
          new com.google.cloud.retail.v2beta.PurgeUserEventsRequest(this);
      if (bitField0_ != 0) {
        buildPartial0(result);
      }
      onBuilt();
      return result;
    }
    // Copies only the fields whose bits are set into the freshly built message.
    private void buildPartial0(com.google.cloud.retail.v2beta.PurgeUserEventsRequest result) {
      int from_bitField0_ = bitField0_;
      if (((from_bitField0_ & 0x00000001) != 0)) {
        result.parent_ = parent_;
      }
      if (((from_bitField0_ & 0x00000002) != 0)) {
        result.filter_ = filter_;
      }
      if (((from_bitField0_ & 0x00000004) != 0)) {
        result.force_ = force_;
      }
    }
    @java.lang.Override
    public Builder clone() {
      return super.clone();
    }
    @java.lang.Override
    public Builder setField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.setField(field, value);
    }
    @java.lang.Override
    public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
      return super.clearField(field);
    }
    @java.lang.Override
    public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
      return super.clearOneof(oneof);
    }
    @java.lang.Override
    public Builder setRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
      return super.setRepeatedField(field, index, value);
    }
    @java.lang.Override
    public Builder addRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.addRepeatedField(field, value);
    }
    // Dynamic merge: dispatches to the typed overload when possible.
    @java.lang.Override
    public Builder mergeFrom(com.google.protobuf.Message other) {
      if (other instanceof com.google.cloud.retail.v2beta.PurgeUserEventsRequest) {
        return mergeFrom((com.google.cloud.retail.v2beta.PurgeUserEventsRequest) other);
      } else {
        super.mergeFrom(other);
        return this;
      }
    }
    // Typed merge: non-default fields of `other` overwrite this builder's values.
    public Builder mergeFrom(com.google.cloud.retail.v2beta.PurgeUserEventsRequest other) {
      if (other == com.google.cloud.retail.v2beta.PurgeUserEventsRequest.getDefaultInstance())
        return this;
      if (!other.getParent().isEmpty()) {
        parent_ = other.parent_;
        bitField0_ |= 0x00000001;
        onChanged();
      }
      if (!other.getFilter().isEmpty()) {
        filter_ = other.filter_;
        bitField0_ |= 0x00000002;
        onChanged();
      }
      if (other.getForce() != false) {
        setForce(other.getForce());
      }
      this.mergeUnknownFields(other.getUnknownFields());
      onChanged();
      return this;
    }
    @java.lang.Override
    public final boolean isInitialized() {
      return true;
    }
    // Wire-format parse loop: reads tag/value pairs until EOF (tag 0) or an
    // end-group tag; unrecognized fields are preserved via parseUnknownField.
    @java.lang.Override
    public Builder mergeFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      if (extensionRegistry == null) {
        throw new java.lang.NullPointerException();
      }
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              done = true;
              break;
            case 10:
              {
                parent_ = input.readStringRequireUtf8();
                bitField0_ |= 0x00000001;
                break;
              } // case 10
            case 18:
              {
                filter_ = input.readStringRequireUtf8();
                bitField0_ |= 0x00000002;
                break;
              } // case 18
            case 24:
              {
                force_ = input.readBool();
                bitField0_ |= 0x00000004;
                break;
              } // case 24
            default:
              {
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
          } // switch (tag)
        } // while (!done)
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.unwrapIOException();
      } finally {
        onChanged();
      } // finally
      return this;
    }
    // Bit i set <=> field i+1 has been explicitly assigned on this builder.
    private int bitField0_;
    private java.lang.Object parent_ = "";
    /**
     *
     *
     * <pre>
     * Required. The resource name of the catalog under which the events are
     * created. The format is
     * `projects/${projectId}/locations/global/catalogs/${catalogId}`
     * </pre>
     *
     * <code>
     * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
     * </code>
     *
     * @return The parent.
     */
    public java.lang.String getParent() {
      java.lang.Object ref = parent_;
      if (!(ref instanceof java.lang.String)) {
        // Decode and memoize the String form of the cached ByteString.
        com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        parent_ = s;
        return s;
      } else {
        return (java.lang.String) ref;
      }
    }
    /**
     *
     *
     * <pre>
     * Required. The resource name of the catalog under which the events are
     * created. The format is
     * `projects/${projectId}/locations/global/catalogs/${catalogId}`
     * </pre>
     *
     * <code>
     * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
     * </code>
     *
     * @return The bytes for parent.
     */
    public com.google.protobuf.ByteString getParentBytes() {
      java.lang.Object ref = parent_;
      if (ref instanceof String) {
        com.google.protobuf.ByteString b =
            com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
        parent_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }
    /**
     *
     *
     * <pre>
     * Required. The resource name of the catalog under which the events are
     * created. The format is
     * `projects/${projectId}/locations/global/catalogs/${catalogId}`
     * </pre>
     *
     * <code>
     * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
     * </code>
     *
     * @param value The parent to set.
     * @return This builder for chaining.
     */
    public Builder setParent(java.lang.String value) {
      if (value == null) {
        throw new NullPointerException();
      }
      parent_ = value;
      bitField0_ |= 0x00000001;
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Required. The resource name of the catalog under which the events are
     * created. The format is
     * `projects/${projectId}/locations/global/catalogs/${catalogId}`
     * </pre>
     *
     * <code>
     * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
     * </code>
     *
     * @return This builder for chaining.
     */
    public Builder clearParent() {
      parent_ = getDefaultInstance().getParent();
      bitField0_ = (bitField0_ & ~0x00000001);
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Required. The resource name of the catalog under which the events are
     * created. The format is
     * `projects/${projectId}/locations/global/catalogs/${catalogId}`
     * </pre>
     *
     * <code>
     * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
     * </code>
     *
     * @param value The bytes for parent to set.
     * @return This builder for chaining.
     */
    public Builder setParentBytes(com.google.protobuf.ByteString value) {
      if (value == null) {
        throw new NullPointerException();
      }
      checkByteStringIsUtf8(value);
      parent_ = value;
      bitField0_ |= 0x00000001;
      onChanged();
      return this;
    }
    private java.lang.Object filter_ = "";
    /**
     *
     *
     * <pre>
     * Required. The filter string to specify the events to be deleted with a
     * length limit of 5,000 characters. Empty string filter is not allowed. The
     * eligible fields for filtering are:
     *
     * * `eventType`: Double quoted
     * [UserEvent.event_type][google.cloud.retail.v2beta.UserEvent.event_type]
     * string.
     * * `eventTime`: in ISO 8601 "zulu" format.
     * * `visitorId`: Double quoted string. Specifying this will delete all
     * events associated with a visitor.
     * * `userId`: Double quoted string. Specifying this will delete all events
     * associated with a user.
     *
     * Examples:
     *
     * * Deleting all events in a time range:
     * `eventTime > "2012-04-23T18:25:43.511Z"
     * eventTime < "2012-04-23T18:30:43.511Z"`
     * * Deleting specific eventType in time range:
     * `eventTime > "2012-04-23T18:25:43.511Z" eventType = "detail-page-view"`
     * * Deleting all events for a specific visitor:
     * `visitorId = "visitor1024"`
     *
     * The filtering fields are assumed to have an implicit AND.
     * </pre>
     *
     * <code>string filter = 2 [(.google.api.field_behavior) = REQUIRED];</code>
     *
     * @return The filter.
     */
    public java.lang.String getFilter() {
      java.lang.Object ref = filter_;
      if (!(ref instanceof java.lang.String)) {
        com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        filter_ = s;
        return s;
      } else {
        return (java.lang.String) ref;
      }
    }
    /**
     *
     *
     * <pre>
     * Required. The filter string to specify the events to be deleted with a
     * length limit of 5,000 characters. Empty string filter is not allowed. The
     * eligible fields for filtering are:
     *
     * * `eventType`: Double quoted
     * [UserEvent.event_type][google.cloud.retail.v2beta.UserEvent.event_type]
     * string.
     * * `eventTime`: in ISO 8601 "zulu" format.
     * * `visitorId`: Double quoted string. Specifying this will delete all
     * events associated with a visitor.
     * * `userId`: Double quoted string. Specifying this will delete all events
     * associated with a user.
     *
     * Examples:
     *
     * * Deleting all events in a time range:
     * `eventTime > "2012-04-23T18:25:43.511Z"
     * eventTime < "2012-04-23T18:30:43.511Z"`
     * * Deleting specific eventType in time range:
     * `eventTime > "2012-04-23T18:25:43.511Z" eventType = "detail-page-view"`
     * * Deleting all events for a specific visitor:
     * `visitorId = "visitor1024"`
     *
     * The filtering fields are assumed to have an implicit AND.
     * </pre>
     *
     * <code>string filter = 2 [(.google.api.field_behavior) = REQUIRED];</code>
     *
     * @return The bytes for filter.
     */
    public com.google.protobuf.ByteString getFilterBytes() {
      java.lang.Object ref = filter_;
      if (ref instanceof String) {
        com.google.protobuf.ByteString b =
            com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
        filter_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }
    /**
     *
     *
     * <pre>
     * Required. The filter string to specify the events to be deleted with a
     * length limit of 5,000 characters. Empty string filter is not allowed. The
     * eligible fields for filtering are:
     *
     * * `eventType`: Double quoted
     * [UserEvent.event_type][google.cloud.retail.v2beta.UserEvent.event_type]
     * string.
     * * `eventTime`: in ISO 8601 "zulu" format.
     * * `visitorId`: Double quoted string. Specifying this will delete all
     * events associated with a visitor.
     * * `userId`: Double quoted string. Specifying this will delete all events
     * associated with a user.
     *
     * Examples:
     *
     * * Deleting all events in a time range:
     * `eventTime > "2012-04-23T18:25:43.511Z"
     * eventTime < "2012-04-23T18:30:43.511Z"`
     * * Deleting specific eventType in time range:
     * `eventTime > "2012-04-23T18:25:43.511Z" eventType = "detail-page-view"`
     * * Deleting all events for a specific visitor:
     * `visitorId = "visitor1024"`
     *
     * The filtering fields are assumed to have an implicit AND.
     * </pre>
     *
     * <code>string filter = 2 [(.google.api.field_behavior) = REQUIRED];</code>
     *
     * @param value The filter to set.
     * @return This builder for chaining.
     */
    public Builder setFilter(java.lang.String value) {
      if (value == null) {
        throw new NullPointerException();
      }
      filter_ = value;
      bitField0_ |= 0x00000002;
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Required. The filter string to specify the events to be deleted with a
     * length limit of 5,000 characters. Empty string filter is not allowed. The
     * eligible fields for filtering are:
     *
     * * `eventType`: Double quoted
     * [UserEvent.event_type][google.cloud.retail.v2beta.UserEvent.event_type]
     * string.
     * * `eventTime`: in ISO 8601 "zulu" format.
     * * `visitorId`: Double quoted string. Specifying this will delete all
     * events associated with a visitor.
     * * `userId`: Double quoted string. Specifying this will delete all events
     * associated with a user.
     *
     * Examples:
     *
     * * Deleting all events in a time range:
     * `eventTime > "2012-04-23T18:25:43.511Z"
     * eventTime < "2012-04-23T18:30:43.511Z"`
     * * Deleting specific eventType in time range:
     * `eventTime > "2012-04-23T18:25:43.511Z" eventType = "detail-page-view"`
     * * Deleting all events for a specific visitor:
     * `visitorId = "visitor1024"`
     *
     * The filtering fields are assumed to have an implicit AND.
     * </pre>
     *
     * <code>string filter = 2 [(.google.api.field_behavior) = REQUIRED];</code>
     *
     * @return This builder for chaining.
     */
    public Builder clearFilter() {
      filter_ = getDefaultInstance().getFilter();
      bitField0_ = (bitField0_ & ~0x00000002);
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Required. The filter string to specify the events to be deleted with a
     * length limit of 5,000 characters. Empty string filter is not allowed. The
     * eligible fields for filtering are:
     *
     * * `eventType`: Double quoted
     * [UserEvent.event_type][google.cloud.retail.v2beta.UserEvent.event_type]
     * string.
     * * `eventTime`: in ISO 8601 "zulu" format.
     * * `visitorId`: Double quoted string. Specifying this will delete all
     * events associated with a visitor.
     * * `userId`: Double quoted string. Specifying this will delete all events
     * associated with a user.
     *
     * Examples:
     *
     * * Deleting all events in a time range:
     * `eventTime > "2012-04-23T18:25:43.511Z"
     * eventTime < "2012-04-23T18:30:43.511Z"`
     * * Deleting specific eventType in time range:
     * `eventTime > "2012-04-23T18:25:43.511Z" eventType = "detail-page-view"`
     * * Deleting all events for a specific visitor:
     * `visitorId = "visitor1024"`
     *
     * The filtering fields are assumed to have an implicit AND.
     * </pre>
     *
     * <code>string filter = 2 [(.google.api.field_behavior) = REQUIRED];</code>
     *
     * @param value The bytes for filter to set.
     * @return This builder for chaining.
     */
    public Builder setFilterBytes(com.google.protobuf.ByteString value) {
      if (value == null) {
        throw new NullPointerException();
      }
      checkByteStringIsUtf8(value);
      filter_ = value;
      bitField0_ |= 0x00000002;
      onChanged();
      return this;
    }
    private boolean force_;
    /**
     *
     *
     * <pre>
     * Actually perform the purge.
     * If `force` is set to false, the method will return the expected purge count
     * without deleting any user events.
     * </pre>
     *
     * <code>bool force = 3;</code>
     *
     * @return The force.
     */
    @java.lang.Override
    public boolean getForce() {
      return force_;
    }
    /**
     *
     *
     * <pre>
     * Actually perform the purge.
     * If `force` is set to false, the method will return the expected purge count
     * without deleting any user events.
     * </pre>
     *
     * <code>bool force = 3;</code>
     *
     * @param value The force to set.
     * @return This builder for chaining.
     */
    public Builder setForce(boolean value) {
      force_ = value;
      bitField0_ |= 0x00000004;
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Actually perform the purge.
     * If `force` is set to false, the method will return the expected purge count
     * without deleting any user events.
     * </pre>
     *
     * <code>bool force = 3;</code>
     *
     * @return This builder for chaining.
     */
    public Builder clearForce() {
      bitField0_ = (bitField0_ & ~0x00000004);
      force_ = false;
      onChanged();
      return this;
    }
    @java.lang.Override
    public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.setUnknownFields(unknownFields);
    }
    @java.lang.Override
    public final Builder mergeUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.mergeUnknownFields(unknownFields);
    }
    // @@protoc_insertion_point(builder_scope:google.cloud.retail.v2beta.PurgeUserEventsRequest)
  }
  // @@protoc_insertion_point(class_scope:google.cloud.retail.v2beta.PurgeUserEventsRequest)
  // Shared all-defaults singleton; also the identity used by toBuilder()/merge.
  private static final com.google.cloud.retail.v2beta.PurgeUserEventsRequest DEFAULT_INSTANCE;
  static {
    DEFAULT_INSTANCE = new com.google.cloud.retail.v2beta.PurgeUserEventsRequest();
  }
  public static com.google.cloud.retail.v2beta.PurgeUserEventsRequest getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }
  // Singleton parser: parses via a fresh Builder and attaches the partially
  // parsed message to any InvalidProtocolBufferException it throws.
  private static final com.google.protobuf.Parser<PurgeUserEventsRequest> PARSER =
      new com.google.protobuf.AbstractParser<PurgeUserEventsRequest>() {
        @java.lang.Override
        public PurgeUserEventsRequest parsePartialFrom(
            com.google.protobuf.CodedInputStream input,
            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
            throws com.google.protobuf.InvalidProtocolBufferException {
          Builder builder = newBuilder();
          try {
            builder.mergeFrom(input, extensionRegistry);
          } catch (com.google.protobuf.InvalidProtocolBufferException e) {
            throw e.setUnfinishedMessage(builder.buildPartial());
          } catch (com.google.protobuf.UninitializedMessageException e) {
            throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
          } catch (java.io.IOException e) {
            throw new com.google.protobuf.InvalidProtocolBufferException(e)
                .setUnfinishedMessage(builder.buildPartial());
          }
          return builder.buildPartial();
        }
      };
  public static com.google.protobuf.Parser<PurgeUserEventsRequest> parser() {
    return PARSER;
  }
  @java.lang.Override
  public com.google.protobuf.Parser<PurgeUserEventsRequest> getParserForType() {
    return PARSER;
  }
  @java.lang.Override
  public com.google.cloud.retail.v2beta.PurgeUserEventsRequest getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }
}
// ==== End of generated file: PurgeUserEventsRequest.java (googleapis/google-cloud-java) ====
// ==== Begin generated file: java-contact-center-insights/proto-google-cloud-contact-center-insights-v1/src/main/java/com/google/cloud/contactcenterinsights/v1/IssueModelResult.java ====
/*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/contactcenterinsights/v1/resources.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.contactcenterinsights.v1;
/**
*
*
* <pre>
* Issue Modeling result on a conversation.
* </pre>
*
* Protobuf type {@code google.cloud.contactcenterinsights.v1.IssueModelResult}
*/
public final class IssueModelResult extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.contactcenterinsights.v1.IssueModelResult)
IssueModelResultOrBuilder {
  private static final long serialVersionUID = 0L;
  // Use IssueModelResult.newBuilder() to construct.
  private IssueModelResult(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
    super(builder);
  }
  // No-arg constructor used only for DEFAULT_INSTANCE / reflection; seeds
  // proto3 defaults (empty string, empty repeated list).
  private IssueModelResult() {
    issueModel_ = "";
    issues_ = java.util.Collections.emptyList();
  }
  @java.lang.Override
  @SuppressWarnings({"unused"})
  protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
    return new IssueModelResult();
  }
  public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
    return com.google.cloud.contactcenterinsights.v1.ResourcesProto
        .internal_static_google_cloud_contactcenterinsights_v1_IssueModelResult_descriptor;
  }
  @java.lang.Override
  protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internalGetFieldAccessorTable() {
    return com.google.cloud.contactcenterinsights.v1.ResourcesProto
        .internal_static_google_cloud_contactcenterinsights_v1_IssueModelResult_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            com.google.cloud.contactcenterinsights.v1.IssueModelResult.class,
            com.google.cloud.contactcenterinsights.v1.IssueModelResult.Builder.class);
  }
  // Wire-format field number of the `issue_model` field (tag 1 in the .proto).
  public static final int ISSUE_MODEL_FIELD_NUMBER = 1;
  @SuppressWarnings("serial")
  // Holds either a String or a ByteString; accessors convert and memoize lazily.
  private volatile java.lang.Object issueModel_ = "";
  /**
   *
   *
   * <pre>
   * Issue model that generates the result.
   * Format: projects/{project}/locations/{location}/issueModels/{issue_model}
   * </pre>
   *
   * <code>string issue_model = 1 [(.google.api.resource_reference) = { ... }</code>
   *
   * @return The issueModel.
   */
  @java.lang.Override
  public java.lang.String getIssueModel() {
    java.lang.Object ref = issueModel_;
    if (ref instanceof java.lang.String) {
      return (java.lang.String) ref;
    } else {
      // Decode the cached ByteString once and memoize the String form.
      com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
      java.lang.String s = bs.toStringUtf8();
      issueModel_ = s;
      return s;
    }
  }
  /**
   *
   *
   * <pre>
   * Issue model that generates the result.
   * Format: projects/{project}/locations/{location}/issueModels/{issue_model}
   * </pre>
   *
   * <code>string issue_model = 1 [(.google.api.resource_reference) = { ... }</code>
   *
   * @return The bytes for issueModel.
   */
  @java.lang.Override
  public com.google.protobuf.ByteString getIssueModelBytes() {
    java.lang.Object ref = issueModel_;
    if (ref instanceof java.lang.String) {
      com.google.protobuf.ByteString b =
          com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
      issueModel_ = b;
      return b;
    } else {
      return (com.google.protobuf.ByteString) ref;
    }
  }
  // Wire-format field number of the repeated `issues` field (tag 2 in the .proto).
  public static final int ISSUES_FIELD_NUMBER = 2;
  @SuppressWarnings("serial")
  private java.util.List<com.google.cloud.contactcenterinsights.v1.IssueAssignment> issues_;
  /**
   *
   *
   * <pre>
   * All the matched issues.
   * </pre>
   *
   * <code>repeated .google.cloud.contactcenterinsights.v1.IssueAssignment issues = 2;</code>
   */
  @java.lang.Override
  public java.util.List<com.google.cloud.contactcenterinsights.v1.IssueAssignment> getIssuesList() {
    return issues_;
  }
  /**
   *
   *
   * <pre>
   * All the matched issues.
   * </pre>
   *
   * <code>repeated .google.cloud.contactcenterinsights.v1.IssueAssignment issues = 2;</code>
   */
  @java.lang.Override
  public java.util.List<
          ? extends com.google.cloud.contactcenterinsights.v1.IssueAssignmentOrBuilder>
      getIssuesOrBuilderList() {
    return issues_;
  }
  /**
   *
   *
   * <pre>
   * All the matched issues.
   * </pre>
   *
   * <code>repeated .google.cloud.contactcenterinsights.v1.IssueAssignment issues = 2;</code>
   */
  @java.lang.Override
  public int getIssuesCount() {
    return issues_.size();
  }
  /**
   *
   *
   * <pre>
   * All the matched issues.
   * </pre>
   *
   * <code>repeated .google.cloud.contactcenterinsights.v1.IssueAssignment issues = 2;</code>
   */
  @java.lang.Override
  public com.google.cloud.contactcenterinsights.v1.IssueAssignment getIssues(int index) {
    return issues_.get(index);
  }
  /**
   *
   *
   * <pre>
   * All the matched issues.
   * </pre>
   *
   * <code>repeated .google.cloud.contactcenterinsights.v1.IssueAssignment issues = 2;</code>
   */
  @java.lang.Override
  public com.google.cloud.contactcenterinsights.v1.IssueAssignmentOrBuilder getIssuesOrBuilder(
      int index) {
    return issues_.get(index);
  }
  // Tri-state initialization cache: -1 = not yet computed, 0 = not initialized,
  // 1 = initialized. This message has no required fields, so it is always 1.
  private byte memoizedIsInitialized = -1;
  @java.lang.Override
  public final boolean isInitialized() {
    byte isInitialized = memoizedIsInitialized;
    if (isInitialized == 1) return true;
    if (isInitialized == 0) return false;
    memoizedIsInitialized = 1;
    return true;
  }
  /**
   * Serializes this message: issue_model is skipped when empty (proto3
   * default), each repeated issue is written as field 2, and unknown fields
   * captured at parse time are re-emitted last.
   */
  @java.lang.Override
  public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(issueModel_)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 1, issueModel_);
    }
    for (int i = 0; i < issues_.size(); i++) {
      output.writeMessage(2, issues_.get(i));
    }
    getUnknownFields().writeTo(output);
  }
  /**
   * Computes (and memoizes in {@code memoizedSize}) the serialized byte size,
   * mirroring exactly what {@link #writeTo} would emit.
   */
  @java.lang.Override
  public int getSerializedSize() {
    int size = memoizedSize;
    if (size != -1) return size;
    size = 0;
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(issueModel_)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, issueModel_);
    }
    for (int i = 0; i < issues_.size(); i++) {
      size += com.google.protobuf.CodedOutputStream.computeMessageSize(2, issues_.get(i));
    }
    size += getUnknownFields().getSerializedSize();
    memoizedSize = size;
    return size;
  }
  /**
   * Field-wise value equality over issue_model, the issues list, and unknown
   * fields; falls back to {@code super.equals} for other argument types.
   */
  @java.lang.Override
  public boolean equals(final java.lang.Object obj) {
    if (obj == this) {
      return true;
    }
    if (!(obj instanceof com.google.cloud.contactcenterinsights.v1.IssueModelResult)) {
      return super.equals(obj);
    }
    com.google.cloud.contactcenterinsights.v1.IssueModelResult other =
        (com.google.cloud.contactcenterinsights.v1.IssueModelResult) obj;
    if (!getIssueModel().equals(other.getIssueModel())) return false;
    if (!getIssuesList().equals(other.getIssuesList())) return false;
    if (!getUnknownFields().equals(other.getUnknownFields())) return false;
    return true;
  }
  /**
   * Memoized hash consistent with {@link #equals}; the repeated field only
   * contributes when non-empty, matching protoc's standard scheme.
   */
  @java.lang.Override
  public int hashCode() {
    if (memoizedHashCode != 0) {
      return memoizedHashCode;
    }
    int hash = 41;
    hash = (19 * hash) + getDescriptor().hashCode();
    hash = (37 * hash) + ISSUE_MODEL_FIELD_NUMBER;
    hash = (53 * hash) + getIssueModel().hashCode();
    if (getIssuesCount() > 0) {
      hash = (37 * hash) + ISSUES_FIELD_NUMBER;
      hash = (53 * hash) + getIssuesList().hashCode();
    }
    hash = (29 * hash) + getUnknownFields().hashCode();
    memoizedHashCode = hash;
    return hash;
  }
  // Standard generated parse entry points. All overloads delegate to the
  // shared PARSER; stream variants go through GeneratedMessageV3 helpers so
  // java.io.IOException is surfaced instead of being wrapped.
  public static com.google.cloud.contactcenterinsights.v1.IssueModelResult parseFrom(
      java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.cloud.contactcenterinsights.v1.IssueModelResult parseFrom(
      java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.cloud.contactcenterinsights.v1.IssueModelResult parseFrom(
      com.google.protobuf.ByteString data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.cloud.contactcenterinsights.v1.IssueModelResult parseFrom(
      com.google.protobuf.ByteString data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.cloud.contactcenterinsights.v1.IssueModelResult parseFrom(byte[] data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.cloud.contactcenterinsights.v1.IssueModelResult parseFrom(
      byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.cloud.contactcenterinsights.v1.IssueModelResult parseFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }
  public static com.google.cloud.contactcenterinsights.v1.IssueModelResult parseFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }
  // Delimited variants read a leading varint length, allowing several messages
  // to be streamed back-to-back on one InputStream.
  public static com.google.cloud.contactcenterinsights.v1.IssueModelResult parseDelimitedFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
  }
  public static com.google.cloud.contactcenterinsights.v1.IssueModelResult parseDelimitedFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
        PARSER, input, extensionRegistry);
  }
  public static com.google.cloud.contactcenterinsights.v1.IssueModelResult parseFrom(
      com.google.protobuf.CodedInputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }
  public static com.google.cloud.contactcenterinsights.v1.IssueModelResult parseFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }
  // Builder factories: newBuilder() starts from the default instance,
  // newBuilder(prototype) pre-populates from an existing message, and
  // toBuilder() avoids a redundant mergeFrom when called on the default.
  @java.lang.Override
  public Builder newBuilderForType() {
    return newBuilder();
  }
  public static Builder newBuilder() {
    return DEFAULT_INSTANCE.toBuilder();
  }
  public static Builder newBuilder(
      com.google.cloud.contactcenterinsights.v1.IssueModelResult prototype) {
    return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
  }
  @java.lang.Override
  public Builder toBuilder() {
    return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
  }
  @java.lang.Override
  protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
    Builder builder = new Builder(parent);
    return builder;
  }
  /**
   *
   *
   * <pre>
   * Issue Modeling result on a conversation.
   * </pre>
   *
   * Protobuf type {@code google.cloud.contactcenterinsights.v1.IssueModelResult}
   */
  public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
      implements
      // @@protoc_insertion_point(builder_implements:google.cloud.contactcenterinsights.v1.IssueModelResult)
      com.google.cloud.contactcenterinsights.v1.IssueModelResultOrBuilder {
    public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
      return com.google.cloud.contactcenterinsights.v1.ResourcesProto
          .internal_static_google_cloud_contactcenterinsights_v1_IssueModelResult_descriptor;
    }
    @java.lang.Override
    protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return com.google.cloud.contactcenterinsights.v1.ResourcesProto
          .internal_static_google_cloud_contactcenterinsights_v1_IssueModelResult_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              com.google.cloud.contactcenterinsights.v1.IssueModelResult.class,
              com.google.cloud.contactcenterinsights.v1.IssueModelResult.Builder.class);
    }
    // Construct using com.google.cloud.contactcenterinsights.v1.IssueModelResult.newBuilder()
    private Builder() {}
    private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
      super(parent);
    }
    // Resets both fields to their proto3 defaults and clears the tracking bits.
    @java.lang.Override
    public Builder clear() {
      super.clear();
      bitField0_ = 0;
      issueModel_ = "";
      if (issuesBuilder_ == null) {
        issues_ = java.util.Collections.emptyList();
      } else {
        issues_ = null;
        issuesBuilder_.clear();
      }
      bitField0_ = (bitField0_ & ~0x00000002);
      return this;
    }
    @java.lang.Override
    public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
      return com.google.cloud.contactcenterinsights.v1.ResourcesProto
          .internal_static_google_cloud_contactcenterinsights_v1_IssueModelResult_descriptor;
    }
    @java.lang.Override
    public com.google.cloud.contactcenterinsights.v1.IssueModelResult getDefaultInstanceForType() {
      return com.google.cloud.contactcenterinsights.v1.IssueModelResult.getDefaultInstance();
    }
    @java.lang.Override
    public com.google.cloud.contactcenterinsights.v1.IssueModelResult build() {
      com.google.cloud.contactcenterinsights.v1.IssueModelResult result = buildPartial();
      if (!result.isInitialized()) {
        throw newUninitializedMessageException(result);
      }
      return result;
    }
    // Assembles the message without the isInitialized() check; repeated fields
    // are handled separately from the bit-tracked singular fields.
    @java.lang.Override
    public com.google.cloud.contactcenterinsights.v1.IssueModelResult buildPartial() {
      com.google.cloud.contactcenterinsights.v1.IssueModelResult result =
          new com.google.cloud.contactcenterinsights.v1.IssueModelResult(this);
      buildPartialRepeatedFields(result);
      if (bitField0_ != 0) {
        buildPartial0(result);
      }
      onBuilt();
      return result;
    }
    // Freezes the issues list (making it unmodifiable so the built message can
    // share it) or, when a field builder is active, builds from that instead.
    private void buildPartialRepeatedFields(
        com.google.cloud.contactcenterinsights.v1.IssueModelResult result) {
      if (issuesBuilder_ == null) {
        if (((bitField0_ & 0x00000002) != 0)) {
          issues_ = java.util.Collections.unmodifiableList(issues_);
          bitField0_ = (bitField0_ & ~0x00000002);
        }
        result.issues_ = issues_;
      } else {
        result.issues_ = issuesBuilder_.build();
      }
    }
    private void buildPartial0(com.google.cloud.contactcenterinsights.v1.IssueModelResult result) {
      int from_bitField0_ = bitField0_;
      if (((from_bitField0_ & 0x00000001) != 0)) {
        result.issueModel_ = issueModel_;
      }
    }
    @java.lang.Override
    public Builder clone() {
      return super.clone();
    }
    @java.lang.Override
    public Builder setField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.setField(field, value);
    }
    @java.lang.Override
    public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
      return super.clearField(field);
    }
    @java.lang.Override
    public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
      return super.clearOneof(oneof);
    }
    @java.lang.Override
    public Builder setRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
      return super.setRepeatedField(field, index, value);
    }
    @java.lang.Override
    public Builder addRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.addRepeatedField(field, value);
    }
    @java.lang.Override
    public Builder mergeFrom(com.google.protobuf.Message other) {
      if (other instanceof com.google.cloud.contactcenterinsights.v1.IssueModelResult) {
        return mergeFrom((com.google.cloud.contactcenterinsights.v1.IssueModelResult) other);
      } else {
        super.mergeFrom(other);
        return this;
      }
    }
    // Typed merge: copies a non-empty issue_model, and appends other's issues.
    // When this builder's list is still empty it adopts other's (immutable)
    // list directly instead of copying.
    public Builder mergeFrom(com.google.cloud.contactcenterinsights.v1.IssueModelResult other) {
      if (other == com.google.cloud.contactcenterinsights.v1.IssueModelResult.getDefaultInstance())
        return this;
      if (!other.getIssueModel().isEmpty()) {
        issueModel_ = other.issueModel_;
        bitField0_ |= 0x00000001;
        onChanged();
      }
      if (issuesBuilder_ == null) {
        if (!other.issues_.isEmpty()) {
          if (issues_.isEmpty()) {
            issues_ = other.issues_;
            bitField0_ = (bitField0_ & ~0x00000002);
          } else {
            ensureIssuesIsMutable();
            issues_.addAll(other.issues_);
          }
          onChanged();
        }
      } else {
        if (!other.issues_.isEmpty()) {
          if (issuesBuilder_.isEmpty()) {
            issuesBuilder_.dispose();
            issuesBuilder_ = null;
            issues_ = other.issues_;
            bitField0_ = (bitField0_ & ~0x00000002);
            issuesBuilder_ =
                com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders
                    ? getIssuesFieldBuilder()
                    : null;
          } else {
            issuesBuilder_.addAllMessages(other.issues_);
          }
        }
      }
      this.mergeUnknownFields(other.getUnknownFields());
      onChanged();
      return this;
    }
    @java.lang.Override
    public final boolean isInitialized() {
      return true;
    }
    // Wire-format merge loop: dispatches on each tag (field number << 3 | wire
    // type); tag 10 = issue_model string, tag 18 = one issues element.
    @java.lang.Override
    public Builder mergeFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      if (extensionRegistry == null) {
        throw new java.lang.NullPointerException();
      }
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              done = true;
              break;
            case 10:
              {
                issueModel_ = input.readStringRequireUtf8();
                bitField0_ |= 0x00000001;
                break;
              } // case 10
            case 18:
              {
                com.google.cloud.contactcenterinsights.v1.IssueAssignment m =
                    input.readMessage(
                        com.google.cloud.contactcenterinsights.v1.IssueAssignment.parser(),
                        extensionRegistry);
                if (issuesBuilder_ == null) {
                  ensureIssuesIsMutable();
                  issues_.add(m);
                } else {
                  issuesBuilder_.addMessage(m);
                }
                break;
              } // case 18
            default:
              {
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
          } // switch (tag)
        } // while (!done)
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.unwrapIOException();
      } finally {
        onChanged();
      } // finally
      return this;
    }
    // bit 0x1 = issue_model set; bit 0x2 = issues list is a private mutable copy.
    private int bitField0_;
    private java.lang.Object issueModel_ = "";
    /**
     *
     *
     * <pre>
     * Issue model that generates the result.
     * Format: projects/{project}/locations/{location}/issueModels/{issue_model}
     * </pre>
     *
     * <code>string issue_model = 1 [(.google.api.resource_reference) = { ... }</code>
     *
     * @return The issueModel.
     */
    public java.lang.String getIssueModel() {
      java.lang.Object ref = issueModel_;
      if (!(ref instanceof java.lang.String)) {
        com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        issueModel_ = s;
        return s;
      } else {
        return (java.lang.String) ref;
      }
    }
    /**
     *
     *
     * <pre>
     * Issue model that generates the result.
     * Format: projects/{project}/locations/{location}/issueModels/{issue_model}
     * </pre>
     *
     * <code>string issue_model = 1 [(.google.api.resource_reference) = { ... }</code>
     *
     * @return The bytes for issueModel.
     */
    public com.google.protobuf.ByteString getIssueModelBytes() {
      java.lang.Object ref = issueModel_;
      if (ref instanceof String) {
        com.google.protobuf.ByteString b =
            com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
        issueModel_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }
    /**
     *
     *
     * <pre>
     * Issue model that generates the result.
     * Format: projects/{project}/locations/{location}/issueModels/{issue_model}
     * </pre>
     *
     * <code>string issue_model = 1 [(.google.api.resource_reference) = { ... }</code>
     *
     * @param value The issueModel to set.
     * @return This builder for chaining.
     */
    public Builder setIssueModel(java.lang.String value) {
      if (value == null) {
        throw new NullPointerException();
      }
      issueModel_ = value;
      bitField0_ |= 0x00000001;
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Issue model that generates the result.
     * Format: projects/{project}/locations/{location}/issueModels/{issue_model}
     * </pre>
     *
     * <code>string issue_model = 1 [(.google.api.resource_reference) = { ... }</code>
     *
     * @return This builder for chaining.
     */
    public Builder clearIssueModel() {
      issueModel_ = getDefaultInstance().getIssueModel();
      bitField0_ = (bitField0_ & ~0x00000001);
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Issue model that generates the result.
     * Format: projects/{project}/locations/{location}/issueModels/{issue_model}
     * </pre>
     *
     * <code>string issue_model = 1 [(.google.api.resource_reference) = { ... }</code>
     *
     * @param value The bytes for issueModel to set.
     * @return This builder for chaining.
     */
    public Builder setIssueModelBytes(com.google.protobuf.ByteString value) {
      if (value == null) {
        throw new NullPointerException();
      }
      checkByteStringIsUtf8(value);
      issueModel_ = value;
      bitField0_ |= 0x00000001;
      onChanged();
      return this;
    }
    private java.util.List<com.google.cloud.contactcenterinsights.v1.IssueAssignment> issues_ =
        java.util.Collections.emptyList();
    // Copy-on-write guard: replaces a shared/immutable list with a private
    // ArrayList copy before the first mutation, marking bit 0x2.
    private void ensureIssuesIsMutable() {
      if (!((bitField0_ & 0x00000002) != 0)) {
        issues_ =
            new java.util.ArrayList<com.google.cloud.contactcenterinsights.v1.IssueAssignment>(
                issues_);
        bitField0_ |= 0x00000002;
      }
    }
    // Lazily created; once non-null it owns the field state and issues_ is null.
    private com.google.protobuf.RepeatedFieldBuilderV3<
            com.google.cloud.contactcenterinsights.v1.IssueAssignment,
            com.google.cloud.contactcenterinsights.v1.IssueAssignment.Builder,
            com.google.cloud.contactcenterinsights.v1.IssueAssignmentOrBuilder>
        issuesBuilder_;
    /**
     *
     *
     * <pre>
     * All the matched issues.
     * </pre>
     *
     * <code>repeated .google.cloud.contactcenterinsights.v1.IssueAssignment issues = 2;</code>
     */
    public java.util.List<com.google.cloud.contactcenterinsights.v1.IssueAssignment>
        getIssuesList() {
      if (issuesBuilder_ == null) {
        return java.util.Collections.unmodifiableList(issues_);
      } else {
        return issuesBuilder_.getMessageList();
      }
    }
    /**
     *
     *
     * <pre>
     * All the matched issues.
     * </pre>
     *
     * <code>repeated .google.cloud.contactcenterinsights.v1.IssueAssignment issues = 2;</code>
     */
    public int getIssuesCount() {
      if (issuesBuilder_ == null) {
        return issues_.size();
      } else {
        return issuesBuilder_.getCount();
      }
    }
    /**
     *
     *
     * <pre>
     * All the matched issues.
     * </pre>
     *
     * <code>repeated .google.cloud.contactcenterinsights.v1.IssueAssignment issues = 2;</code>
     */
    public com.google.cloud.contactcenterinsights.v1.IssueAssignment getIssues(int index) {
      if (issuesBuilder_ == null) {
        return issues_.get(index);
      } else {
        return issuesBuilder_.getMessage(index);
      }
    }
    /**
     *
     *
     * <pre>
     * All the matched issues.
     * </pre>
     *
     * <code>repeated .google.cloud.contactcenterinsights.v1.IssueAssignment issues = 2;</code>
     */
    public Builder setIssues(
        int index, com.google.cloud.contactcenterinsights.v1.IssueAssignment value) {
      if (issuesBuilder_ == null) {
        if (value == null) {
          throw new NullPointerException();
        }
        ensureIssuesIsMutable();
        issues_.set(index, value);
        onChanged();
      } else {
        issuesBuilder_.setMessage(index, value);
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * All the matched issues.
     * </pre>
     *
     * <code>repeated .google.cloud.contactcenterinsights.v1.IssueAssignment issues = 2;</code>
     */
    public Builder setIssues(
        int index,
        com.google.cloud.contactcenterinsights.v1.IssueAssignment.Builder builderForValue) {
      if (issuesBuilder_ == null) {
        ensureIssuesIsMutable();
        issues_.set(index, builderForValue.build());
        onChanged();
      } else {
        issuesBuilder_.setMessage(index, builderForValue.build());
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * All the matched issues.
     * </pre>
     *
     * <code>repeated .google.cloud.contactcenterinsights.v1.IssueAssignment issues = 2;</code>
     */
    public Builder addIssues(com.google.cloud.contactcenterinsights.v1.IssueAssignment value) {
      if (issuesBuilder_ == null) {
        if (value == null) {
          throw new NullPointerException();
        }
        ensureIssuesIsMutable();
        issues_.add(value);
        onChanged();
      } else {
        issuesBuilder_.addMessage(value);
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * All the matched issues.
     * </pre>
     *
     * <code>repeated .google.cloud.contactcenterinsights.v1.IssueAssignment issues = 2;</code>
     */
    public Builder addIssues(
        int index, com.google.cloud.contactcenterinsights.v1.IssueAssignment value) {
      if (issuesBuilder_ == null) {
        if (value == null) {
          throw new NullPointerException();
        }
        ensureIssuesIsMutable();
        issues_.add(index, value);
        onChanged();
      } else {
        issuesBuilder_.addMessage(index, value);
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * All the matched issues.
     * </pre>
     *
     * <code>repeated .google.cloud.contactcenterinsights.v1.IssueAssignment issues = 2;</code>
     */
    public Builder addIssues(
        com.google.cloud.contactcenterinsights.v1.IssueAssignment.Builder builderForValue) {
      if (issuesBuilder_ == null) {
        ensureIssuesIsMutable();
        issues_.add(builderForValue.build());
        onChanged();
      } else {
        issuesBuilder_.addMessage(builderForValue.build());
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * All the matched issues.
     * </pre>
     *
     * <code>repeated .google.cloud.contactcenterinsights.v1.IssueAssignment issues = 2;</code>
     */
    public Builder addIssues(
        int index,
        com.google.cloud.contactcenterinsights.v1.IssueAssignment.Builder builderForValue) {
      if (issuesBuilder_ == null) {
        ensureIssuesIsMutable();
        issues_.add(index, builderForValue.build());
        onChanged();
      } else {
        issuesBuilder_.addMessage(index, builderForValue.build());
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * All the matched issues.
     * </pre>
     *
     * <code>repeated .google.cloud.contactcenterinsights.v1.IssueAssignment issues = 2;</code>
     */
    public Builder addAllIssues(
        java.lang.Iterable<? extends com.google.cloud.contactcenterinsights.v1.IssueAssignment>
            values) {
      if (issuesBuilder_ == null) {
        ensureIssuesIsMutable();
        com.google.protobuf.AbstractMessageLite.Builder.addAll(values, issues_);
        onChanged();
      } else {
        issuesBuilder_.addAllMessages(values);
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * All the matched issues.
     * </pre>
     *
     * <code>repeated .google.cloud.contactcenterinsights.v1.IssueAssignment issues = 2;</code>
     */
    public Builder clearIssues() {
      if (issuesBuilder_ == null) {
        issues_ = java.util.Collections.emptyList();
        bitField0_ = (bitField0_ & ~0x00000002);
        onChanged();
      } else {
        issuesBuilder_.clear();
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * All the matched issues.
     * </pre>
     *
     * <code>repeated .google.cloud.contactcenterinsights.v1.IssueAssignment issues = 2;</code>
     */
    public Builder removeIssues(int index) {
      if (issuesBuilder_ == null) {
        ensureIssuesIsMutable();
        issues_.remove(index);
        onChanged();
      } else {
        issuesBuilder_.remove(index);
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * All the matched issues.
     * </pre>
     *
     * <code>repeated .google.cloud.contactcenterinsights.v1.IssueAssignment issues = 2;</code>
     */
    public com.google.cloud.contactcenterinsights.v1.IssueAssignment.Builder getIssuesBuilder(
        int index) {
      return getIssuesFieldBuilder().getBuilder(index);
    }
    /**
     *
     *
     * <pre>
     * All the matched issues.
     * </pre>
     *
     * <code>repeated .google.cloud.contactcenterinsights.v1.IssueAssignment issues = 2;</code>
     */
    public com.google.cloud.contactcenterinsights.v1.IssueAssignmentOrBuilder getIssuesOrBuilder(
        int index) {
      if (issuesBuilder_ == null) {
        return issues_.get(index);
      } else {
        return issuesBuilder_.getMessageOrBuilder(index);
      }
    }
    /**
     *
     *
     * <pre>
     * All the matched issues.
     * </pre>
     *
     * <code>repeated .google.cloud.contactcenterinsights.v1.IssueAssignment issues = 2;</code>
     */
    public java.util.List<
            ? extends com.google.cloud.contactcenterinsights.v1.IssueAssignmentOrBuilder>
        getIssuesOrBuilderList() {
      if (issuesBuilder_ != null) {
        return issuesBuilder_.getMessageOrBuilderList();
      } else {
        return java.util.Collections.unmodifiableList(issues_);
      }
    }
    /**
     *
     *
     * <pre>
     * All the matched issues.
     * </pre>
     *
     * <code>repeated .google.cloud.contactcenterinsights.v1.IssueAssignment issues = 2;</code>
     */
    public com.google.cloud.contactcenterinsights.v1.IssueAssignment.Builder addIssuesBuilder() {
      return getIssuesFieldBuilder()
          .addBuilder(
              com.google.cloud.contactcenterinsights.v1.IssueAssignment.getDefaultInstance());
    }
    /**
     *
     *
     * <pre>
     * All the matched issues.
     * </pre>
     *
     * <code>repeated .google.cloud.contactcenterinsights.v1.IssueAssignment issues = 2;</code>
     */
    public com.google.cloud.contactcenterinsights.v1.IssueAssignment.Builder addIssuesBuilder(
        int index) {
      return getIssuesFieldBuilder()
          .addBuilder(
              index,
              com.google.cloud.contactcenterinsights.v1.IssueAssignment.getDefaultInstance());
    }
    /**
     *
     *
     * <pre>
     * All the matched issues.
     * </pre>
     *
     * <code>repeated .google.cloud.contactcenterinsights.v1.IssueAssignment issues = 2;</code>
     */
    public java.util.List<com.google.cloud.contactcenterinsights.v1.IssueAssignment.Builder>
        getIssuesBuilderList() {
      return getIssuesFieldBuilder().getBuilderList();
    }
    // Creates the RepeatedFieldBuilderV3 on first use, handing it the current
    // list; afterwards issues_ is nulled and the builder is the single owner.
    private com.google.protobuf.RepeatedFieldBuilderV3<
            com.google.cloud.contactcenterinsights.v1.IssueAssignment,
            com.google.cloud.contactcenterinsights.v1.IssueAssignment.Builder,
            com.google.cloud.contactcenterinsights.v1.IssueAssignmentOrBuilder>
        getIssuesFieldBuilder() {
      if (issuesBuilder_ == null) {
        issuesBuilder_ =
            new com.google.protobuf.RepeatedFieldBuilderV3<
                com.google.cloud.contactcenterinsights.v1.IssueAssignment,
                com.google.cloud.contactcenterinsights.v1.IssueAssignment.Builder,
                com.google.cloud.contactcenterinsights.v1.IssueAssignmentOrBuilder>(
                issues_, ((bitField0_ & 0x00000002) != 0), getParentForChildren(), isClean());
        issues_ = null;
      }
      return issuesBuilder_;
    }
    @java.lang.Override
    public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.setUnknownFields(unknownFields);
    }
    @java.lang.Override
    public final Builder mergeUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.mergeUnknownFields(unknownFields);
    }
    // @@protoc_insertion_point(builder_scope:google.cloud.contactcenterinsights.v1.IssueModelResult)
  }
  // @@protoc_insertion_point(class_scope:google.cloud.contactcenterinsights.v1.IssueModelResult)
  // Shared immutable singleton whose fields hold the proto3 defaults.
  private static final com.google.cloud.contactcenterinsights.v1.IssueModelResult DEFAULT_INSTANCE;
  static {
    DEFAULT_INSTANCE = new com.google.cloud.contactcenterinsights.v1.IssueModelResult();
  }
  public static com.google.cloud.contactcenterinsights.v1.IssueModelResult getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }
  // Wire parser backing all parseFrom overloads: drives Builder.mergeFrom and
  // attaches the partially built message to any parse exception.
  private static final com.google.protobuf.Parser<IssueModelResult> PARSER =
      new com.google.protobuf.AbstractParser<IssueModelResult>() {
        @java.lang.Override
        public IssueModelResult parsePartialFrom(
            com.google.protobuf.CodedInputStream input,
            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
            throws com.google.protobuf.InvalidProtocolBufferException {
          Builder builder = newBuilder();
          try {
            builder.mergeFrom(input, extensionRegistry);
          } catch (com.google.protobuf.InvalidProtocolBufferException e) {
            throw e.setUnfinishedMessage(builder.buildPartial());
          } catch (com.google.protobuf.UninitializedMessageException e) {
            throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
          } catch (java.io.IOException e) {
            throw new com.google.protobuf.InvalidProtocolBufferException(e)
                .setUnfinishedMessage(builder.buildPartial());
          }
          return builder.buildPartial();
        }
      };
  public static com.google.protobuf.Parser<IssueModelResult> parser() {
    return PARSER;
  }
  @java.lang.Override
  public com.google.protobuf.Parser<IssueModelResult> getParserForType() {
    return PARSER;
  }
  @java.lang.Override
  public com.google.cloud.contactcenterinsights.v1.IssueModelResult getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }
}
|
googleapis/google-cloud-java | 36,951 | java-dlp/proto-google-cloud-dlp-v2/src/main/java/com/google/privacy/dlp/v2/UpdateJobTriggerRequest.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/privacy/dlp/v2/dlp.proto
// Protobuf Java Version: 3.25.8
package com.google.privacy.dlp.v2;
/**
*
*
* <pre>
* Request message for UpdateJobTrigger.
* </pre>
*
* Protobuf type {@code google.privacy.dlp.v2.UpdateJobTriggerRequest}
*/
public final class UpdateJobTriggerRequest extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.privacy.dlp.v2.UpdateJobTriggerRequest)
UpdateJobTriggerRequestOrBuilder {
private static final long serialVersionUID = 0L;
// Use UpdateJobTriggerRequest.newBuilder() to construct.
  // Use UpdateJobTriggerRequest.newBuilder() to construct.
  private UpdateJobTriggerRequest(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
    super(builder);
  }
  // No-arg constructor initializes string fields to "" (proto3 defaults).
  private UpdateJobTriggerRequest() {
    name_ = "";
  }
  // Reflection hook used by the protobuf runtime to create fresh instances.
  @java.lang.Override
  @SuppressWarnings({"unused"})
  protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
    return new UpdateJobTriggerRequest();
  }
  // Descriptor and accessor-table wiring generated from google/privacy/dlp/v2/dlp.proto.
  public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
    return com.google.privacy.dlp.v2.DlpProto
        .internal_static_google_privacy_dlp_v2_UpdateJobTriggerRequest_descriptor;
  }
  @java.lang.Override
  protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internalGetFieldAccessorTable() {
    return com.google.privacy.dlp.v2.DlpProto
        .internal_static_google_privacy_dlp_v2_UpdateJobTriggerRequest_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            com.google.privacy.dlp.v2.UpdateJobTriggerRequest.class,
            com.google.privacy.dlp.v2.UpdateJobTriggerRequest.Builder.class);
  }
  // Presence bits for the message fields: 0x1 = job_trigger, 0x2 = update_mask.
  private int bitField0_;
  public static final int NAME_FIELD_NUMBER = 1;
  @SuppressWarnings("serial")
  // Holds either a String or a ByteString; lazily converted and cached below.
  private volatile java.lang.Object name_ = "";
  /**
   *
   *
   * <pre>
   * Required. Resource name of the project and the triggeredJob, for example
   * `projects/dlp-test-project/jobTriggers/53234423`.
   * </pre>
   *
   * <code>
   * string name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
   * </code>
   *
   * @return The name.
   */
  @java.lang.Override
  public java.lang.String getName() {
    java.lang.Object ref = name_;
    if (ref instanceof java.lang.String) {
      return (java.lang.String) ref;
    } else {
      // Decode the parsed ByteString once and cache the String form.
      com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
      java.lang.String s = bs.toStringUtf8();
      name_ = s;
      return s;
    }
  }
  /**
   *
   *
   * <pre>
   * Required. Resource name of the project and the triggeredJob, for example
   * `projects/dlp-test-project/jobTriggers/53234423`.
   * </pre>
   *
   * <code>
   * string name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
   * </code>
   *
   * @return The bytes for name.
   */
  @java.lang.Override
  public com.google.protobuf.ByteString getNameBytes() {
    java.lang.Object ref = name_;
    if (ref instanceof java.lang.String) {
      // Encode the String once and cache the ByteString form.
      com.google.protobuf.ByteString b =
          com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
      name_ = b;
      return b;
    } else {
      return (com.google.protobuf.ByteString) ref;
    }
  }
  public static final int JOB_TRIGGER_FIELD_NUMBER = 2;
  private com.google.privacy.dlp.v2.JobTrigger jobTrigger_;
  /**
   *
   *
   * <pre>
   * New JobTrigger value.
   * </pre>
   *
   * <code>.google.privacy.dlp.v2.JobTrigger job_trigger = 2;</code>
   *
   * @return Whether the jobTrigger field is set.
   */
  @java.lang.Override
  public boolean hasJobTrigger() {
    // Presence is tracked in bit 0x1 rather than by null-checking the field.
    return ((bitField0_ & 0x00000001) != 0);
  }
  /**
   *
   *
   * <pre>
   * New JobTrigger value.
   * </pre>
   *
   * <code>.google.privacy.dlp.v2.JobTrigger job_trigger = 2;</code>
   *
   * @return The jobTrigger.
   */
  @java.lang.Override
  public com.google.privacy.dlp.v2.JobTrigger getJobTrigger() {
    // Never returns null: an unset field yields the default instance.
    return jobTrigger_ == null
        ? com.google.privacy.dlp.v2.JobTrigger.getDefaultInstance()
        : jobTrigger_;
  }
  /**
   *
   *
   * <pre>
   * New JobTrigger value.
   * </pre>
   *
   * <code>.google.privacy.dlp.v2.JobTrigger job_trigger = 2;</code>
   */
  @java.lang.Override
  public com.google.privacy.dlp.v2.JobTriggerOrBuilder getJobTriggerOrBuilder() {
    return jobTrigger_ == null
        ? com.google.privacy.dlp.v2.JobTrigger.getDefaultInstance()
        : jobTrigger_;
  }
  public static final int UPDATE_MASK_FIELD_NUMBER = 3;
  private com.google.protobuf.FieldMask updateMask_;
  /**
   *
   *
   * <pre>
   * Mask to control which fields get updated.
   * </pre>
   *
   * <code>.google.protobuf.FieldMask update_mask = 3;</code>
   *
   * @return Whether the updateMask field is set.
   */
  @java.lang.Override
  public boolean hasUpdateMask() {
    // Presence is tracked in bit 0x2 rather than by null-checking the field.
    return ((bitField0_ & 0x00000002) != 0);
  }
  /**
   *
   *
   * <pre>
   * Mask to control which fields get updated.
   * </pre>
   *
   * <code>.google.protobuf.FieldMask update_mask = 3;</code>
   *
   * @return The updateMask.
   */
  @java.lang.Override
  public com.google.protobuf.FieldMask getUpdateMask() {
    // Never returns null: an unset field yields the default instance.
    return updateMask_ == null ? com.google.protobuf.FieldMask.getDefaultInstance() : updateMask_;
  }
  /**
   *
   *
   * <pre>
   * Mask to control which fields get updated.
   * </pre>
   *
   * <code>.google.protobuf.FieldMask update_mask = 3;</code>
   */
  @java.lang.Override
  public com.google.protobuf.FieldMaskOrBuilder getUpdateMaskOrBuilder() {
    return updateMask_ == null ? com.google.protobuf.FieldMask.getDefaultInstance() : updateMask_;
  }
  // Memoized isInitialized() result: -1 = not yet computed, 0 = false, 1 = true.
  private byte memoizedIsInitialized = -1;
  @java.lang.Override
  public final boolean isInitialized() {
    byte isInitialized = memoizedIsInitialized;
    if (isInitialized == 1) return true;
    if (isInitialized == 0) return false;
    // This message declares no required fields, so it is always initialized.
    memoizedIsInitialized = 1;
    return true;
  }
  // Serializes name (field 1, when non-empty), then job_trigger (2) and
  // update_mask (3) only when their presence bits are set, then unknown fields.
  @java.lang.Override
  public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(name_)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 1, name_);
    }
    if (((bitField0_ & 0x00000001) != 0)) {
      output.writeMessage(2, getJobTrigger());
    }
    if (((bitField0_ & 0x00000002) != 0)) {
      output.writeMessage(3, getUpdateMask());
    }
    getUnknownFields().writeTo(output);
  }
  // Computes (and memoizes in memoizedSize) the exact number of bytes writeTo
  // will emit; must mirror writeTo's field-skipping logic.
  @java.lang.Override
  public int getSerializedSize() {
    int size = memoizedSize;
    if (size != -1) return size;
    size = 0;
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(name_)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, name_);
    }
    if (((bitField0_ & 0x00000001) != 0)) {
      size += com.google.protobuf.CodedOutputStream.computeMessageSize(2, getJobTrigger());
    }
    if (((bitField0_ & 0x00000002) != 0)) {
      size += com.google.protobuf.CodedOutputStream.computeMessageSize(3, getUpdateMask());
    }
    size += getUnknownFields().getSerializedSize();
    memoizedSize = size;
    return size;
  }
  // Structural equality: name always compared; job_trigger and update_mask
  // compared only when both sides agree on presence; unknown fields included.
  @java.lang.Override
  public boolean equals(final java.lang.Object obj) {
    if (obj == this) {
      return true;
    }
    if (!(obj instanceof com.google.privacy.dlp.v2.UpdateJobTriggerRequest)) {
      return super.equals(obj);
    }
    com.google.privacy.dlp.v2.UpdateJobTriggerRequest other =
        (com.google.privacy.dlp.v2.UpdateJobTriggerRequest) obj;
    if (!getName().equals(other.getName())) return false;
    if (hasJobTrigger() != other.hasJobTrigger()) return false;
    if (hasJobTrigger()) {
      if (!getJobTrigger().equals(other.getJobTrigger())) return false;
    }
    if (hasUpdateMask() != other.hasUpdateMask()) return false;
    if (hasUpdateMask()) {
      if (!getUpdateMask().equals(other.getUpdateMask())) return false;
    }
    if (!getUnknownFields().equals(other.getUnknownFields())) return false;
    return true;
  }
  // Memoized hash consistent with equals(): mixes in the descriptor, each set
  // field tagged by its field number, and the unknown fields.
  @java.lang.Override
  public int hashCode() {
    if (memoizedHashCode != 0) {
      return memoizedHashCode;
    }
    int hash = 41;
    hash = (19 * hash) + getDescriptor().hashCode();
    hash = (37 * hash) + NAME_FIELD_NUMBER;
    hash = (53 * hash) + getName().hashCode();
    if (hasJobTrigger()) {
      hash = (37 * hash) + JOB_TRIGGER_FIELD_NUMBER;
      hash = (53 * hash) + getJobTrigger().hashCode();
    }
    if (hasUpdateMask()) {
      hash = (37 * hash) + UPDATE_MASK_FIELD_NUMBER;
      hash = (53 * hash) + getUpdateMask().hashCode();
    }
    hash = (29 * hash) + getUnknownFields().hashCode();
    memoizedHashCode = hash;
    return hash;
  }
  // Parses a message from a ByteBuffer; throws on malformed input.
  public static com.google.privacy.dlp.v2.UpdateJobTriggerRequest parseFrom(
      java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  // Parses a message from a ByteBuffer, resolving extensions via the given registry.
  public static com.google.privacy.dlp.v2.UpdateJobTriggerRequest parseFrom(
      java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  // Parses a message from a ByteString; throws on malformed input.
  public static com.google.privacy.dlp.v2.UpdateJobTriggerRequest parseFrom(
      com.google.protobuf.ByteString data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  // Parses a message from a ByteString, resolving extensions via the given registry.
  public static com.google.privacy.dlp.v2.UpdateJobTriggerRequest parseFrom(
      com.google.protobuf.ByteString data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  // Parses a message from a byte array; throws on malformed input.
  public static com.google.privacy.dlp.v2.UpdateJobTriggerRequest parseFrom(byte[] data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  // Parses a message from a byte array, resolving extensions via the given registry.
  public static com.google.privacy.dlp.v2.UpdateJobTriggerRequest parseFrom(
      byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  // Parses a single (non-delimited) message from an InputStream.
  public static com.google.privacy.dlp.v2.UpdateJobTriggerRequest parseFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }
  // Parses a single message from an InputStream with an extension registry.
  public static com.google.privacy.dlp.v2.UpdateJobTriggerRequest parseFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }
  // Parses a length-delimited message (varint size prefix) from an InputStream.
  public static com.google.privacy.dlp.v2.UpdateJobTriggerRequest parseDelimitedFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
  }
  // Parses a length-delimited message from an InputStream with an extension registry.
  public static com.google.privacy.dlp.v2.UpdateJobTriggerRequest parseDelimitedFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
        PARSER, input, extensionRegistry);
  }
  // Parses a message directly from a CodedInputStream.
  public static com.google.privacy.dlp.v2.UpdateJobTriggerRequest parseFrom(
      com.google.protobuf.CodedInputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }
  // Parses a message from a CodedInputStream with an extension registry.
  public static com.google.privacy.dlp.v2.UpdateJobTriggerRequest parseFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }
  @java.lang.Override
  public Builder newBuilderForType() {
    // Instance-level builder factory; delegates to the static factory.
    return newBuilder();
  }
  // Creates a builder initialized to default (empty) field values.
  public static Builder newBuilder() {
    return DEFAULT_INSTANCE.toBuilder();
  }
  // Creates a builder pre-populated with a copy of {@code prototype}'s fields.
  public static Builder newBuilder(com.google.privacy.dlp.v2.UpdateJobTriggerRequest prototype) {
    return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
  }
  @java.lang.Override
  public Builder toBuilder() {
    // Skip the mergeFrom copy when converting the (empty) default instance.
    return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
  }
  @java.lang.Override
  protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
    // Used by parent builders to create nested builders wired for change notification.
    Builder builder = new Builder(parent);
    return builder;
  }
  /**
   *
   *
   * <pre>
   * Request message for UpdateJobTrigger.
   * </pre>
   *
   * Protobuf type {@code google.privacy.dlp.v2.UpdateJobTriggerRequest}
   */
  public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
      implements
      // @@protoc_insertion_point(builder_implements:google.privacy.dlp.v2.UpdateJobTriggerRequest)
      com.google.privacy.dlp.v2.UpdateJobTriggerRequestOrBuilder {
    public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
      return com.google.privacy.dlp.v2.DlpProto
          .internal_static_google_privacy_dlp_v2_UpdateJobTriggerRequest_descriptor;
    }

    @java.lang.Override
    protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return com.google.privacy.dlp.v2.DlpProto
          .internal_static_google_privacy_dlp_v2_UpdateJobTriggerRequest_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              com.google.privacy.dlp.v2.UpdateJobTriggerRequest.class,
              com.google.privacy.dlp.v2.UpdateJobTriggerRequest.Builder.class);
    }

    // Construct using com.google.privacy.dlp.v2.UpdateJobTriggerRequest.newBuilder()
    private Builder() {
      maybeForceBuilderInitialization();
    }

    private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
      super(parent);
      maybeForceBuilderInitialization();
    }

    // Eagerly creates nested field builders when the runtime requires it
    // (alwaysUseFieldBuilders is set in some runtime configurations).
    private void maybeForceBuilderInitialization() {
      if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) {
        getJobTriggerFieldBuilder();
        getUpdateMaskFieldBuilder();
      }
    }

    @java.lang.Override
    public Builder clear() {
      super.clear();
      bitField0_ = 0;
      name_ = "";
      jobTrigger_ = null;
      if (jobTriggerBuilder_ != null) {
        jobTriggerBuilder_.dispose();
        jobTriggerBuilder_ = null;
      }
      updateMask_ = null;
      if (updateMaskBuilder_ != null) {
        updateMaskBuilder_.dispose();
        updateMaskBuilder_ = null;
      }
      return this;
    }

    @java.lang.Override
    public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
      return com.google.privacy.dlp.v2.DlpProto
          .internal_static_google_privacy_dlp_v2_UpdateJobTriggerRequest_descriptor;
    }

    @java.lang.Override
    public com.google.privacy.dlp.v2.UpdateJobTriggerRequest getDefaultInstanceForType() {
      return com.google.privacy.dlp.v2.UpdateJobTriggerRequest.getDefaultInstance();
    }

    @java.lang.Override
    public com.google.privacy.dlp.v2.UpdateJobTriggerRequest build() {
      com.google.privacy.dlp.v2.UpdateJobTriggerRequest result = buildPartial();
      if (!result.isInitialized()) {
        throw newUninitializedMessageException(result);
      }
      return result;
    }

    @java.lang.Override
    public com.google.privacy.dlp.v2.UpdateJobTriggerRequest buildPartial() {
      com.google.privacy.dlp.v2.UpdateJobTriggerRequest result =
          new com.google.privacy.dlp.v2.UpdateJobTriggerRequest(this);
      if (bitField0_ != 0) {
        buildPartial0(result);
      }
      onBuilt();
      return result;
    }

    // Copies set fields from the builder into {@code result}, remapping the
    // builder's bit layout (bit 0 = name, bit 1 = job_trigger, bit 2 = update_mask)
    // onto the message's bit layout (bit 0 = job_trigger, bit 1 = update_mask).
    private void buildPartial0(com.google.privacy.dlp.v2.UpdateJobTriggerRequest result) {
      int from_bitField0_ = bitField0_;
      if (((from_bitField0_ & 0x00000001) != 0)) {
        result.name_ = name_;
      }
      int to_bitField0_ = 0;
      if (((from_bitField0_ & 0x00000002) != 0)) {
        result.jobTrigger_ = jobTriggerBuilder_ == null ? jobTrigger_ : jobTriggerBuilder_.build();
        to_bitField0_ |= 0x00000001;
      }
      if (((from_bitField0_ & 0x00000004) != 0)) {
        result.updateMask_ = updateMaskBuilder_ == null ? updateMask_ : updateMaskBuilder_.build();
        to_bitField0_ |= 0x00000002;
      }
      result.bitField0_ |= to_bitField0_;
    }

    @java.lang.Override
    public Builder clone() {
      return super.clone();
    }

    @java.lang.Override
    public Builder setField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.setField(field, value);
    }

    @java.lang.Override
    public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
      return super.clearField(field);
    }

    @java.lang.Override
    public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
      return super.clearOneof(oneof);
    }

    @java.lang.Override
    public Builder setRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
      return super.setRepeatedField(field, index, value);
    }

    @java.lang.Override
    public Builder addRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.addRepeatedField(field, value);
    }

    @java.lang.Override
    public Builder mergeFrom(com.google.protobuf.Message other) {
      if (other instanceof com.google.privacy.dlp.v2.UpdateJobTriggerRequest) {
        return mergeFrom((com.google.privacy.dlp.v2.UpdateJobTriggerRequest) other);
      } else {
        super.mergeFrom(other);
        return this;
      }
    }

    public Builder mergeFrom(com.google.privacy.dlp.v2.UpdateJobTriggerRequest other) {
      if (other == com.google.privacy.dlp.v2.UpdateJobTriggerRequest.getDefaultInstance())
        return this;
      if (!other.getName().isEmpty()) {
        name_ = other.name_;
        bitField0_ |= 0x00000001;
        onChanged();
      }
      if (other.hasJobTrigger()) {
        mergeJobTrigger(other.getJobTrigger());
      }
      if (other.hasUpdateMask()) {
        mergeUpdateMask(other.getUpdateMask());
      }
      this.mergeUnknownFields(other.getUnknownFields());
      onChanged();
      return this;
    }

    @java.lang.Override
    public final boolean isInitialized() {
      return true;
    }

    @java.lang.Override
    public Builder mergeFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      if (extensionRegistry == null) {
        throw new java.lang.NullPointerException();
      }
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              done = true;
              break;
            case 10:
              {
                name_ = input.readStringRequireUtf8();
                bitField0_ |= 0x00000001;
                break;
              } // case 10
            case 18:
              {
                input.readMessage(getJobTriggerFieldBuilder().getBuilder(), extensionRegistry);
                bitField0_ |= 0x00000002;
                break;
              } // case 18
            case 26:
              {
                input.readMessage(getUpdateMaskFieldBuilder().getBuilder(), extensionRegistry);
                bitField0_ |= 0x00000004;
                break;
              } // case 26
            default:
              {
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
          } // switch (tag)
        } // while (!done)
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.unwrapIOException();
      } finally {
        onChanged();
      } // finally
      return this;
    }

    // Presence bits: bit 0 = name, bit 1 = job_trigger, bit 2 = update_mask.
    private int bitField0_;

    private java.lang.Object name_ = "";

    /**
     *
     *
     * <pre>
     * Required. Resource name of the project and the triggeredJob, for example
     * `projects/dlp-test-project/jobTriggers/53234423`.
     * </pre>
     *
     * <code>
     * string name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
     * </code>
     *
     * @return The name.
     */
    public java.lang.String getName() {
      java.lang.Object ref = name_;
      if (!(ref instanceof java.lang.String)) {
        com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        name_ = s;
        return s;
      } else {
        return (java.lang.String) ref;
      }
    }

    /**
     *
     *
     * <pre>
     * Required. Resource name of the project and the triggeredJob, for example
     * `projects/dlp-test-project/jobTriggers/53234423`.
     * </pre>
     *
     * <code>
     * string name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
     * </code>
     *
     * @return The bytes for name.
     */
    public com.google.protobuf.ByteString getNameBytes() {
      java.lang.Object ref = name_;
      if (ref instanceof String) {
        com.google.protobuf.ByteString b =
            com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
        name_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }

    /**
     *
     *
     * <pre>
     * Required. Resource name of the project and the triggeredJob, for example
     * `projects/dlp-test-project/jobTriggers/53234423`.
     * </pre>
     *
     * <code>
     * string name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
     * </code>
     *
     * @param value The name to set.
     * @return This builder for chaining.
     */
    public Builder setName(java.lang.String value) {
      if (value == null) {
        throw new NullPointerException();
      }
      name_ = value;
      bitField0_ |= 0x00000001;
      onChanged();
      return this;
    }

    /**
     *
     *
     * <pre>
     * Required. Resource name of the project and the triggeredJob, for example
     * `projects/dlp-test-project/jobTriggers/53234423`.
     * </pre>
     *
     * <code>
     * string name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
     * </code>
     *
     * @return This builder for chaining.
     */
    public Builder clearName() {
      name_ = getDefaultInstance().getName();
      bitField0_ = (bitField0_ & ~0x00000001);
      onChanged();
      return this;
    }

    /**
     *
     *
     * <pre>
     * Required. Resource name of the project and the triggeredJob, for example
     * `projects/dlp-test-project/jobTriggers/53234423`.
     * </pre>
     *
     * <code>
     * string name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
     * </code>
     *
     * @param value The bytes for name to set.
     * @return This builder for chaining.
     */
    public Builder setNameBytes(com.google.protobuf.ByteString value) {
      if (value == null) {
        throw new NullPointerException();
      }
      checkByteStringIsUtf8(value);
      name_ = value;
      bitField0_ |= 0x00000001;
      onChanged();
      return this;
    }

    private com.google.privacy.dlp.v2.JobTrigger jobTrigger_;
    private com.google.protobuf.SingleFieldBuilderV3<
            com.google.privacy.dlp.v2.JobTrigger,
            com.google.privacy.dlp.v2.JobTrigger.Builder,
            com.google.privacy.dlp.v2.JobTriggerOrBuilder>
        jobTriggerBuilder_;

    /**
     *
     *
     * <pre>
     * New JobTrigger value.
     * </pre>
     *
     * <code>.google.privacy.dlp.v2.JobTrigger job_trigger = 2;</code>
     *
     * @return Whether the jobTrigger field is set.
     */
    public boolean hasJobTrigger() {
      return ((bitField0_ & 0x00000002) != 0);
    }

    /**
     *
     *
     * <pre>
     * New JobTrigger value.
     * </pre>
     *
     * <code>.google.privacy.dlp.v2.JobTrigger job_trigger = 2;</code>
     *
     * @return The jobTrigger.
     */
    public com.google.privacy.dlp.v2.JobTrigger getJobTrigger() {
      if (jobTriggerBuilder_ == null) {
        return jobTrigger_ == null
            ? com.google.privacy.dlp.v2.JobTrigger.getDefaultInstance()
            : jobTrigger_;
      } else {
        return jobTriggerBuilder_.getMessage();
      }
    }

    /**
     *
     *
     * <pre>
     * New JobTrigger value.
     * </pre>
     *
     * <code>.google.privacy.dlp.v2.JobTrigger job_trigger = 2;</code>
     */
    public Builder setJobTrigger(com.google.privacy.dlp.v2.JobTrigger value) {
      if (jobTriggerBuilder_ == null) {
        if (value == null) {
          throw new NullPointerException();
        }
        jobTrigger_ = value;
      } else {
        jobTriggerBuilder_.setMessage(value);
      }
      bitField0_ |= 0x00000002;
      onChanged();
      return this;
    }

    /**
     *
     *
     * <pre>
     * New JobTrigger value.
     * </pre>
     *
     * <code>.google.privacy.dlp.v2.JobTrigger job_trigger = 2;</code>
     */
    public Builder setJobTrigger(com.google.privacy.dlp.v2.JobTrigger.Builder builderForValue) {
      if (jobTriggerBuilder_ == null) {
        jobTrigger_ = builderForValue.build();
      } else {
        jobTriggerBuilder_.setMessage(builderForValue.build());
      }
      bitField0_ |= 0x00000002;
      onChanged();
      return this;
    }

    /**
     *
     *
     * <pre>
     * New JobTrigger value.
     * </pre>
     *
     * <code>.google.privacy.dlp.v2.JobTrigger job_trigger = 2;</code>
     */
    public Builder mergeJobTrigger(com.google.privacy.dlp.v2.JobTrigger value) {
      if (jobTriggerBuilder_ == null) {
        // Merge field-by-field only when an existing non-default value is present;
        // otherwise simply adopt the incoming message.
        if (((bitField0_ & 0x00000002) != 0)
            && jobTrigger_ != null
            && jobTrigger_ != com.google.privacy.dlp.v2.JobTrigger.getDefaultInstance()) {
          getJobTriggerBuilder().mergeFrom(value);
        } else {
          jobTrigger_ = value;
        }
      } else {
        jobTriggerBuilder_.mergeFrom(value);
      }
      if (jobTrigger_ != null) {
        bitField0_ |= 0x00000002;
        onChanged();
      }
      return this;
    }

    /**
     *
     *
     * <pre>
     * New JobTrigger value.
     * </pre>
     *
     * <code>.google.privacy.dlp.v2.JobTrigger job_trigger = 2;</code>
     */
    public Builder clearJobTrigger() {
      bitField0_ = (bitField0_ & ~0x00000002);
      jobTrigger_ = null;
      if (jobTriggerBuilder_ != null) {
        jobTriggerBuilder_.dispose();
        jobTriggerBuilder_ = null;
      }
      onChanged();
      return this;
    }

    /**
     *
     *
     * <pre>
     * New JobTrigger value.
     * </pre>
     *
     * <code>.google.privacy.dlp.v2.JobTrigger job_trigger = 2;</code>
     */
    public com.google.privacy.dlp.v2.JobTrigger.Builder getJobTriggerBuilder() {
      bitField0_ |= 0x00000002;
      onChanged();
      return getJobTriggerFieldBuilder().getBuilder();
    }

    /**
     *
     *
     * <pre>
     * New JobTrigger value.
     * </pre>
     *
     * <code>.google.privacy.dlp.v2.JobTrigger job_trigger = 2;</code>
     */
    public com.google.privacy.dlp.v2.JobTriggerOrBuilder getJobTriggerOrBuilder() {
      if (jobTriggerBuilder_ != null) {
        return jobTriggerBuilder_.getMessageOrBuilder();
      } else {
        return jobTrigger_ == null
            ? com.google.privacy.dlp.v2.JobTrigger.getDefaultInstance()
            : jobTrigger_;
      }
    }

    /**
     *
     *
     * <pre>
     * New JobTrigger value.
     * </pre>
     *
     * <code>.google.privacy.dlp.v2.JobTrigger job_trigger = 2;</code>
     */
    private com.google.protobuf.SingleFieldBuilderV3<
            com.google.privacy.dlp.v2.JobTrigger,
            com.google.privacy.dlp.v2.JobTrigger.Builder,
            com.google.privacy.dlp.v2.JobTriggerOrBuilder>
        getJobTriggerFieldBuilder() {
      if (jobTriggerBuilder_ == null) {
        jobTriggerBuilder_ =
            new com.google.protobuf.SingleFieldBuilderV3<
                com.google.privacy.dlp.v2.JobTrigger,
                com.google.privacy.dlp.v2.JobTrigger.Builder,
                com.google.privacy.dlp.v2.JobTriggerOrBuilder>(
                getJobTrigger(), getParentForChildren(), isClean());
        // Once a field builder owns the value, the plain field is cleared.
        jobTrigger_ = null;
      }
      return jobTriggerBuilder_;
    }

    private com.google.protobuf.FieldMask updateMask_;
    private com.google.protobuf.SingleFieldBuilderV3<
            com.google.protobuf.FieldMask,
            com.google.protobuf.FieldMask.Builder,
            com.google.protobuf.FieldMaskOrBuilder>
        updateMaskBuilder_;

    /**
     *
     *
     * <pre>
     * Mask to control which fields get updated.
     * </pre>
     *
     * <code>.google.protobuf.FieldMask update_mask = 3;</code>
     *
     * @return Whether the updateMask field is set.
     */
    public boolean hasUpdateMask() {
      return ((bitField0_ & 0x00000004) != 0);
    }

    /**
     *
     *
     * <pre>
     * Mask to control which fields get updated.
     * </pre>
     *
     * <code>.google.protobuf.FieldMask update_mask = 3;</code>
     *
     * @return The updateMask.
     */
    public com.google.protobuf.FieldMask getUpdateMask() {
      if (updateMaskBuilder_ == null) {
        return updateMask_ == null
            ? com.google.protobuf.FieldMask.getDefaultInstance()
            : updateMask_;
      } else {
        return updateMaskBuilder_.getMessage();
      }
    }

    /**
     *
     *
     * <pre>
     * Mask to control which fields get updated.
     * </pre>
     *
     * <code>.google.protobuf.FieldMask update_mask = 3;</code>
     */
    public Builder setUpdateMask(com.google.protobuf.FieldMask value) {
      if (updateMaskBuilder_ == null) {
        if (value == null) {
          throw new NullPointerException();
        }
        updateMask_ = value;
      } else {
        updateMaskBuilder_.setMessage(value);
      }
      bitField0_ |= 0x00000004;
      onChanged();
      return this;
    }

    /**
     *
     *
     * <pre>
     * Mask to control which fields get updated.
     * </pre>
     *
     * <code>.google.protobuf.FieldMask update_mask = 3;</code>
     */
    public Builder setUpdateMask(com.google.protobuf.FieldMask.Builder builderForValue) {
      if (updateMaskBuilder_ == null) {
        updateMask_ = builderForValue.build();
      } else {
        updateMaskBuilder_.setMessage(builderForValue.build());
      }
      bitField0_ |= 0x00000004;
      onChanged();
      return this;
    }

    /**
     *
     *
     * <pre>
     * Mask to control which fields get updated.
     * </pre>
     *
     * <code>.google.protobuf.FieldMask update_mask = 3;</code>
     */
    public Builder mergeUpdateMask(com.google.protobuf.FieldMask value) {
      if (updateMaskBuilder_ == null) {
        if (((bitField0_ & 0x00000004) != 0)
            && updateMask_ != null
            && updateMask_ != com.google.protobuf.FieldMask.getDefaultInstance()) {
          getUpdateMaskBuilder().mergeFrom(value);
        } else {
          updateMask_ = value;
        }
      } else {
        updateMaskBuilder_.mergeFrom(value);
      }
      if (updateMask_ != null) {
        bitField0_ |= 0x00000004;
        onChanged();
      }
      return this;
    }

    /**
     *
     *
     * <pre>
     * Mask to control which fields get updated.
     * </pre>
     *
     * <code>.google.protobuf.FieldMask update_mask = 3;</code>
     */
    public Builder clearUpdateMask() {
      bitField0_ = (bitField0_ & ~0x00000004);
      updateMask_ = null;
      if (updateMaskBuilder_ != null) {
        updateMaskBuilder_.dispose();
        updateMaskBuilder_ = null;
      }
      onChanged();
      return this;
    }

    /**
     *
     *
     * <pre>
     * Mask to control which fields get updated.
     * </pre>
     *
     * <code>.google.protobuf.FieldMask update_mask = 3;</code>
     */
    public com.google.protobuf.FieldMask.Builder getUpdateMaskBuilder() {
      bitField0_ |= 0x00000004;
      onChanged();
      return getUpdateMaskFieldBuilder().getBuilder();
    }

    /**
     *
     *
     * <pre>
     * Mask to control which fields get updated.
     * </pre>
     *
     * <code>.google.protobuf.FieldMask update_mask = 3;</code>
     */
    public com.google.protobuf.FieldMaskOrBuilder getUpdateMaskOrBuilder() {
      if (updateMaskBuilder_ != null) {
        return updateMaskBuilder_.getMessageOrBuilder();
      } else {
        return updateMask_ == null
            ? com.google.protobuf.FieldMask.getDefaultInstance()
            : updateMask_;
      }
    }

    /**
     *
     *
     * <pre>
     * Mask to control which fields get updated.
     * </pre>
     *
     * <code>.google.protobuf.FieldMask update_mask = 3;</code>
     */
    private com.google.protobuf.SingleFieldBuilderV3<
            com.google.protobuf.FieldMask,
            com.google.protobuf.FieldMask.Builder,
            com.google.protobuf.FieldMaskOrBuilder>
        getUpdateMaskFieldBuilder() {
      if (updateMaskBuilder_ == null) {
        updateMaskBuilder_ =
            new com.google.protobuf.SingleFieldBuilderV3<
                com.google.protobuf.FieldMask,
                com.google.protobuf.FieldMask.Builder,
                com.google.protobuf.FieldMaskOrBuilder>(
                getUpdateMask(), getParentForChildren(), isClean());
        updateMask_ = null;
      }
      return updateMaskBuilder_;
    }

    @java.lang.Override
    public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.setUnknownFields(unknownFields);
    }

    @java.lang.Override
    public final Builder mergeUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.mergeUnknownFields(unknownFields);
    }

    // @@protoc_insertion_point(builder_scope:google.privacy.dlp.v2.UpdateJobTriggerRequest)
  }
// @@protoc_insertion_point(class_scope:google.privacy.dlp.v2.UpdateJobTriggerRequest)
  // Singleton default (all-fields-unset) instance, created eagerly at class load.
  private static final com.google.privacy.dlp.v2.UpdateJobTriggerRequest DEFAULT_INSTANCE;

  static {
    DEFAULT_INSTANCE = new com.google.privacy.dlp.v2.UpdateJobTriggerRequest();
  }
  // Returns the shared immutable default instance.
  public static com.google.privacy.dlp.v2.UpdateJobTriggerRequest getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }
  // Parser implementation: on any failure it attaches the partially-built
  // message to the thrown InvalidProtocolBufferException so callers can
  // inspect whatever was successfully read.
  private static final com.google.protobuf.Parser<UpdateJobTriggerRequest> PARSER =
      new com.google.protobuf.AbstractParser<UpdateJobTriggerRequest>() {
        @java.lang.Override
        public UpdateJobTriggerRequest parsePartialFrom(
            com.google.protobuf.CodedInputStream input,
            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
            throws com.google.protobuf.InvalidProtocolBufferException {
          Builder builder = newBuilder();
          try {
            builder.mergeFrom(input, extensionRegistry);
          } catch (com.google.protobuf.InvalidProtocolBufferException e) {
            throw e.setUnfinishedMessage(builder.buildPartial());
          } catch (com.google.protobuf.UninitializedMessageException e) {
            throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
          } catch (java.io.IOException e) {
            throw new com.google.protobuf.InvalidProtocolBufferException(e)
                .setUnfinishedMessage(builder.buildPartial());
          }
          return builder.buildPartial();
        }
      };
  // Static accessor for the message parser.
  public static com.google.protobuf.Parser<UpdateJobTriggerRequest> parser() {
    return PARSER;
  }
  @java.lang.Override
  public com.google.protobuf.Parser<UpdateJobTriggerRequest> getParserForType() {
    // Instance-level accessor required by the Message interface.
    return PARSER;
  }
  @java.lang.Override
  public com.google.privacy.dlp.v2.UpdateJobTriggerRequest getDefaultInstanceForType() {
    // Instance-level accessor required by the MessageOrBuilder interface.
    return DEFAULT_INSTANCE;
  }
}
|
googleapis/google-cloud-java | 37,046 | java-recommendations-ai/proto-google-cloud-recommendations-ai-v1beta1/src/main/java/com/google/cloud/recommendationengine/v1beta1/CreatePredictionApiKeyRegistrationRequest.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/recommendationengine/v1beta1/prediction_apikey_registry_service.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.recommendationengine.v1beta1;
/**
*
*
* <pre>
* Request message for the `CreatePredictionApiKeyRegistration` method.
* </pre>
*
* Protobuf type {@code
* google.cloud.recommendationengine.v1beta1.CreatePredictionApiKeyRegistrationRequest}
*/
public final class CreatePredictionApiKeyRegistrationRequest
extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.recommendationengine.v1beta1.CreatePredictionApiKeyRegistrationRequest)
CreatePredictionApiKeyRegistrationRequestOrBuilder {
private static final long serialVersionUID = 0L;
// Use CreatePredictionApiKeyRegistrationRequest.newBuilder() to construct.
private CreatePredictionApiKeyRegistrationRequest(
com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private CreatePredictionApiKeyRegistrationRequest() {
parent_ = "";
}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
return new CreatePredictionApiKeyRegistrationRequest();
}
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.recommendationengine.v1beta1.PredictionApikeyRegistryService
.internal_static_google_cloud_recommendationengine_v1beta1_CreatePredictionApiKeyRegistrationRequest_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.recommendationengine.v1beta1.PredictionApikeyRegistryService
.internal_static_google_cloud_recommendationengine_v1beta1_CreatePredictionApiKeyRegistrationRequest_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.recommendationengine.v1beta1.CreatePredictionApiKeyRegistrationRequest
.class,
com.google.cloud.recommendationengine.v1beta1.CreatePredictionApiKeyRegistrationRequest
.Builder.class);
}
private int bitField0_;
public static final int PARENT_FIELD_NUMBER = 1;
@SuppressWarnings("serial")
private volatile java.lang.Object parent_ = "";
/**
*
*
* <pre>
* Required. The parent resource path.
* `projects/*/locations/global/catalogs/default_catalog/eventStores/default_event_store`.
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return The parent.
*/
@java.lang.Override
public java.lang.String getParent() {
java.lang.Object ref = parent_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
parent_ = s;
return s;
}
}
/**
*
*
* <pre>
* Required. The parent resource path.
* `projects/*/locations/global/catalogs/default_catalog/eventStores/default_event_store`.
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return The bytes for parent.
*/
@java.lang.Override
public com.google.protobuf.ByteString getParentBytes() {
java.lang.Object ref = parent_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
parent_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
public static final int PREDICTION_API_KEY_REGISTRATION_FIELD_NUMBER = 2;
private com.google.cloud.recommendationengine.v1beta1.PredictionApiKeyRegistration
predictionApiKeyRegistration_;
/**
*
*
* <pre>
* Required. The prediction API key registration.
* </pre>
*
* <code>
* .google.cloud.recommendationengine.v1beta1.PredictionApiKeyRegistration prediction_api_key_registration = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return Whether the predictionApiKeyRegistration field is set.
*/
@java.lang.Override
public boolean hasPredictionApiKeyRegistration() {
return ((bitField0_ & 0x00000001) != 0);
}
/**
*
*
* <pre>
* Required. The prediction API key registration.
* </pre>
*
* <code>
* .google.cloud.recommendationengine.v1beta1.PredictionApiKeyRegistration prediction_api_key_registration = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return The predictionApiKeyRegistration.
*/
@java.lang.Override
public com.google.cloud.recommendationengine.v1beta1.PredictionApiKeyRegistration
getPredictionApiKeyRegistration() {
return predictionApiKeyRegistration_ == null
? com.google.cloud.recommendationengine.v1beta1.PredictionApiKeyRegistration
.getDefaultInstance()
: predictionApiKeyRegistration_;
}
/**
*
*
* <pre>
* Required. The prediction API key registration.
* </pre>
*
* <code>
* .google.cloud.recommendationengine.v1beta1.PredictionApiKeyRegistration prediction_api_key_registration = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
@java.lang.Override
public com.google.cloud.recommendationengine.v1beta1.PredictionApiKeyRegistrationOrBuilder
getPredictionApiKeyRegistrationOrBuilder() {
return predictionApiKeyRegistration_ == null
? com.google.cloud.recommendationengine.v1beta1.PredictionApiKeyRegistration
.getDefaultInstance()
: predictionApiKeyRegistration_;
}
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
@java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(parent_)) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 1, parent_);
}
if (((bitField0_ & 0x00000001) != 0)) {
output.writeMessage(2, getPredictionApiKeyRegistration());
}
getUnknownFields().writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(parent_)) {
size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, parent_);
}
if (((bitField0_ & 0x00000001) != 0)) {
size +=
com.google.protobuf.CodedOutputStream.computeMessageSize(
2, getPredictionApiKeyRegistration());
}
size += getUnknownFields().getSerializedSize();
memoizedSize = size;
return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj
instanceof
com.google.cloud.recommendationengine.v1beta1.CreatePredictionApiKeyRegistrationRequest)) {
return super.equals(obj);
}
com.google.cloud.recommendationengine.v1beta1.CreatePredictionApiKeyRegistrationRequest other =
(com.google.cloud.recommendationengine.v1beta1.CreatePredictionApiKeyRegistrationRequest)
obj;
if (!getParent().equals(other.getParent())) return false;
if (hasPredictionApiKeyRegistration() != other.hasPredictionApiKeyRegistration()) return false;
if (hasPredictionApiKeyRegistration()) {
if (!getPredictionApiKeyRegistration().equals(other.getPredictionApiKeyRegistration()))
return false;
}
if (!getUnknownFields().equals(other.getUnknownFields())) return false;
return true;
}
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
hash = (37 * hash) + PARENT_FIELD_NUMBER;
hash = (53 * hash) + getParent().hashCode();
if (hasPredictionApiKeyRegistration()) {
hash = (37 * hash) + PREDICTION_API_KEY_REGISTRATION_FIELD_NUMBER;
hash = (53 * hash) + getPredictionApiKeyRegistration().hashCode();
}
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
// Standard generated parse entry points: the byte-oriented overloads (ByteBuffer,
// ByteString, byte[]) delegate to PARSER directly; the stream-oriented overloads go
// through GeneratedMessageV3 helpers so java.io.IOException is surfaced unwrapped.
public static com.google.cloud.recommendationengine.v1beta1
        .CreatePredictionApiKeyRegistrationRequest
    parseFrom(java.nio.ByteBuffer data)
        throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data);
}

public static com.google.cloud.recommendationengine.v1beta1
        .CreatePredictionApiKeyRegistrationRequest
    parseFrom(
        java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data, extensionRegistry);
}

public static com.google.cloud.recommendationengine.v1beta1
        .CreatePredictionApiKeyRegistrationRequest
    parseFrom(com.google.protobuf.ByteString data)
        throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data);
}

public static com.google.cloud.recommendationengine.v1beta1
        .CreatePredictionApiKeyRegistrationRequest
    parseFrom(
        com.google.protobuf.ByteString data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data, extensionRegistry);
}

public static com.google.cloud.recommendationengine.v1beta1
        .CreatePredictionApiKeyRegistrationRequest
    parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data);
}

public static com.google.cloud.recommendationengine.v1beta1
        .CreatePredictionApiKeyRegistrationRequest
    parseFrom(byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data, extensionRegistry);
}

public static com.google.cloud.recommendationengine.v1beta1
        .CreatePredictionApiKeyRegistrationRequest
    parseFrom(java.io.InputStream input) throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}

public static com.google.cloud.recommendationengine.v1beta1
        .CreatePredictionApiKeyRegistrationRequest
    parseFrom(
        java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
      PARSER, input, extensionRegistry);
}

public static com.google.cloud.recommendationengine.v1beta1
        .CreatePredictionApiKeyRegistrationRequest
    parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
}

public static com.google.cloud.recommendationengine.v1beta1
        .CreatePredictionApiKeyRegistrationRequest
    parseDelimitedFrom(
        java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
      PARSER, input, extensionRegistry);
}

public static com.google.cloud.recommendationengine.v1beta1
        .CreatePredictionApiKeyRegistrationRequest
    parseFrom(com.google.protobuf.CodedInputStream input) throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}

public static com.google.cloud.recommendationengine.v1beta1
        .CreatePredictionApiKeyRegistrationRequest
    parseFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
      PARSER, input, extensionRegistry);
}
// Builder factory methods: builders are obtained from the shared default instance;
// toBuilder() on the default instance returns a fresh empty Builder (no merge).
@java.lang.Override
public Builder newBuilderForType() {
  return newBuilder();
}

public static Builder newBuilder() {
  return DEFAULT_INSTANCE.toBuilder();
}

public static Builder newBuilder(
    com.google.cloud.recommendationengine.v1beta1.CreatePredictionApiKeyRegistrationRequest
        prototype) {
  return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}

@java.lang.Override
public Builder toBuilder() {
  return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
}

@java.lang.Override
protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
  Builder builder = new Builder(parent);
  return builder;
}
// NOTE(review): protoc-generated Builder (file header says DO NOT EDIT) — code left
// byte-identical; comments only. Regeneration from the .proto will drop these notes.
/**
 *
 *
 * <pre>
 * Request message for the `CreatePredictionApiKeyRegistration` method.
 * </pre>
 *
 * Protobuf type {@code
 * google.cloud.recommendationengine.v1beta1.CreatePredictionApiKeyRegistrationRequest}
 */
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
    implements
    // @@protoc_insertion_point(builder_implements:google.cloud.recommendationengine.v1beta1.CreatePredictionApiKeyRegistrationRequest)
    com.google.cloud.recommendationengine.v1beta1
        .CreatePredictionApiKeyRegistrationRequestOrBuilder {
  public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
    return com.google.cloud.recommendationengine.v1beta1.PredictionApikeyRegistryService
        .internal_static_google_cloud_recommendationengine_v1beta1_CreatePredictionApiKeyRegistrationRequest_descriptor;
  }

  @java.lang.Override
  protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internalGetFieldAccessorTable() {
    return com.google.cloud.recommendationengine.v1beta1.PredictionApikeyRegistryService
        .internal_static_google_cloud_recommendationengine_v1beta1_CreatePredictionApiKeyRegistrationRequest_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            com.google.cloud.recommendationengine.v1beta1
                .CreatePredictionApiKeyRegistrationRequest.class,
            com.google.cloud.recommendationengine.v1beta1
                .CreatePredictionApiKeyRegistrationRequest.Builder.class);
  }

  // Construct using
  // com.google.cloud.recommendationengine.v1beta1.CreatePredictionApiKeyRegistrationRequest.newBuilder()
  private Builder() {
    maybeForceBuilderInitialization();
  }

  private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
    super(parent);
    maybeForceBuilderInitialization();
  }

  // Eagerly creates the nested field builder when the runtime flag requests it;
  // otherwise field builders are created lazily on first access.
  private void maybeForceBuilderInitialization() {
    if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) {
      getPredictionApiKeyRegistrationFieldBuilder();
    }
  }

  @java.lang.Override
  public Builder clear() {
    super.clear();
    // Reset presence bits and both fields; dispose the nested builder if present.
    bitField0_ = 0;
    parent_ = "";
    predictionApiKeyRegistration_ = null;
    if (predictionApiKeyRegistrationBuilder_ != null) {
      predictionApiKeyRegistrationBuilder_.dispose();
      predictionApiKeyRegistrationBuilder_ = null;
    }
    return this;
  }

  @java.lang.Override
  public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
    return com.google.cloud.recommendationengine.v1beta1.PredictionApikeyRegistryService
        .internal_static_google_cloud_recommendationengine_v1beta1_CreatePredictionApiKeyRegistrationRequest_descriptor;
  }

  @java.lang.Override
  public com.google.cloud.recommendationengine.v1beta1.CreatePredictionApiKeyRegistrationRequest
      getDefaultInstanceForType() {
    return com.google.cloud.recommendationengine.v1beta1.CreatePredictionApiKeyRegistrationRequest
        .getDefaultInstance();
  }

  @java.lang.Override
  public com.google.cloud.recommendationengine.v1beta1.CreatePredictionApiKeyRegistrationRequest
      build() {
    com.google.cloud.recommendationengine.v1beta1.CreatePredictionApiKeyRegistrationRequest
        result = buildPartial();
    if (!result.isInitialized()) {
      throw newUninitializedMessageException(result);
    }
    return result;
  }

  @java.lang.Override
  public com.google.cloud.recommendationengine.v1beta1.CreatePredictionApiKeyRegistrationRequest
      buildPartial() {
    com.google.cloud.recommendationengine.v1beta1.CreatePredictionApiKeyRegistrationRequest
        result =
            new com.google.cloud.recommendationengine.v1beta1
                .CreatePredictionApiKeyRegistrationRequest(this);
    if (bitField0_ != 0) {
      buildPartial0(result);
    }
    onBuilt();
    return result;
  }

  // Copies set fields into the message. Note the bit remapping: builder bit 0x2
  // (predictionApiKeyRegistration) becomes message bit 0x1; parent carries no
  // presence bit in the message.
  private void buildPartial0(
      com.google.cloud.recommendationengine.v1beta1.CreatePredictionApiKeyRegistrationRequest
          result) {
    int from_bitField0_ = bitField0_;
    if (((from_bitField0_ & 0x00000001) != 0)) {
      result.parent_ = parent_;
    }
    int to_bitField0_ = 0;
    if (((from_bitField0_ & 0x00000002) != 0)) {
      result.predictionApiKeyRegistration_ =
          predictionApiKeyRegistrationBuilder_ == null
              ? predictionApiKeyRegistration_
              : predictionApiKeyRegistrationBuilder_.build();
      to_bitField0_ |= 0x00000001;
    }
    result.bitField0_ |= to_bitField0_;
  }

  @java.lang.Override
  public Builder clone() {
    return super.clone();
  }

  @java.lang.Override
  public Builder setField(
      com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
    return super.setField(field, value);
  }

  @java.lang.Override
  public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
    return super.clearField(field);
  }

  @java.lang.Override
  public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
    return super.clearOneof(oneof);
  }

  @java.lang.Override
  public Builder setRepeatedField(
      com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
    return super.setRepeatedField(field, index, value);
  }

  @java.lang.Override
  public Builder addRepeatedField(
      com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
    return super.addRepeatedField(field, value);
  }

  @java.lang.Override
  public Builder mergeFrom(com.google.protobuf.Message other) {
    // Dispatch to the typed merge when possible; otherwise fall back to the
    // reflective descriptor-based merge in the superclass.
    if (other
        instanceof
        com.google.cloud.recommendationengine.v1beta1.CreatePredictionApiKeyRegistrationRequest) {
      return mergeFrom(
          (com.google.cloud.recommendationengine.v1beta1
                  .CreatePredictionApiKeyRegistrationRequest)
              other);
    } else {
      super.mergeFrom(other);
      return this;
    }
  }

  public Builder mergeFrom(
      com.google.cloud.recommendationengine.v1beta1.CreatePredictionApiKeyRegistrationRequest
          other) {
    // Identity comparison with the shared default instance is intentional here.
    if (other
        == com.google.cloud.recommendationengine.v1beta1.CreatePredictionApiKeyRegistrationRequest
            .getDefaultInstance()) return this;
    if (!other.getParent().isEmpty()) {
      parent_ = other.parent_;
      bitField0_ |= 0x00000001;
      onChanged();
    }
    if (other.hasPredictionApiKeyRegistration()) {
      mergePredictionApiKeyRegistration(other.getPredictionApiKeyRegistration());
    }
    this.mergeUnknownFields(other.getUnknownFields());
    onChanged();
    return this;
  }

  @java.lang.Override
  public final boolean isInitialized() {
    // No required (proto2-style) fields, so any state is considered initialized.
    return true;
  }

  @java.lang.Override
  public Builder mergeFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    if (extensionRegistry == null) {
      throw new java.lang.NullPointerException();
    }
    try {
      boolean done = false;
      // Wire-format tag loop: tag 10 = field 1 (parent, length-delimited string),
      // tag 18 = field 2 (prediction_api_key_registration, embedded message).
      while (!done) {
        int tag = input.readTag();
        switch (tag) {
          case 0:
            done = true;
            break;
          case 10:
            {
              parent_ = input.readStringRequireUtf8();
              bitField0_ |= 0x00000001;
              break;
            } // case 10
          case 18:
            {
              input.readMessage(
                  getPredictionApiKeyRegistrationFieldBuilder().getBuilder(), extensionRegistry);
              bitField0_ |= 0x00000002;
              break;
            } // case 18
          default:
            {
              if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                done = true; // was an endgroup tag
              }
              break;
            } // default:
        } // switch (tag)
      } // while (!done)
    } catch (com.google.protobuf.InvalidProtocolBufferException e) {
      throw e.unwrapIOException();
    } finally {
      onChanged();
    } // finally
    return this;
  }

  private int bitField0_;

  // Stored as String or ByteString; converted lazily by the accessors below.
  private java.lang.Object parent_ = "";
  /**
   *
   *
   * <pre>
   * Required. The parent resource path.
   * `projects/&#42;/locations/global/catalogs/default_catalog/eventStores/default_event_store`.
   * </pre>
   *
   * <code>
   * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
   * </code>
   *
   * @return The parent.
   */
  public java.lang.String getParent() {
    java.lang.Object ref = parent_;
    if (!(ref instanceof java.lang.String)) {
      com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
      java.lang.String s = bs.toStringUtf8();
      parent_ = s;
      return s;
    } else {
      return (java.lang.String) ref;
    }
  }

  /**
   *
   *
   * <pre>
   * Required. The parent resource path.
   * `projects/&#42;/locations/global/catalogs/default_catalog/eventStores/default_event_store`.
   * </pre>
   *
   * <code>
   * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
   * </code>
   *
   * @return The bytes for parent.
   */
  public com.google.protobuf.ByteString getParentBytes() {
    java.lang.Object ref = parent_;
    if (ref instanceof String) {
      com.google.protobuf.ByteString b =
          com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
      parent_ = b;
      return b;
    } else {
      return (com.google.protobuf.ByteString) ref;
    }
  }

  /**
   *
   *
   * <pre>
   * Required. The parent resource path.
   * `projects/&#42;/locations/global/catalogs/default_catalog/eventStores/default_event_store`.
   * </pre>
   *
   * <code>
   * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
   * </code>
   *
   * @param value The parent to set.
   * @return This builder for chaining.
   */
  public Builder setParent(java.lang.String value) {
    if (value == null) {
      throw new NullPointerException();
    }
    parent_ = value;
    bitField0_ |= 0x00000001;
    onChanged();
    return this;
  }

  /**
   *
   *
   * <pre>
   * Required. The parent resource path.
   * `projects/&#42;/locations/global/catalogs/default_catalog/eventStores/default_event_store`.
   * </pre>
   *
   * <code>
   * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
   * </code>
   *
   * @return This builder for chaining.
   */
  public Builder clearParent() {
    parent_ = getDefaultInstance().getParent();
    bitField0_ = (bitField0_ & ~0x00000001);
    onChanged();
    return this;
  }

  /**
   *
   *
   * <pre>
   * Required. The parent resource path.
   * `projects/&#42;/locations/global/catalogs/default_catalog/eventStores/default_event_store`.
   * </pre>
   *
   * <code>
   * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
   * </code>
   *
   * @param value The bytes for parent to set.
   * @return This builder for chaining.
   */
  public Builder setParentBytes(com.google.protobuf.ByteString value) {
    if (value == null) {
      throw new NullPointerException();
    }
    checkByteStringIsUtf8(value);
    parent_ = value;
    bitField0_ |= 0x00000001;
    onChanged();
    return this;
  }

  private com.google.cloud.recommendationengine.v1beta1.PredictionApiKeyRegistration
      predictionApiKeyRegistration_;
  private com.google.protobuf.SingleFieldBuilderV3<
          com.google.cloud.recommendationengine.v1beta1.PredictionApiKeyRegistration,
          com.google.cloud.recommendationengine.v1beta1.PredictionApiKeyRegistration.Builder,
          com.google.cloud.recommendationengine.v1beta1.PredictionApiKeyRegistrationOrBuilder>
      predictionApiKeyRegistrationBuilder_;
  /**
   *
   *
   * <pre>
   * Required. The prediction API key registration.
   * </pre>
   *
   * <code>
   * .google.cloud.recommendationengine.v1beta1.PredictionApiKeyRegistration prediction_api_key_registration = 2 [(.google.api.field_behavior) = REQUIRED];
   * </code>
   *
   * @return Whether the predictionApiKeyRegistration field is set.
   */
  public boolean hasPredictionApiKeyRegistration() {
    return ((bitField0_ & 0x00000002) != 0);
  }

  /**
   *
   *
   * <pre>
   * Required. The prediction API key registration.
   * </pre>
   *
   * <code>
   * .google.cloud.recommendationengine.v1beta1.PredictionApiKeyRegistration prediction_api_key_registration = 2 [(.google.api.field_behavior) = REQUIRED];
   * </code>
   *
   * @return The predictionApiKeyRegistration.
   */
  public com.google.cloud.recommendationengine.v1beta1.PredictionApiKeyRegistration
      getPredictionApiKeyRegistration() {
    if (predictionApiKeyRegistrationBuilder_ == null) {
      return predictionApiKeyRegistration_ == null
          ? com.google.cloud.recommendationengine.v1beta1.PredictionApiKeyRegistration
              .getDefaultInstance()
          : predictionApiKeyRegistration_;
    } else {
      return predictionApiKeyRegistrationBuilder_.getMessage();
    }
  }

  /**
   *
   *
   * <pre>
   * Required. The prediction API key registration.
   * </pre>
   *
   * <code>
   * .google.cloud.recommendationengine.v1beta1.PredictionApiKeyRegistration prediction_api_key_registration = 2 [(.google.api.field_behavior) = REQUIRED];
   * </code>
   */
  public Builder setPredictionApiKeyRegistration(
      com.google.cloud.recommendationengine.v1beta1.PredictionApiKeyRegistration value) {
    if (predictionApiKeyRegistrationBuilder_ == null) {
      if (value == null) {
        throw new NullPointerException();
      }
      predictionApiKeyRegistration_ = value;
    } else {
      predictionApiKeyRegistrationBuilder_.setMessage(value);
    }
    bitField0_ |= 0x00000002;
    onChanged();
    return this;
  }

  /**
   *
   *
   * <pre>
   * Required. The prediction API key registration.
   * </pre>
   *
   * <code>
   * .google.cloud.recommendationengine.v1beta1.PredictionApiKeyRegistration prediction_api_key_registration = 2 [(.google.api.field_behavior) = REQUIRED];
   * </code>
   */
  public Builder setPredictionApiKeyRegistration(
      com.google.cloud.recommendationengine.v1beta1.PredictionApiKeyRegistration.Builder
          builderForValue) {
    if (predictionApiKeyRegistrationBuilder_ == null) {
      predictionApiKeyRegistration_ = builderForValue.build();
    } else {
      predictionApiKeyRegistrationBuilder_.setMessage(builderForValue.build());
    }
    bitField0_ |= 0x00000002;
    onChanged();
    return this;
  }

  /**
   *
   *
   * <pre>
   * Required. The prediction API key registration.
   * </pre>
   *
   * <code>
   * .google.cloud.recommendationengine.v1beta1.PredictionApiKeyRegistration prediction_api_key_registration = 2 [(.google.api.field_behavior) = REQUIRED];
   * </code>
   */
  public Builder mergePredictionApiKeyRegistration(
      com.google.cloud.recommendationengine.v1beta1.PredictionApiKeyRegistration value) {
    if (predictionApiKeyRegistrationBuilder_ == null) {
      // Reference (!=) comparison against the shared default instance is the
      // generator's cheap "already has a real value" check.
      if (((bitField0_ & 0x00000002) != 0)
          && predictionApiKeyRegistration_ != null
          && predictionApiKeyRegistration_
              != com.google.cloud.recommendationengine.v1beta1.PredictionApiKeyRegistration
                  .getDefaultInstance()) {
        getPredictionApiKeyRegistrationBuilder().mergeFrom(value);
      } else {
        predictionApiKeyRegistration_ = value;
      }
    } else {
      predictionApiKeyRegistrationBuilder_.mergeFrom(value);
    }
    if (predictionApiKeyRegistration_ != null) {
      bitField0_ |= 0x00000002;
      onChanged();
    }
    return this;
  }

  /**
   *
   *
   * <pre>
   * Required. The prediction API key registration.
   * </pre>
   *
   * <code>
   * .google.cloud.recommendationengine.v1beta1.PredictionApiKeyRegistration prediction_api_key_registration = 2 [(.google.api.field_behavior) = REQUIRED];
   * </code>
   */
  public Builder clearPredictionApiKeyRegistration() {
    bitField0_ = (bitField0_ & ~0x00000002);
    predictionApiKeyRegistration_ = null;
    if (predictionApiKeyRegistrationBuilder_ != null) {
      predictionApiKeyRegistrationBuilder_.dispose();
      predictionApiKeyRegistrationBuilder_ = null;
    }
    onChanged();
    return this;
  }

  /**
   *
   *
   * <pre>
   * Required. The prediction API key registration.
   * </pre>
   *
   * <code>
   * .google.cloud.recommendationengine.v1beta1.PredictionApiKeyRegistration prediction_api_key_registration = 2 [(.google.api.field_behavior) = REQUIRED];
   * </code>
   */
  public com.google.cloud.recommendationengine.v1beta1.PredictionApiKeyRegistration.Builder
      getPredictionApiKeyRegistrationBuilder() {
    // Marks the field as set: obtaining the builder implies intent to populate it.
    bitField0_ |= 0x00000002;
    onChanged();
    return getPredictionApiKeyRegistrationFieldBuilder().getBuilder();
  }

  /**
   *
   *
   * <pre>
   * Required. The prediction API key registration.
   * </pre>
   *
   * <code>
   * .google.cloud.recommendationengine.v1beta1.PredictionApiKeyRegistration prediction_api_key_registration = 2 [(.google.api.field_behavior) = REQUIRED];
   * </code>
   */
  public com.google.cloud.recommendationengine.v1beta1.PredictionApiKeyRegistrationOrBuilder
      getPredictionApiKeyRegistrationOrBuilder() {
    if (predictionApiKeyRegistrationBuilder_ != null) {
      return predictionApiKeyRegistrationBuilder_.getMessageOrBuilder();
    } else {
      return predictionApiKeyRegistration_ == null
          ? com.google.cloud.recommendationengine.v1beta1.PredictionApiKeyRegistration
              .getDefaultInstance()
          : predictionApiKeyRegistration_;
    }
  }

  /**
   *
   *
   * <pre>
   * Required. The prediction API key registration.
   * </pre>
   *
   * <code>
   * .google.cloud.recommendationengine.v1beta1.PredictionApiKeyRegistration prediction_api_key_registration = 2 [(.google.api.field_behavior) = REQUIRED];
   * </code>
   */
  private com.google.protobuf.SingleFieldBuilderV3<
          com.google.cloud.recommendationengine.v1beta1.PredictionApiKeyRegistration,
          com.google.cloud.recommendationengine.v1beta1.PredictionApiKeyRegistration.Builder,
          com.google.cloud.recommendationengine.v1beta1.PredictionApiKeyRegistrationOrBuilder>
      getPredictionApiKeyRegistrationFieldBuilder() {
    // Lazily created; once the field builder exists it owns the value and the
    // plain field reference is nulled out.
    if (predictionApiKeyRegistrationBuilder_ == null) {
      predictionApiKeyRegistrationBuilder_ =
          new com.google.protobuf.SingleFieldBuilderV3<
              com.google.cloud.recommendationengine.v1beta1.PredictionApiKeyRegistration,
              com.google.cloud.recommendationengine.v1beta1.PredictionApiKeyRegistration.Builder,
              com.google.cloud.recommendationengine.v1beta1
                  .PredictionApiKeyRegistrationOrBuilder>(
              getPredictionApiKeyRegistration(), getParentForChildren(), isClean());
      predictionApiKeyRegistration_ = null;
    }
    return predictionApiKeyRegistrationBuilder_;
  }

  @java.lang.Override
  public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
    return super.setUnknownFields(unknownFields);
  }

  @java.lang.Override
  public final Builder mergeUnknownFields(
      final com.google.protobuf.UnknownFieldSet unknownFields) {
    return super.mergeUnknownFields(unknownFields);
  }

  // @@protoc_insertion_point(builder_scope:google.cloud.recommendationengine.v1beta1.CreatePredictionApiKeyRegistrationRequest)
}
// @@protoc_insertion_point(class_scope:google.cloud.recommendationengine.v1beta1.CreatePredictionApiKeyRegistrationRequest)
// Shared immutable default instance; all empty messages of this type alias it.
private static final com.google.cloud.recommendationengine.v1beta1
        .CreatePredictionApiKeyRegistrationRequest
    DEFAULT_INSTANCE;

static {
  DEFAULT_INSTANCE =
      new com.google.cloud.recommendationengine.v1beta1
          .CreatePredictionApiKeyRegistrationRequest();
}

public static com.google.cloud.recommendationengine.v1beta1
        .CreatePredictionApiKeyRegistrationRequest
    getDefaultInstance() {
  return DEFAULT_INSTANCE;
}

// Parser used by all parseFrom entry points; on failure it attaches the partially
// parsed message to the thrown InvalidProtocolBufferException.
private static final com.google.protobuf.Parser<CreatePredictionApiKeyRegistrationRequest>
    PARSER =
        new com.google.protobuf.AbstractParser<CreatePredictionApiKeyRegistrationRequest>() {
          @java.lang.Override
          public CreatePredictionApiKeyRegistrationRequest parsePartialFrom(
              com.google.protobuf.CodedInputStream input,
              com.google.protobuf.ExtensionRegistryLite extensionRegistry)
              throws com.google.protobuf.InvalidProtocolBufferException {
            Builder builder = newBuilder();
            try {
              builder.mergeFrom(input, extensionRegistry);
            } catch (com.google.protobuf.InvalidProtocolBufferException e) {
              throw e.setUnfinishedMessage(builder.buildPartial());
            } catch (com.google.protobuf.UninitializedMessageException e) {
              throw e.asInvalidProtocolBufferException()
                  .setUnfinishedMessage(builder.buildPartial());
            } catch (java.io.IOException e) {
              throw new com.google.protobuf.InvalidProtocolBufferException(e)
                  .setUnfinishedMessage(builder.buildPartial());
            }
            return builder.buildPartial();
          }
        };

public static com.google.protobuf.Parser<CreatePredictionApiKeyRegistrationRequest> parser() {
  return PARSER;
}

@java.lang.Override
public com.google.protobuf.Parser<CreatePredictionApiKeyRegistrationRequest> getParserForType() {
  return PARSER;
}

@java.lang.Override
public com.google.cloud.recommendationengine.v1beta1.CreatePredictionApiKeyRegistrationRequest
    getDefaultInstanceForType() {
  return DEFAULT_INSTANCE;
}
}
|
apache/maven | 37,344 | api/maven-api-core/src/main/java/org/apache/maven/api/Session.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.maven.api;
import java.nio.file.Path;
import java.util.Collection;
import java.util.List;
import java.util.Map;
import java.util.NoSuchElementException;
import java.util.Optional;
import org.apache.maven.api.annotations.Experimental;
import org.apache.maven.api.annotations.Nonnull;
import org.apache.maven.api.annotations.Nullable;
import org.apache.maven.api.annotations.ThreadSafe;
import org.apache.maven.api.model.Repository;
import org.apache.maven.api.services.ArtifactCoordinatesFactory;
import org.apache.maven.api.services.DependencyCoordinatesFactory;
import org.apache.maven.api.services.VersionResolverException;
import org.apache.maven.api.settings.Settings;
import org.apache.maven.api.toolchain.ToolchainModel;
/**
* The session to install / deploy / resolve artifacts and dependencies.
*
* @since 4.0.0
*/
@Experimental
@ThreadSafe
public interface Session extends ProtoSession {
/**
* Returns the current maven version.
*
* @return the maven version, never {@code null}
*/
@Nonnull
Version getMavenVersion();
/**
* Retrieves the settings for the current session.
*
* @return the settings instance
*/
@Nonnull
Settings getSettings();
/**
* Retrieves toolchain models that have been explicitly configured.
*
* @return the toolchain models
*/
@Nonnull
Collection<ToolchainModel> getToolchains();
/**
* Retrieves the local repository associated with this session.
*
* @return the local repository instance
*/
@Nonnull
LocalRepository getLocalRepository();
/**
* Retrieves a list of remote repositories associated with this session.
*
* @return a list of remote repositories
*/
@Nonnull
List<RemoteRepository> getRemoteRepositories();
/**
* Retrieves the session data associated with this session.
*
* @return the session data, never {@code null}
*/
@Nonnull
SessionData getData();
/**
 * Default implementation at {@link ProtoSession} level, as the notion of project
 * does not exist there.
 *
 * @return the effective properties computed without any project in scope,
 *         never {@code null}
 */
@Nonnull
default Map<String, String> getEffectiveProperties() {
    // Delegate to the project-aware overload with no project in scope.
    return getEffectiveProperties(null);
}
/**
* Each invocation computes a new map of effective properties. To be used in interpolation.
* <p>
* Effective properties are computed from system, user and optionally project properties, layered with
* defined precedence onto each other to achieve proper precedence. Precedence is defined as:
* <ul>
* <li>System properties (lowest precedence)</li>
* <li>Project properties (optional)</li>
* <li>User properties (highest precedence)</li>
* </ul>
* Note: Project properties contains properties injected from profiles, if applicable. Their precedence is
* {@code profile > project}, hence active profile property may override project property.
* <p>
* The caller of this method should decide whether there is a project in scope (hence, a project instance
* needs to be passed) or not.
*
* @param project {@link Project} or {@code null}.
* @return the effective properties, never {@code null}
*/
@Nonnull
Map<String, String> getEffectiveProperties(@Nullable Project project);
/**
* Returns the degree of concurrency for the build.
*
* @return the degree of concurrency
*/
int getDegreeOfConcurrency();
/**
* Retrieves a list of projects associated with the session.
*
* @return a list of projects, never {@code null}
*/
@Nonnull
List<Project> getProjects();
/**
* Returns the plugin context for mojo being executed and the specified
* {@link Project}, never returns {@code null} as if context not present, creates it.
*
* <strong>Implementation note:</strong> while this method return type is {@link Map}, the
* returned map instance implements {@link java.util.concurrent.ConcurrentMap} as well.
*
* @throws org.apache.maven.api.services.MavenException if not called from the within a mojo execution
*/
@Nonnull
Map<String, Object> getPluginContext(@Nonnull Project project);
/**
* Retrieves the service for the interface
*
* @throws NoSuchElementException if the service could not be found
*/
@Nonnull
<T extends Service> T getService(@Nonnull Class<T> clazz);
/**
* Creates a derived session using the given local repository.
*
* @param localRepository the new local repository
* @return the derived session
* @throws NullPointerException if {@code localRepository} is null
*/
@Nonnull
Session withLocalRepository(@Nonnull LocalRepository localRepository);
/**
* Creates a derived session using the given remote repositories.
*
* @param repositories the new list of remote repositories
* @return the derived session
* @throws NullPointerException if {@code repositories} is null
*/
@Nonnull
Session withRemoteRepositories(@Nonnull List<RemoteRepository> repositories);
/**
* Register the given listener which will receive all events.
*
* @param listener the listener to register
* @throws NullPointerException if {@code listener} is null
*/
void registerListener(@Nonnull Listener listener);
/**
* Unregisters a previously registered listener.
*
* @param listener the listener to unregister
* @throws NullPointerException if {@code listener} is null
*/
void unregisterListener(@Nonnull Listener listener);
/**
* Returns the list of registered listeners.
*
* @return an immutable collection of listeners, never {@code null}
*/
@Nonnull
Collection<Listener> getListeners();
/**
* Shortcut for {@code getService(RepositoryFactory.class).createLocal(...)}.
*
* @param path location of the local repository to create
* @return cache of artifacts downloaded from a remote repository or built locally
*
* @see org.apache.maven.api.services.RepositoryFactory#createLocal(Path)
*/
@Nonnull
LocalRepository createLocalRepository(@Nonnull Path path);
/**
* Shortcut for {@code getService(RepositoryFactory.class).createRemote(...)}.
*
* @param id identifier of the remote repository to create
* @param url location of the remote repository
* @return remote repository that can be used to download or upload artifacts
*
* @see org.apache.maven.api.services.RepositoryFactory#createRemote(String, String)
*/
@Nonnull
RemoteRepository createRemoteRepository(@Nonnull String id, @Nonnull String url);
/**
* Shortcut for {@code getService(RepositoryFactory.class).createRemote(...)}.
*
* @param repository information needed for establishing connections with remote repository
* @return remote repository that can be used to download or upload artifacts
*
* @see org.apache.maven.api.services.RepositoryFactory#createRemote(Repository)
*/
@Nonnull
RemoteRepository createRemoteRepository(@Nonnull Repository repository);
/**
* Creates a coordinates out of string that is formatted like:
* {@code <groupId>:<artifactId>[:<extension>[:<classifier>]]:<version>}.
* <p>
* Shortcut for {@code getService(ArtifactFactory.class).create(...)}.
*
* @param coordsString the string having "standard" coordinates.
* @return coordinates used to point to the artifact
*
* @see ArtifactCoordinatesFactory#create(Session, String)
*/
@Nonnull
ArtifactCoordinates createArtifactCoordinates(@Nonnull String coordsString);
/**
* Shortcut for {@code getService(ArtifactFactory.class).create(...)}.
*
 * @param groupId the group identifier, or {@code null} if unspecified
 * @param artifactId the artifact identifier, or {@code null} if unspecified
 * @param version the artifact version, or {@code null} if unspecified
 * @param extension the artifact extension, or {@code null} if unspecified
* @return coordinates used to point to the artifact
*
* @see ArtifactCoordinatesFactory#create(Session, String, String, String, String)
*/
@Nonnull
ArtifactCoordinates createArtifactCoordinates(String groupId, String artifactId, String version, String extension);
/**
* Shortcut for {@code getService(ArtifactFactory.class).create(...)}.
*
 * @param groupId the group identifier, or {@code null} if unspecified
 * @param artifactId the artifact identifier, or {@code null} if unspecified
 * @param version the artifact version, or {@code null} if unspecified
 * @param classifier the artifact classifier, or {@code null} if unspecified
 * @param extension the artifact extension, or {@code null} if unspecified
 * @param type the artifact type, or {@code null} if unspecified
* @return coordinates used to point to the artifact
*
* @see ArtifactCoordinatesFactory#create(Session, String, String, String, String, String, String)
*/
@Nonnull
ArtifactCoordinates createArtifactCoordinates(
String groupId, String artifactId, String version, String classifier, String extension, String type);
/**
* Shortcut for {@code getService(ArtifactFactory.class).create(...)}.
*
* @param artifact artifact from which to get coordinates
* @return coordinates used to point to the artifact
*
* @see ArtifactCoordinatesFactory#create(Session, String, String, String, String, String, String)
*/
@Nonnull
ArtifactCoordinates createArtifactCoordinates(@Nonnull Artifact artifact);
    /**
     * Creates dependency coordinates out of the given artifact coordinates.
     * <p>
     * Shortcut for {@code getService(DependencyFactory.class).create(...)}.
     *
     * @param coordinates artifact coordinates to get as a dependency coordinates
     * @return dependency coordinates for the given artifact
     *
     * @see DependencyCoordinatesFactory#create(Session, ArtifactCoordinates)
     */
    @Nonnull
    DependencyCoordinates createDependencyCoordinates(@Nonnull ArtifactCoordinates coordinates);
    /**
     * Creates dependency coordinates out of the given dependency.
     * <p>
     * Shortcut for {@code getService(DependencyFactory.class).create(...)}.
     *
     * @param dependency dependency for which to get the coordinates
     * @return coordinates for the given dependency
     *
     * @see DependencyCoordinatesFactory#create(Session, Dependency)
     */
    @Nonnull
    DependencyCoordinates createDependencyCoordinates(@Nonnull Dependency dependency);
    /**
     * Creates an artifact from the given individual components.
     * <p>
     * Shortcut for {@code getService(ArtifactFactory.class).create(...)}.
     *
     * @param groupId the group identifier, or {@code null} if unspecified
     * @param artifactId the artifact identifier, or {@code null} if unspecified
     * @param version the artifact version, or {@code null} if unspecified
     * @param extension the artifact extension, or {@code null} if unspecified
     * @return artifact with the given coordinates
     *
     * @see org.apache.maven.api.services.ArtifactFactory#create(Session, String, String, String, String)
     */
    @Nonnull
    Artifact createArtifact(String groupId, String artifactId, String version, String extension);
    /**
     * Creates an artifact from the given individual components.
     * <p>
     * Shortcut for {@code getService(ArtifactFactory.class).create(...)}.
     *
     * @param groupId the group identifier, or {@code null} if unspecified
     * @param artifactId the artifact identifier, or {@code null} if unspecified
     * @param version the artifact version, or {@code null} if unspecified
     * @param classifier the artifact classifier, or {@code null} if unspecified
     * @param extension the artifact extension, or {@code null} if unspecified
     * @param type the artifact type, or {@code null} if unspecified
     * @return artifact with the given coordinates
     *
     * @see org.apache.maven.api.services.ArtifactFactory#create(Session, String, String, String, String, String, String)
     */
    @Nonnull
    Artifact createArtifact(
            String groupId, String artifactId, String version, String classifier, String extension, String type);
    /**
     * Creates a produced artifact from the given individual components.
     * <p>
     * Shortcut for {@code getService(ArtifactFactory.class).createProduced(...)}.
     *
     * @param groupId the group identifier, or {@code null} if unspecified
     * @param artifactId the artifact identifier, or {@code null} if unspecified
     * @param version the artifact version, or {@code null} if unspecified
     * @param extension the artifact extension, or {@code null} if unspecified
     * @return artifact with the given coordinates
     *
     * @see org.apache.maven.api.services.ArtifactFactory#createProduced(Session, String, String, String, String)
     */
    @Nonnull
    ProducedArtifact createProducedArtifact(String groupId, String artifactId, String version, String extension);
    /**
     * Creates a produced artifact from the given individual components.
     * <p>
     * Shortcut for {@code getService(ArtifactFactory.class).createProduced(...)}.
     *
     * @param groupId the group identifier, or {@code null} if unspecified
     * @param artifactId the artifact identifier, or {@code null} if unspecified
     * @param version the artifact version, or {@code null} if unspecified
     * @param classifier the artifact classifier, or {@code null} if unspecified
     * @param extension the artifact extension, or {@code null} if unspecified
     * @param type the artifact type, or {@code null} if unspecified
     * @return artifact with the given coordinates
     *
     * @see org.apache.maven.api.services.ArtifactFactory#createProduced(Session, String, String, String, String, String, String)
     */
    @Nonnull
    ProducedArtifact createProducedArtifact(
            String groupId, String artifactId, String version, String classifier, String extension, String type);
    /**
     * Resolves the artifact with the given coordinates.
     * <p>
     * Shortcut for {@code getService(ArtifactResolver.class).resolve(...)}.
     *
     * @param coordinates coordinates of the artifact to resolve
     * @return requested artifact together with the path to its file
     * @throws org.apache.maven.api.services.ArtifactResolverException if the artifact resolution failed
     *
     * @see org.apache.maven.api.services.ArtifactResolver#resolve(Session, Collection)
     */
    @Nonnull
    DownloadedArtifact resolveArtifact(@Nonnull ArtifactCoordinates coordinates);
    /**
     * Resolves the artifact with the given coordinates from the given repositories.
     * <p>
     * Shortcut for {@code getService(ArtifactResolver.class).resolve(...)}.
     *
     * @param coordinates coordinates of the artifact to resolve
     * @param repositories repositories to use, if {@code null}, the session repositories are used
     * @return requested artifact together with the path to its file
     * @throws org.apache.maven.api.services.ArtifactResolverException if the artifact resolution failed
     *
     * @see org.apache.maven.api.services.ArtifactResolver#resolve(Session, Collection)
     */
    @Nonnull
    DownloadedArtifact resolveArtifact(@Nonnull ArtifactCoordinates coordinates, List<RemoteRepository> repositories);
    /**
     * Resolves all artifacts with the given coordinates.
     * <p>
     * Shortcut for {@code getService(ArtifactResolver.class).resolve(...)}.
     *
     * @param coordinates coordinates of all artifacts to resolve
     * @return requested artifacts together with the paths to their files
     * @throws org.apache.maven.api.services.ArtifactResolverException if the artifact resolution failed
     *
     * @see org.apache.maven.api.services.ArtifactResolver#resolve(Session, Collection)
     */
    @Nonnull
    Collection<DownloadedArtifact> resolveArtifacts(@Nonnull ArtifactCoordinates... coordinates);
    /**
     * Resolves all artifacts with the given coordinates.
     * <p>
     * Shortcut for {@code getService(ArtifactResolver.class).resolve(...)}.
     *
     * @param coordinates coordinates of all artifacts to resolve
     * @return requested artifacts together with the paths to their files
     * @throws org.apache.maven.api.services.ArtifactResolverException if the artifact resolution failed
     *
     * @see org.apache.maven.api.services.ArtifactResolver#resolve(Session, Collection)
     */
    @Nonnull
    Collection<DownloadedArtifact> resolveArtifacts(@Nonnull Collection<? extends ArtifactCoordinates> coordinates);
    /**
     * Resolves all artifacts with the given coordinates from the given repositories.
     * <p>
     * Shortcut for {@code getService(ArtifactResolver.class).resolve(...)}.
     *
     * @param coordinates coordinates of all artifacts to resolve
     * @param repositories repositories to use, if {@code null}, the session repositories are used
     * @return requested artifacts together with the paths to their files
     * @throws org.apache.maven.api.services.ArtifactResolverException if the artifact resolution failed
     *
     * @see org.apache.maven.api.services.ArtifactResolver#resolve(Session, Collection)
     */
    @Nonnull
    Collection<DownloadedArtifact> resolveArtifacts(
            @Nonnull Collection<? extends ArtifactCoordinates> coordinates,
            @Nullable List<RemoteRepository> repositories);
    /**
     * Resolves the given artifact.
     * <p>
     * Shortcut for {@code getService(ArtifactResolver.class).resolve(...)}.
     *
     * @param artifact the artifact to resolve
     * @return requested artifact together with the path to its file
     * @throws org.apache.maven.api.services.ArtifactResolverException if the artifact resolution failed
     *
     * @see org.apache.maven.api.services.ArtifactResolver#resolve(Session, Collection)
     */
    @Nonnull
    DownloadedArtifact resolveArtifact(@Nonnull Artifact artifact);
    /**
     * Resolves the given artifact from the given repositories.
     * <p>
     * Shortcut for {@code getService(ArtifactResolver.class).resolve(...)}.
     *
     * @param artifact the artifact to resolve
     * @param repositories repositories to use, if {@code null}, the session repositories are used
     * @return requested artifact together with the path to its file
     * @throws org.apache.maven.api.services.ArtifactResolverException if the artifact resolution failed
     *
     * @see org.apache.maven.api.services.ArtifactResolver#resolve(Session, Collection)
     */
    @Nonnull
    DownloadedArtifact resolveArtifact(@Nonnull Artifact artifact, @Nullable List<RemoteRepository> repositories);
    /**
     * Resolves all given artifacts.
     * <p>
     * Shortcut for {@code getService(ArtifactResolver.class).resolve(...)}.
     *
     * @param artifacts all artifacts to resolve
     * @return requested artifacts together with the paths to their files
     * @throws org.apache.maven.api.services.ArtifactResolverException if the artifact resolution failed
     *
     * @see org.apache.maven.api.services.ArtifactResolver#resolve(Session, Collection)
     */
    @Nonnull
    Collection<DownloadedArtifact> resolveArtifacts(@Nonnull Artifact... artifacts);
    /**
     * Installs the given artifacts.
     * <p>
     * Shortcut for {@code getService(ArtifactInstaller.class).install(...)}.
     *
     * @param artifacts the artifacts to install
     * @throws org.apache.maven.api.services.ArtifactInstallerException if the artifacts installation failed
     *
     * @see org.apache.maven.api.services.ArtifactInstaller#install(Session, Collection)
     */
    void installArtifacts(@Nonnull ProducedArtifact... artifacts);
    /**
     * Installs the given artifacts.
     * <p>
     * Shortcut for {@code getService(ArtifactInstaller.class).install(...)}.
     *
     * @param artifacts the artifacts to install
     * @throws org.apache.maven.api.services.ArtifactInstallerException if the artifacts installation failed
     *
     * @see org.apache.maven.api.services.ArtifactInstaller#install(Session, Collection)
     */
    void installArtifacts(@Nonnull Collection<ProducedArtifact> artifacts);
    /**
     * Deploys the given artifacts to the given remote repository.
     * <p>
     * Shortcut for {@code getService(ArtifactDeployer.class).deploy(...)}.
     *
     * @param repository the repository where to deploy artifacts
     * @param artifacts the artifacts to deploy
     * @throws org.apache.maven.api.services.ArtifactDeployerException if the artifacts deployment failed
     *
     * @see org.apache.maven.api.services.ArtifactDeployer#deploy(Session, RemoteRepository, Collection)
     */
    void deployArtifact(@Nonnull RemoteRepository repository, @Nonnull ProducedArtifact... artifacts);
    /**
     * Associates the given file path with the given produced artifact.
     * <p>
     * Shortcut for {@code getService(ArtifactManager.class).setPath(...)}.
     *
     * @param artifact the artifact for which to associate a path
     * @param path path to associate to the given artifact
     *
     * @see org.apache.maven.api.services.ArtifactManager#setPath(ProducedArtifact, Path)
     */
    void setArtifactPath(@Nonnull ProducedArtifact artifact, @Nonnull Path path);
    /**
     * Returns the path associated with the given artifact, if any.
     * <p>
     * Shortcut for {@code getService(ArtifactManager.class).getPath(...)}.
     *
     * @param artifact the artifact for which to get a path
     * @return path associated to the given artifact
     *
     * @see org.apache.maven.api.services.ArtifactManager#getPath(Artifact)
     */
    @Nonnull
    Optional<Path> getArtifactPath(@Nonnull Artifact artifact);
    /**
     * Gets the relative path for a locally installed artifact. Note that the artifact need not actually exist yet at
     * the returned location, the path merely indicates where the artifact would eventually be stored.
     * <p>
     * Shortcut for {@code getService(LocalArtifactManager.class).getPathForLocalArtifact(...)}.
     *
     * @param artifact the artifact for which to get a local path
     * @return local path associated to the given artifact, or {@code null} if none
     *
     * @see org.apache.maven.api.services.LocalRepositoryManager#getPathForLocalArtifact(Session, LocalRepository, Artifact)
     */
    Path getPathForLocalArtifact(@Nonnull Artifact artifact);
    /**
     * Gets the relative path for an artifact cached from a remote repository.
     * Note that the artifact need not actually exist yet at the returned location,
     * the path merely indicates where the artifact would eventually be stored.
     * <p>
     * Shortcut for {@code getService(LocalArtifactManager.class).getPathForRemoteArtifact(...)}.
     *
     * @param remote the repository from where artifacts are downloaded
     * @param artifact the artifact for which to get a path
     * @return path associated to the given artifact
     *
     * @see org.apache.maven.api.services.LocalRepositoryManager#getPathForRemoteArtifact(Session, LocalRepository, RemoteRepository, Artifact)
     */
    @Nonnull
    Path getPathForRemoteArtifact(@Nonnull RemoteRepository remote, @Nonnull Artifact artifact);
    /**
     * Checks whether a given artifact version is considered a {@code SNAPSHOT} or not.
     * <p>
     * Shortcut for {@code getService(VersionParser.class).isSnapshot(...)}.
     * <p>
     * In case there is {@link Artifact} in scope, the recommended way to perform this check is
     * use of {@link Artifact#isSnapshot()} instead.
     *
     * @param version artifact version
     * @return whether the given version is a snapshot
     *
     * @see org.apache.maven.api.services.VersionParser#isSnapshot(String)
     */
    boolean isVersionSnapshot(@Nonnull String version);
    /**
     * Collects the transitive dependencies of the given artifact and builds a dependency graph.
     * <p>
     * Shortcut for {@code getService(DependencyResolver.class).collect(...)}
     *
     * @param artifact artifact for which to get the dependencies, including transitive ones
     * @param scope the {@link PathScope} to collect dependencies, must not be {@code null}
     * @return root node of the dependency graph for the given artifact
     *
     * @see org.apache.maven.api.services.DependencyResolver#collect(Session, Artifact, PathScope)
     * @throws org.apache.maven.api.services.DependencyResolverException if the dependency collection failed
     */
    @Nonnull
    Node collectDependencies(@Nonnull Artifact artifact, @Nonnull PathScope scope);
    /**
     * Collects the transitive dependencies of the given project and builds a dependency graph.
     * <p>
     * Shortcut for {@code getService(DependencyResolver.class).collect(...)}
     *
     * @param project project for which to get the dependencies, including transitive ones
     * @param scope the {@link PathScope} to collect dependencies, must not be {@code null}
     * @return root node of the dependency graph for the given project
     *
     * @see org.apache.maven.api.services.DependencyResolver#collect(Session, Project, PathScope)
     * @throws org.apache.maven.api.services.DependencyResolverException if the dependency collection failed
     */
    @Nonnull
    Node collectDependencies(@Nonnull Project project, @Nonnull PathScope scope);
    /**
     * Collects the transitive dependencies of some artifacts and builds a dependency graph. Note that this operation is
     * only concerned about determining the coordinates of the transitive dependencies and does not actually resolve the
     * artifact files.
     * <p>
     * Shortcut for {@code getService(DependencyResolver.class).collect(...)}
     *
     * @param dependency dependency for which to get transitive dependencies
     * @param scope the {@link PathScope} to collect dependencies, must not be {@code null}
     * @return root node of the dependency graph for the given artifact
     *
     * @see org.apache.maven.api.services.DependencyResolver#collect(Session, DependencyCoordinates, PathScope)
     * @throws org.apache.maven.api.services.DependencyResolverException if the dependency collection failed
     */
    @Nonnull
    Node collectDependencies(@Nonnull DependencyCoordinates dependency, @Nonnull PathScope scope);
    /**
     * Flattens the given dependency node into a list of nodes matching the given build path scope.
     * <p>
     * Shortcut for {@code getService(DependencyResolver.class).flatten(...)}.
     *
     * @param node node for which to get a flattened list
     * @param scope build path scope (main compile, test compile, etc.) of desired nodes
     * @return flattened list of node with the given build path scope
     * @throws org.apache.maven.api.services.DependencyResolverException if the dependency flattening failed
     *
     * @see org.apache.maven.api.services.DependencyResolver#flatten(Session, Node, PathScope)
     */
    @Nonnull
    List<Node> flattenDependencies(@Nonnull Node node, @Nonnull PathScope scope);
    /**
     * Resolves the transitive dependencies of the given dependency and returns the paths to their files.
     * <p>
     * Shortcut for {@code getService(DependencyResolver.class).resolve(...).getPaths()}.
     *
     * @param dependencyCoordinates coordinates of the dependency for which to get the paths
     * @return paths to the transitive dependencies of the given dependency
     *
     * @see org.apache.maven.api.services.DependencyResolver#resolve(Session, DependencyCoordinates)
     */
    @Nonnull
    List<Path> resolveDependencies(@Nonnull DependencyCoordinates dependencyCoordinates);
    /**
     * Resolves the transitive dependencies of the given dependencies and returns the paths to their files.
     * <p>
     * Shortcut for {@code getService(DependencyResolver.class).resolve(...).getPaths()}.
     *
     * @param dependencyCoordinates coordinates of all dependencies for which to get the paths
     * @return paths to the transitive dependencies of the given dependencies
     *
     * @see org.apache.maven.api.services.DependencyResolver#resolve(Session, List)
     */
    @Nonnull
    List<Path> resolveDependencies(@Nonnull List<DependencyCoordinates> dependencyCoordinates);
    /**
     * Resolves the transitive dependencies of the given project and returns the paths to their files.
     * <p>
     * Shortcut for {@code getService(DependencyResolver.class).resolve(...).getPaths()}.
     *
     * @param project the project for which to get dependencies
     * @param scope build path scope (main compile, test compile, etc.) of desired paths
     * @return paths to the transitive dependencies of the given project
     *
     * @see org.apache.maven.api.services.DependencyResolver#resolve(Session, Project, PathScope)
     */
    @Nonnull
    List<Path> resolveDependencies(@Nonnull Project project, @Nonnull PathScope scope);
    /**
     * Resolves the transitive dependencies of the given dependency and returns the paths to their
     * files, dispatched by path type.
     * <p>
     * Shortcut for {@code getService(DependencyResolver.class).resolve(...).getDispatchedPaths()}.
     *
     * @param dependencyCoordinates coordinates of the dependency for which to get the paths
     * @param scope build path scope (main compile, test compile, etc.) of desired paths
     * @param desiredTypes the type of paths to include in the result
     * @return paths to the transitive dependencies of the given dependency
     *
     * @see org.apache.maven.api.services.DependencyResolver#resolve(Session, DependencyCoordinates)
     */
    @Nonnull
    Map<PathType, List<Path>> resolveDependencies(
            @Nonnull DependencyCoordinates dependencyCoordinates,
            @Nonnull PathScope scope,
            @Nonnull Collection<PathType> desiredTypes);
    /**
     * Resolves the transitive dependencies of the given project and returns the paths to their
     * files, dispatched by path type.
     * <p>
     * Shortcut for {@code getService(DependencyResolver.class).resolve(...).getDispatchedPaths()}.
     *
     * @param project the project for which to get dependencies
     * @param scope build path scope (main compile, test compile, etc.) of desired paths
     * @param desiredTypes the type of paths to include in the result
     * @return paths to the transitive dependencies of the given project
     *
     * @see org.apache.maven.api.services.DependencyResolver#resolve(Session, Project, PathScope)
     */
    @Nonnull
    Map<PathType, List<Path>> resolveDependencies(
            @Nonnull Project project, @Nonnull PathScope scope, @Nonnull Collection<PathType> desiredTypes);
    /**
     * Resolves an artifact's meta version (if any) to a concrete version.
     * For example, resolves "1.0-SNAPSHOT" to "1.0-20090208.132618-23".
     * <p>
     * Shortcut for {@code getService(VersionResolver.class).resolve(...)}
     *
     * @param artifact the artifact for which to resolve the version
     * @return resolved version of the given artifact
     * @throws org.apache.maven.api.services.VersionResolverException if the resolution failed
     *
     * @see org.apache.maven.api.services.VersionResolver#resolve(Session, ArtifactCoordinates)
     */
    @Nonnull
    Version resolveVersion(@Nonnull ArtifactCoordinates artifact) throws VersionResolverException;
    /**
     * Expands a version range to a list of matching versions, in ascending order.
     * For example, resolves "[3.8,4.0)" to "3.8", "3.8.1", "3.8.2".
     * The returned list of versions is only dependent on the configured repositories and their contents.
     * The supplied request may also refer to a single concrete version rather than a version range.
     * In this case though, the result contains simply the (parsed) input version, regardless of the
     * repositories and their contents.
     *
     * @param artifact the artifact for which to resolve the versions
     * @return a list of resolved {@code Version}s.
     * @throws org.apache.maven.api.services.VersionRangeResolverException if the resolution failed
     * @see org.apache.maven.api.services.VersionRangeResolver#resolve(Session, ArtifactCoordinates)
     */
    @Nonnull
    List<Version> resolveVersionRange(@Nonnull ArtifactCoordinates artifact) throws VersionResolverException;
    /**
     * Expands a version range to a list of matching versions, in ascending order.
     * For example, resolves "[3.8,4.0)" to "3.8", "3.8.1", "3.8.2".
     * The returned list of versions is only dependent on the configured repositories and their contents.
     * The supplied request may also refer to a single concrete version rather than a version range.
     * In this case though, the result contains simply the (parsed) input version, regardless of the
     * repositories and their contents.
     *
     * @param artifact the artifact for which to resolve the versions
     * @param repositories the repositories to use, or the session repositories if {@code null}
     * @return a list of resolved {@code Version}s.
     * @throws org.apache.maven.api.services.VersionRangeResolverException if the resolution failed
     * @see org.apache.maven.api.services.VersionRangeResolver#resolve(Session, ArtifactCoordinates)
     */
    @Nonnull
    List<Version> resolveVersionRange(@Nonnull ArtifactCoordinates artifact, List<RemoteRepository> repositories)
            throws VersionResolverException;
    /**
     * Resolves the highest available version of a version range.
     * The returned version is only dependent on the configured repositories and their contents.
     * The supplied request may also refer to a single concrete version rather than a version range.
     * In this case though, the result contains simply the (parsed) input version, regardless of the
     * repositories and their contents.
     *
     * @param artifact the artifact for which to resolve the versions
     * @param repositories the repositories to use, or the session repositories if {@code null}
     * @return the highest resolved {@code Version}.
     * @throws org.apache.maven.api.services.VersionRangeResolverException if the resolution failed
     * @see org.apache.maven.api.services.VersionRangeResolver#resolve(Session, ArtifactCoordinates)
     */
    @Nonnull
    Optional<Version> resolveHighestVersion(@Nonnull ArtifactCoordinates artifact, List<RemoteRepository> repositories)
            throws VersionResolverException;
    /**
     * Parses the specified version string, for example "1.0".
     * <p>
     * Shortcut for {@code getService(VersionParser.class).parseVersion(...)}.
     *
     * @param version the version string to parse
     * @return the version parsed from the given string
     * @throws org.apache.maven.api.services.VersionParserException if the parsing failed
     * @see org.apache.maven.api.services.VersionParser#parseVersion(String)
     */
    @Nonnull
    Version parseVersion(@Nonnull String version);
    /**
     * Parses the specified version range specification, for example "[1.0,2.0)".
     * <p>
     * Shortcut for {@code getService(VersionParser.class).parseVersionRange(...)}.
     *
     * @param versionRange the version range specification to parse
     * @return the version range parsed from the given string
     * @throws org.apache.maven.api.services.VersionParserException if the parsing failed
     * @see org.apache.maven.api.services.VersionParser#parseVersionRange(String)
     */
    @Nonnull
    VersionRange parseVersionRange(@Nonnull String versionRange);
    /**
     * Parses the specified version constraint specification, for example "1.0" or "[1.0,2.0)".
     * <p>
     * Shortcut for {@code getService(VersionParser.class).parseVersionConstraint(...)}.
     *
     * @param versionConstraint the version constraint specification to parse
     * @return the version constraint parsed from the given string
     * @throws org.apache.maven.api.services.VersionParserException if the parsing failed
     * @see org.apache.maven.api.services.VersionParser#parseVersionConstraint(String)
     */
    @Nonnull
    VersionConstraint parseVersionConstraint(@Nonnull String versionConstraint);
    /**
     * Obtain the {@link Type} from the specified {@code id}.
     * <p>
     * Shortcut for {@code getService(TypeRegistry.class).require(...)}.
     *
     * @param id the identifier of the type
     * @return the type for the given identifier
     *
     * @see org.apache.maven.api.services.TypeRegistry#require(String)
     */
    @Nonnull
    Type requireType(@Nonnull String id);
    /**
     * Obtain the {@link Language} from the specified {@code id}.
     * <p>
     * Shortcut for {@code getService(LanguageRegistry.class).require(...)}.
     *
     * @param id the identifier of the language
     * @return the language for the given identifier
     *
     * @see org.apache.maven.api.services.LanguageRegistry#require(String)
     */
    @Nonnull
    Language requireLanguage(@Nonnull String id);
    /**
     * Obtain the {@link Packaging} from the specified {@code id}.
     * <p>
     * Shortcut for {@code getService(PackagingRegistry.class).require(...)}.
     *
     * @param id the identifier of the packaging
     * @return the packaging for the given identifier
     *
     * @see org.apache.maven.api.services.PackagingRegistry#require(String)
     */
    @Nonnull
    Packaging requirePackaging(@Nonnull String id);
    /**
     * Obtain the {@link ProjectScope} from the specified {@code id}.
     * <p>
     * Shortcut for {@code getService(ProjectScopeRegistry.class).require(...)}.
     *
     * @param id the identifier of the project scope
     * @return the project scope for the given identifier
     *
     * @see org.apache.maven.api.services.ProjectScopeRegistry#require(String)
     */
    @Nonnull
    ProjectScope requireProjectScope(@Nonnull String id);
    /**
     * Obtain the {@link DependencyScope} from the specified {@code id}.
     * <p>
     * Shortcut for {@code DependencyScope.forId(...)} with a verification that the given identifier exists.
     *
     * @param id the identifier of the scope (case-sensitive)
     * @return the scope for the given identifier (never null)
     * @throws IllegalArgumentException if the given identifier is not a known scope
     *
     * @see org.apache.maven.api.DependencyScope#forId(String)
     */
    @Nonnull
    DependencyScope requireDependencyScope(@Nonnull String id);
    /**
     * Obtain the {@link PathScope} from the specified {@code id}.
     * <p>
     * Shortcut for {@code getService(PathScopeRegistry.class).require(...)}.
     *
     * @param id the identifier of the path scope
     * @return the path scope for the given identifier
     *
     * @see org.apache.maven.api.services.PathScopeRegistry#require(String)
     */
    @Nonnull
    PathScope requirePathScope(@Nonnull String id);
}
/*
* Copyright 2025 The Grafeas Authors. All rights reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: grafeas/v1/grafeas.proto
// Protobuf Java Version: 3.25.8
package io.grafeas.v1;
/**
*
*
* <pre>
* Request to create occurrences in batch.
* </pre>
*
* Protobuf type {@code grafeas.v1.BatchCreateOccurrencesRequest}
*/
public final class BatchCreateOccurrencesRequest extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:grafeas.v1.BatchCreateOccurrencesRequest)
BatchCreateOccurrencesRequestOrBuilder {
  // NOTE(review): generated by protoc from grafeas/v1/grafeas.proto — do not hand-edit; regenerate instead.
  private static final long serialVersionUID = 0L;
  // Use BatchCreateOccurrencesRequest.newBuilder() to construct.
  private BatchCreateOccurrencesRequest(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
    super(builder);
  }
  // Initializes fields to their proto3 defaults (empty string / empty list).
  private BatchCreateOccurrencesRequest() {
    parent_ = "";
    occurrences_ = java.util.Collections.emptyList();
  }
  // Called by the protobuf runtime to allocate a fresh instance.
  @java.lang.Override
  @SuppressWarnings({"unused"})
  protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
    return new BatchCreateOccurrencesRequest();
  }
  // Returns the descriptor for the grafeas.v1.BatchCreateOccurrencesRequest message type.
  public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
    return io.grafeas.v1.GrafeasOuterClass
        .internal_static_grafeas_v1_BatchCreateOccurrencesRequest_descriptor;
  }
  // Wires this class and its Builder into the reflective field-access machinery.
  @java.lang.Override
  protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internalGetFieldAccessorTable() {
    return io.grafeas.v1.GrafeasOuterClass
        .internal_static_grafeas_v1_BatchCreateOccurrencesRequest_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            io.grafeas.v1.BatchCreateOccurrencesRequest.class,
            io.grafeas.v1.BatchCreateOccurrencesRequest.Builder.class);
  }
  public static final int PARENT_FIELD_NUMBER = 1;
  // Holds either a java.lang.String or a ByteString; the decoded String is cached on first access.
  @SuppressWarnings("serial")
  private volatile java.lang.Object parent_ = "";
  /**
   *
   *
   * <pre>
   * The name of the project in the form of `projects/[PROJECT_ID]`, under which
   * the occurrences are to be created.
   * </pre>
   *
   * <code>
   * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
   * </code>
   *
   * @return The parent.
   */
  @java.lang.Override
  public java.lang.String getParent() {
    java.lang.Object ref = parent_;
    if (ref instanceof java.lang.String) {
      return (java.lang.String) ref;
    } else {
      com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
      java.lang.String s = bs.toStringUtf8();
      // Cache the decoded form so later calls return the String directly.
      parent_ = s;
      return s;
    }
  }
  /**
   *
   *
   * <pre>
   * The name of the project in the form of `projects/[PROJECT_ID]`, under which
   * the occurrences are to be created.
   * </pre>
   *
   * <code>
   * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
   * </code>
   *
   * @return The bytes for parent.
   */
  @java.lang.Override
  public com.google.protobuf.ByteString getParentBytes() {
    java.lang.Object ref = parent_;
    if (ref instanceof java.lang.String) {
      com.google.protobuf.ByteString b =
          com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
      // Cache the encoded form so later calls return the ByteString directly.
      parent_ = b;
      return b;
    } else {
      return (com.google.protobuf.ByteString) ref;
    }
  }
  public static final int OCCURRENCES_FIELD_NUMBER = 2;
  // Backing list for the repeated "occurrences" field (field number 2).
  @SuppressWarnings("serial")
  private java.util.List<io.grafeas.v1.Occurrence> occurrences_;
  /**
   *
   *
   * <pre>
   * The occurrences to create. Max allowed length is 1000.
   * </pre>
   *
   * <code>
   * repeated .grafeas.v1.Occurrence occurrences = 2 [(.google.api.field_behavior) = REQUIRED];
   * </code>
   */
  @java.lang.Override
  public java.util.List<io.grafeas.v1.Occurrence> getOccurrencesList() {
    return occurrences_;
  }
  /**
   *
   *
   * <pre>
   * The occurrences to create. Max allowed length is 1000.
   * </pre>
   *
   * <code>
   * repeated .grafeas.v1.Occurrence occurrences = 2 [(.google.api.field_behavior) = REQUIRED];
   * </code>
   */
  @java.lang.Override
  public java.util.List<? extends io.grafeas.v1.OccurrenceOrBuilder> getOccurrencesOrBuilderList() {
    return occurrences_;
  }
  /**
   *
   *
   * <pre>
   * The occurrences to create. Max allowed length is 1000.
   * </pre>
   *
   * <code>
   * repeated .grafeas.v1.Occurrence occurrences = 2 [(.google.api.field_behavior) = REQUIRED];
   * </code>
   */
  @java.lang.Override
  public int getOccurrencesCount() {
    return occurrences_.size();
  }
  /**
   *
   *
   * <pre>
   * The occurrences to create. Max allowed length is 1000.
   * </pre>
   *
   * <code>
   * repeated .grafeas.v1.Occurrence occurrences = 2 [(.google.api.field_behavior) = REQUIRED];
   * </code>
   */
  @java.lang.Override
  public io.grafeas.v1.Occurrence getOccurrences(int index) {
    return occurrences_.get(index);
  }
  /**
   *
   *
   * <pre>
   * The occurrences to create. Max allowed length is 1000.
   * </pre>
   *
   * <code>
   * repeated .grafeas.v1.Occurrence occurrences = 2 [(.google.api.field_behavior) = REQUIRED];
   * </code>
   */
  @java.lang.Override
  public io.grafeas.v1.OccurrenceOrBuilder getOccurrencesOrBuilder(int index) {
    return occurrences_.get(index);
  }
  // Memoized initialization check: -1 = not yet computed, 0 = false, 1 = true.
  private byte memoizedIsInitialized = -1;
  @java.lang.Override
  public final boolean isInitialized() {
    byte isInitialized = memoizedIsInitialized;
    if (isInitialized == 1) return true;
    if (isInitialized == 0) return false;
    // No generated required-field checks here, so the message is always initialized.
    memoizedIsInitialized = 1;
    return true;
  }
  // Serializes set fields in field-number order, followed by any unknown fields.
  @java.lang.Override
  public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(parent_)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 1, parent_);
    }
    for (int i = 0; i < occurrences_.size(); i++) {
      output.writeMessage(2, occurrences_.get(i));
    }
    getUnknownFields().writeTo(output);
  }
  // Computes the serialized size in bytes, mirroring writeTo; memoized in memoizedSize.
  @java.lang.Override
  public int getSerializedSize() {
    int size = memoizedSize;
    if (size != -1) return size;
    size = 0;
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(parent_)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, parent_);
    }
    for (int i = 0; i < occurrences_.size(); i++) {
      size += com.google.protobuf.CodedOutputStream.computeMessageSize(2, occurrences_.get(i));
    }
    size += getUnknownFields().getSerializedSize();
    memoizedSize = size;
    return size;
  }
  // Value-based equality over parent, occurrences and unknown fields.
  @java.lang.Override
  public boolean equals(final java.lang.Object obj) {
    if (obj == this) {
      return true;
    }
    if (!(obj instanceof io.grafeas.v1.BatchCreateOccurrencesRequest)) {
      return super.equals(obj);
    }
    io.grafeas.v1.BatchCreateOccurrencesRequest other =
        (io.grafeas.v1.BatchCreateOccurrencesRequest) obj;
    if (!getParent().equals(other.getParent())) return false;
    if (!getOccurrencesList().equals(other.getOccurrencesList())) return false;
    if (!getUnknownFields().equals(other.getUnknownFields())) return false;
    return true;
  }
  // Hash code consistent with equals(); memoized after the first computation.
  @java.lang.Override
  public int hashCode() {
    if (memoizedHashCode != 0) {
      return memoizedHashCode;
    }
    int hash = 41;
    hash = (19 * hash) + getDescriptor().hashCode();
    hash = (37 * hash) + PARENT_FIELD_NUMBER;
    hash = (53 * hash) + getParent().hashCode();
    if (getOccurrencesCount() > 0) {
      hash = (37 * hash) + OCCURRENCES_FIELD_NUMBER;
      hash = (53 * hash) + getOccurrencesList().hashCode();
    }
    hash = (29 * hash) + getUnknownFields().hashCode();
    memoizedHashCode = hash;
    return hash;
  }
  // Static parse entry points. All overloads delegate to the shared PARSER;
  // stream-based variants go through GeneratedMessageV3 helpers so that
  // IOExceptions propagate while malformed input surfaces as
  // InvalidProtocolBufferException. The registry-taking overloads resolve
  // extensions during parsing (no-op for this extension-free message).
  public static io.grafeas.v1.BatchCreateOccurrencesRequest parseFrom(java.nio.ByteBuffer data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static io.grafeas.v1.BatchCreateOccurrencesRequest parseFrom(
      java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static io.grafeas.v1.BatchCreateOccurrencesRequest parseFrom(
      com.google.protobuf.ByteString data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static io.grafeas.v1.BatchCreateOccurrencesRequest parseFrom(
      com.google.protobuf.ByteString data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static io.grafeas.v1.BatchCreateOccurrencesRequest parseFrom(byte[] data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static io.grafeas.v1.BatchCreateOccurrencesRequest parseFrom(
      byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static io.grafeas.v1.BatchCreateOccurrencesRequest parseFrom(java.io.InputStream input)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }
  public static io.grafeas.v1.BatchCreateOccurrencesRequest parseFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }
  // Delimited variants read a varint length prefix first, allowing several
  // messages to be framed on one stream.
  public static io.grafeas.v1.BatchCreateOccurrencesRequest parseDelimitedFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
  }
  public static io.grafeas.v1.BatchCreateOccurrencesRequest parseDelimitedFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
        PARSER, input, extensionRegistry);
  }
  public static io.grafeas.v1.BatchCreateOccurrencesRequest parseFrom(
      com.google.protobuf.CodedInputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }
  public static io.grafeas.v1.BatchCreateOccurrencesRequest parseFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }
  // Builder factories: newBuilder() yields an empty builder (via the default
  // instance), newBuilder(prototype) pre-populates from an existing message,
  // and toBuilder() avoids a needless copy when called on DEFAULT_INSTANCE.
  @java.lang.Override
  public Builder newBuilderForType() {
    return newBuilder();
  }
  public static Builder newBuilder() {
    return DEFAULT_INSTANCE.toBuilder();
  }
  public static Builder newBuilder(io.grafeas.v1.BatchCreateOccurrencesRequest prototype) {
    return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
  }
  @java.lang.Override
  public Builder toBuilder() {
    return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
  }
  // Internal hook used by the runtime to create a builder parented to an
  // enclosing builder (for nested-message change propagation).
  @java.lang.Override
  protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
    Builder builder = new Builder(parent);
    return builder;
  }
  // Mutable companion to the immutable BatchCreateOccurrencesRequest.
  // bitField0_ tracks per-field state: bit 0x1 = parent set, bit 0x2 =
  // the occurrences list is privately owned (mutable). The occurrences
  // field operates in one of two modes: a plain List until a nested-builder
  // API is first used, then a RepeatedFieldBuilderV3 (occurrencesBuilder_)
  // takes over and occurrences_ is nulled out.
  /**
   *
   *
   * <pre>
   * Request to create occurrences in batch.
   * </pre>
   *
   * Protobuf type {@code grafeas.v1.BatchCreateOccurrencesRequest}
   */
  public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
      implements
      // @@protoc_insertion_point(builder_implements:grafeas.v1.BatchCreateOccurrencesRequest)
      io.grafeas.v1.BatchCreateOccurrencesRequestOrBuilder {
    public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
      return io.grafeas.v1.GrafeasOuterClass
          .internal_static_grafeas_v1_BatchCreateOccurrencesRequest_descriptor;
    }
    @java.lang.Override
    protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return io.grafeas.v1.GrafeasOuterClass
          .internal_static_grafeas_v1_BatchCreateOccurrencesRequest_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              io.grafeas.v1.BatchCreateOccurrencesRequest.class,
              io.grafeas.v1.BatchCreateOccurrencesRequest.Builder.class);
    }
    // Construct using io.grafeas.v1.BatchCreateOccurrencesRequest.newBuilder()
    private Builder() {}
    private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
      super(parent);
    }
    // Resets every field to its default and clears all set-bits, handling
    // whichever mode (list or field builder) the occurrences field is in.
    @java.lang.Override
    public Builder clear() {
      super.clear();
      bitField0_ = 0;
      parent_ = "";
      if (occurrencesBuilder_ == null) {
        occurrences_ = java.util.Collections.emptyList();
      } else {
        occurrences_ = null;
        occurrencesBuilder_.clear();
      }
      bitField0_ = (bitField0_ & ~0x00000002);
      return this;
    }
    @java.lang.Override
    public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
      return io.grafeas.v1.GrafeasOuterClass
          .internal_static_grafeas_v1_BatchCreateOccurrencesRequest_descriptor;
    }
    @java.lang.Override
    public io.grafeas.v1.BatchCreateOccurrencesRequest getDefaultInstanceForType() {
      return io.grafeas.v1.BatchCreateOccurrencesRequest.getDefaultInstance();
    }
    @java.lang.Override
    public io.grafeas.v1.BatchCreateOccurrencesRequest build() {
      io.grafeas.v1.BatchCreateOccurrencesRequest result = buildPartial();
      if (!result.isInitialized()) {
        throw newUninitializedMessageException(result);
      }
      return result;
    }
    @java.lang.Override
    public io.grafeas.v1.BatchCreateOccurrencesRequest buildPartial() {
      io.grafeas.v1.BatchCreateOccurrencesRequest result =
          new io.grafeas.v1.BatchCreateOccurrencesRequest(this);
      buildPartialRepeatedFields(result);
      if (bitField0_ != 0) {
        buildPartial0(result);
      }
      onBuilt();
      return result;
    }
    // Transfers the occurrences list into the message. If the builder still
    // owns a mutable list, it is frozen (unmodifiable) and ownership moves to
    // the message; the owned-bit is cleared so later mutation re-copies.
    private void buildPartialRepeatedFields(io.grafeas.v1.BatchCreateOccurrencesRequest result) {
      if (occurrencesBuilder_ == null) {
        if (((bitField0_ & 0x00000002) != 0)) {
          occurrences_ = java.util.Collections.unmodifiableList(occurrences_);
          bitField0_ = (bitField0_ & ~0x00000002);
        }
        result.occurrences_ = occurrences_;
      } else {
        result.occurrences_ = occurrencesBuilder_.build();
      }
    }
    // Copies singular fields whose set-bit is on into the message.
    private void buildPartial0(io.grafeas.v1.BatchCreateOccurrencesRequest result) {
      int from_bitField0_ = bitField0_;
      if (((from_bitField0_ & 0x00000001) != 0)) {
        result.parent_ = parent_;
      }
    }
    @java.lang.Override
    public Builder clone() {
      return super.clone();
    }
    @java.lang.Override
    public Builder setField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.setField(field, value);
    }
    @java.lang.Override
    public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
      return super.clearField(field);
    }
    @java.lang.Override
    public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
      return super.clearOneof(oneof);
    }
    @java.lang.Override
    public Builder setRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
      return super.setRepeatedField(field, index, value);
    }
    @java.lang.Override
    public Builder addRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.addRepeatedField(field, value);
    }
    // Dynamic-dispatch merge: routes typed messages to the typed overload,
    // anything else through the reflective base implementation.
    @java.lang.Override
    public Builder mergeFrom(com.google.protobuf.Message other) {
      if (other instanceof io.grafeas.v1.BatchCreateOccurrencesRequest) {
        return mergeFrom((io.grafeas.v1.BatchCreateOccurrencesRequest) other);
      } else {
        super.mergeFrom(other);
        return this;
      }
    }
    // Field-wise merge. For occurrences: if this builder's side is empty the
    // other message's (immutable) list is shared by reference; otherwise the
    // elements are appended. The builder-mode branch mirrors that logic,
    // disposing and lazily recreating the field builder when sharing.
    public Builder mergeFrom(io.grafeas.v1.BatchCreateOccurrencesRequest other) {
      if (other == io.grafeas.v1.BatchCreateOccurrencesRequest.getDefaultInstance()) return this;
      if (!other.getParent().isEmpty()) {
        parent_ = other.parent_;
        bitField0_ |= 0x00000001;
        onChanged();
      }
      if (occurrencesBuilder_ == null) {
        if (!other.occurrences_.isEmpty()) {
          if (occurrences_.isEmpty()) {
            occurrences_ = other.occurrences_;
            bitField0_ = (bitField0_ & ~0x00000002);
          } else {
            ensureOccurrencesIsMutable();
            occurrences_.addAll(other.occurrences_);
          }
          onChanged();
        }
      } else {
        if (!other.occurrences_.isEmpty()) {
          if (occurrencesBuilder_.isEmpty()) {
            occurrencesBuilder_.dispose();
            occurrencesBuilder_ = null;
            occurrences_ = other.occurrences_;
            bitField0_ = (bitField0_ & ~0x00000002);
            occurrencesBuilder_ =
                com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders
                    ? getOccurrencesFieldBuilder()
                    : null;
          } else {
            occurrencesBuilder_.addAllMessages(other.occurrences_);
          }
        }
      }
      this.mergeUnknownFields(other.getUnknownFields());
      onChanged();
      return this;
    }
    @java.lang.Override
    public final boolean isInitialized() {
      return true;
    }
    // Wire-format parse loop: tag 10 = parent (field 1, length-delimited
    // UTF-8), tag 18 = one occurrence (field 2); unrecognized tags are kept
    // as unknown fields. onChanged() fires even on failure (finally) because
    // fields consumed before the error remain merged.
    @java.lang.Override
    public Builder mergeFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      if (extensionRegistry == null) {
        throw new java.lang.NullPointerException();
      }
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              done = true;
              break;
            case 10:
              {
                parent_ = input.readStringRequireUtf8();
                bitField0_ |= 0x00000001;
                break;
              } // case 10
            case 18:
              {
                io.grafeas.v1.Occurrence m =
                    input.readMessage(io.grafeas.v1.Occurrence.parser(), extensionRegistry);
                if (occurrencesBuilder_ == null) {
                  ensureOccurrencesIsMutable();
                  occurrences_.add(m);
                } else {
                  occurrencesBuilder_.addMessage(m);
                }
                break;
              } // case 18
            default:
              {
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
          } // switch (tag)
        } // while (!done)
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.unwrapIOException();
      } finally {
        onChanged();
      } // finally
      return this;
    }
    private int bitField0_;
    // Stored as String after a set/merge, or ByteString after parsing; the
    // getters convert and cache lazily in either direction.
    private java.lang.Object parent_ = "";
    /**
     *
     *
     * <pre>
     * The name of the project in the form of `projects/[PROJECT_ID]`, under which
     * the occurrences are to be created.
     * </pre>
     *
     * <code>
     * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
     * </code>
     *
     * @return The parent.
     */
    public java.lang.String getParent() {
      java.lang.Object ref = parent_;
      if (!(ref instanceof java.lang.String)) {
        com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        parent_ = s;
        return s;
      } else {
        return (java.lang.String) ref;
      }
    }
    /**
     *
     *
     * <pre>
     * The name of the project in the form of `projects/[PROJECT_ID]`, under which
     * the occurrences are to be created.
     * </pre>
     *
     * <code>
     * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
     * </code>
     *
     * @return The bytes for parent.
     */
    public com.google.protobuf.ByteString getParentBytes() {
      java.lang.Object ref = parent_;
      if (ref instanceof String) {
        com.google.protobuf.ByteString b =
            com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
        parent_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }
    /**
     *
     *
     * <pre>
     * The name of the project in the form of `projects/[PROJECT_ID]`, under which
     * the occurrences are to be created.
     * </pre>
     *
     * <code>
     * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
     * </code>
     *
     * @param value The parent to set.
     * @return This builder for chaining.
     */
    public Builder setParent(java.lang.String value) {
      if (value == null) {
        throw new NullPointerException();
      }
      parent_ = value;
      bitField0_ |= 0x00000001;
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * The name of the project in the form of `projects/[PROJECT_ID]`, under which
     * the occurrences are to be created.
     * </pre>
     *
     * <code>
     * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
     * </code>
     *
     * @return This builder for chaining.
     */
    public Builder clearParent() {
      parent_ = getDefaultInstance().getParent();
      bitField0_ = (bitField0_ & ~0x00000001);
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * The name of the project in the form of `projects/[PROJECT_ID]`, under which
     * the occurrences are to be created.
     * </pre>
     *
     * <code>
     * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
     * </code>
     *
     * @param value The bytes for parent to set.
     * @return This builder for chaining.
     */
    public Builder setParentBytes(com.google.protobuf.ByteString value) {
      if (value == null) {
        throw new NullPointerException();
      }
      checkByteStringIsUtf8(value);
      parent_ = value;
      bitField0_ |= 0x00000001;
      onChanged();
      return this;
    }
    private java.util.List<io.grafeas.v1.Occurrence> occurrences_ =
        java.util.Collections.emptyList();
    // Copy-on-write guard: if the list is not yet privately owned (bit 0x2
    // clear), replace it with a mutable copy before the first mutation.
    private void ensureOccurrencesIsMutable() {
      if (!((bitField0_ & 0x00000002) != 0)) {
        occurrences_ = new java.util.ArrayList<io.grafeas.v1.Occurrence>(occurrences_);
        bitField0_ |= 0x00000002;
      }
    }
    private com.google.protobuf.RepeatedFieldBuilderV3<
            io.grafeas.v1.Occurrence,
            io.grafeas.v1.Occurrence.Builder,
            io.grafeas.v1.OccurrenceOrBuilder>
        occurrencesBuilder_;
    /**
     *
     *
     * <pre>
     * The occurrences to create. Max allowed length is 1000.
     * </pre>
     *
     * <code>
     * repeated .grafeas.v1.Occurrence occurrences = 2 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     */
    public java.util.List<io.grafeas.v1.Occurrence> getOccurrencesList() {
      if (occurrencesBuilder_ == null) {
        return java.util.Collections.unmodifiableList(occurrences_);
      } else {
        return occurrencesBuilder_.getMessageList();
      }
    }
    /**
     *
     *
     * <pre>
     * The occurrences to create. Max allowed length is 1000.
     * </pre>
     *
     * <code>
     * repeated .grafeas.v1.Occurrence occurrences = 2 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     */
    public int getOccurrencesCount() {
      if (occurrencesBuilder_ == null) {
        return occurrences_.size();
      } else {
        return occurrencesBuilder_.getCount();
      }
    }
    /**
     *
     *
     * <pre>
     * The occurrences to create. Max allowed length is 1000.
     * </pre>
     *
     * <code>
     * repeated .grafeas.v1.Occurrence occurrences = 2 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     */
    public io.grafeas.v1.Occurrence getOccurrences(int index) {
      if (occurrencesBuilder_ == null) {
        return occurrences_.get(index);
      } else {
        return occurrencesBuilder_.getMessage(index);
      }
    }
    /**
     *
     *
     * <pre>
     * The occurrences to create. Max allowed length is 1000.
     * </pre>
     *
     * <code>
     * repeated .grafeas.v1.Occurrence occurrences = 2 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     */
    public Builder setOccurrences(int index, io.grafeas.v1.Occurrence value) {
      if (occurrencesBuilder_ == null) {
        if (value == null) {
          throw new NullPointerException();
        }
        ensureOccurrencesIsMutable();
        occurrences_.set(index, value);
        onChanged();
      } else {
        occurrencesBuilder_.setMessage(index, value);
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * The occurrences to create. Max allowed length is 1000.
     * </pre>
     *
     * <code>
     * repeated .grafeas.v1.Occurrence occurrences = 2 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     */
    public Builder setOccurrences(int index, io.grafeas.v1.Occurrence.Builder builderForValue) {
      if (occurrencesBuilder_ == null) {
        ensureOccurrencesIsMutable();
        occurrences_.set(index, builderForValue.build());
        onChanged();
      } else {
        occurrencesBuilder_.setMessage(index, builderForValue.build());
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * The occurrences to create. Max allowed length is 1000.
     * </pre>
     *
     * <code>
     * repeated .grafeas.v1.Occurrence occurrences = 2 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     */
    public Builder addOccurrences(io.grafeas.v1.Occurrence value) {
      if (occurrencesBuilder_ == null) {
        if (value == null) {
          throw new NullPointerException();
        }
        ensureOccurrencesIsMutable();
        occurrences_.add(value);
        onChanged();
      } else {
        occurrencesBuilder_.addMessage(value);
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * The occurrences to create. Max allowed length is 1000.
     * </pre>
     *
     * <code>
     * repeated .grafeas.v1.Occurrence occurrences = 2 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     */
    public Builder addOccurrences(int index, io.grafeas.v1.Occurrence value) {
      if (occurrencesBuilder_ == null) {
        if (value == null) {
          throw new NullPointerException();
        }
        ensureOccurrencesIsMutable();
        occurrences_.add(index, value);
        onChanged();
      } else {
        occurrencesBuilder_.addMessage(index, value);
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * The occurrences to create. Max allowed length is 1000.
     * </pre>
     *
     * <code>
     * repeated .grafeas.v1.Occurrence occurrences = 2 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     */
    public Builder addOccurrences(io.grafeas.v1.Occurrence.Builder builderForValue) {
      if (occurrencesBuilder_ == null) {
        ensureOccurrencesIsMutable();
        occurrences_.add(builderForValue.build());
        onChanged();
      } else {
        occurrencesBuilder_.addMessage(builderForValue.build());
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * The occurrences to create. Max allowed length is 1000.
     * </pre>
     *
     * <code>
     * repeated .grafeas.v1.Occurrence occurrences = 2 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     */
    public Builder addOccurrences(int index, io.grafeas.v1.Occurrence.Builder builderForValue) {
      if (occurrencesBuilder_ == null) {
        ensureOccurrencesIsMutable();
        occurrences_.add(index, builderForValue.build());
        onChanged();
      } else {
        occurrencesBuilder_.addMessage(index, builderForValue.build());
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * The occurrences to create. Max allowed length is 1000.
     * </pre>
     *
     * <code>
     * repeated .grafeas.v1.Occurrence occurrences = 2 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     */
    public Builder addAllOccurrences(
        java.lang.Iterable<? extends io.grafeas.v1.Occurrence> values) {
      if (occurrencesBuilder_ == null) {
        ensureOccurrencesIsMutable();
        com.google.protobuf.AbstractMessageLite.Builder.addAll(values, occurrences_);
        onChanged();
      } else {
        occurrencesBuilder_.addAllMessages(values);
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * The occurrences to create. Max allowed length is 1000.
     * </pre>
     *
     * <code>
     * repeated .grafeas.v1.Occurrence occurrences = 2 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     */
    public Builder clearOccurrences() {
      if (occurrencesBuilder_ == null) {
        occurrences_ = java.util.Collections.emptyList();
        bitField0_ = (bitField0_ & ~0x00000002);
        onChanged();
      } else {
        occurrencesBuilder_.clear();
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * The occurrences to create. Max allowed length is 1000.
     * </pre>
     *
     * <code>
     * repeated .grafeas.v1.Occurrence occurrences = 2 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     */
    public Builder removeOccurrences(int index) {
      if (occurrencesBuilder_ == null) {
        ensureOccurrencesIsMutable();
        occurrences_.remove(index);
        onChanged();
      } else {
        occurrencesBuilder_.remove(index);
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * The occurrences to create. Max allowed length is 1000.
     * </pre>
     *
     * <code>
     * repeated .grafeas.v1.Occurrence occurrences = 2 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     */
    public io.grafeas.v1.Occurrence.Builder getOccurrencesBuilder(int index) {
      return getOccurrencesFieldBuilder().getBuilder(index);
    }
    /**
     *
     *
     * <pre>
     * The occurrences to create. Max allowed length is 1000.
     * </pre>
     *
     * <code>
     * repeated .grafeas.v1.Occurrence occurrences = 2 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     */
    public io.grafeas.v1.OccurrenceOrBuilder getOccurrencesOrBuilder(int index) {
      if (occurrencesBuilder_ == null) {
        return occurrences_.get(index);
      } else {
        return occurrencesBuilder_.getMessageOrBuilder(index);
      }
    }
    /**
     *
     *
     * <pre>
     * The occurrences to create. Max allowed length is 1000.
     * </pre>
     *
     * <code>
     * repeated .grafeas.v1.Occurrence occurrences = 2 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     */
    public java.util.List<? extends io.grafeas.v1.OccurrenceOrBuilder>
        getOccurrencesOrBuilderList() {
      if (occurrencesBuilder_ != null) {
        return occurrencesBuilder_.getMessageOrBuilderList();
      } else {
        return java.util.Collections.unmodifiableList(occurrences_);
      }
    }
    /**
     *
     *
     * <pre>
     * The occurrences to create. Max allowed length is 1000.
     * </pre>
     *
     * <code>
     * repeated .grafeas.v1.Occurrence occurrences = 2 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     */
    public io.grafeas.v1.Occurrence.Builder addOccurrencesBuilder() {
      return getOccurrencesFieldBuilder().addBuilder(io.grafeas.v1.Occurrence.getDefaultInstance());
    }
    /**
     *
     *
     * <pre>
     * The occurrences to create. Max allowed length is 1000.
     * </pre>
     *
     * <code>
     * repeated .grafeas.v1.Occurrence occurrences = 2 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     */
    public io.grafeas.v1.Occurrence.Builder addOccurrencesBuilder(int index) {
      return getOccurrencesFieldBuilder()
          .addBuilder(index, io.grafeas.v1.Occurrence.getDefaultInstance());
    }
    /**
     *
     *
     * <pre>
     * The occurrences to create. Max allowed length is 1000.
     * </pre>
     *
     * <code>
     * repeated .grafeas.v1.Occurrence occurrences = 2 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     */
    public java.util.List<io.grafeas.v1.Occurrence.Builder> getOccurrencesBuilderList() {
      return getOccurrencesFieldBuilder().getBuilderList();
    }
    // Lazily switches the occurrences field into builder mode; the list's
    // contents seed the RepeatedFieldBuilderV3 and occurrences_ is released.
    private com.google.protobuf.RepeatedFieldBuilderV3<
            io.grafeas.v1.Occurrence,
            io.grafeas.v1.Occurrence.Builder,
            io.grafeas.v1.OccurrenceOrBuilder>
        getOccurrencesFieldBuilder() {
      if (occurrencesBuilder_ == null) {
        occurrencesBuilder_ =
            new com.google.protobuf.RepeatedFieldBuilderV3<
                io.grafeas.v1.Occurrence,
                io.grafeas.v1.Occurrence.Builder,
                io.grafeas.v1.OccurrenceOrBuilder>(
                occurrences_, ((bitField0_ & 0x00000002) != 0), getParentForChildren(), isClean());
        occurrences_ = null;
      }
      return occurrencesBuilder_;
    }
    @java.lang.Override
    public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.setUnknownFields(unknownFields);
    }
    @java.lang.Override
    public final Builder mergeUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.mergeUnknownFields(unknownFields);
    }
    // @@protoc_insertion_point(builder_scope:grafeas.v1.BatchCreateOccurrencesRequest)
  }
// @@protoc_insertion_point(class_scope:grafeas.v1.BatchCreateOccurrencesRequest)
  // Singleton default instance (all fields at proto3 defaults) and the shared
  // PARSER used by every static parseFrom overload. parsePartialFrom attaches
  // the partially built message to any parse exception so callers can inspect
  // what was decoded before the failure.
  private static final io.grafeas.v1.BatchCreateOccurrencesRequest DEFAULT_INSTANCE;
  static {
    DEFAULT_INSTANCE = new io.grafeas.v1.BatchCreateOccurrencesRequest();
  }
  public static io.grafeas.v1.BatchCreateOccurrencesRequest getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }
  private static final com.google.protobuf.Parser<BatchCreateOccurrencesRequest> PARSER =
      new com.google.protobuf.AbstractParser<BatchCreateOccurrencesRequest>() {
        @java.lang.Override
        public BatchCreateOccurrencesRequest parsePartialFrom(
            com.google.protobuf.CodedInputStream input,
            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
            throws com.google.protobuf.InvalidProtocolBufferException {
          Builder builder = newBuilder();
          try {
            builder.mergeFrom(input, extensionRegistry);
          } catch (com.google.protobuf.InvalidProtocolBufferException e) {
            throw e.setUnfinishedMessage(builder.buildPartial());
          } catch (com.google.protobuf.UninitializedMessageException e) {
            throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
          } catch (java.io.IOException e) {
            throw new com.google.protobuf.InvalidProtocolBufferException(e)
                .setUnfinishedMessage(builder.buildPartial());
          }
          return builder.buildPartial();
        }
      };
  public static com.google.protobuf.Parser<BatchCreateOccurrencesRequest> parser() {
    return PARSER;
  }
  @java.lang.Override
  public com.google.protobuf.Parser<BatchCreateOccurrencesRequest> getParserForType() {
    return PARSER;
  }
  @java.lang.Override
  public io.grafeas.v1.BatchCreateOccurrencesRequest getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }
}
// ==== File boundary (concatenation artifact): the lines below originate from
// googleapis/google-cloud-java,
// java-infra-manager/proto-google-cloud-infra-manager-v1/src/main/java/com/google/cloud/config/v1/TerraformError.java ====
/*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/config/v1/config.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.config.v1;
/**
*
*
* <pre>
* Errors encountered during actuation using Terraform
* </pre>
*
* Protobuf type {@code google.cloud.config.v1.TerraformError}
*/
public final class TerraformError extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.config.v1.TerraformError)
TerraformErrorOrBuilder {
  private static final long serialVersionUID = 0L;
  // Use TerraformError.newBuilder() to construct.
  private TerraformError(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
    super(builder);
  }
  // Default constructor initializes string fields to empty, matching proto3
  // defaults (numeric/message fields rely on Java field defaults).
  private TerraformError() {
    resourceAddress_ = "";
    errorDescription_ = "";
  }
  // Runtime hook used by the protobuf library to create instances without
  // reflection overhead.
  @java.lang.Override
  @SuppressWarnings({"unused"})
  protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
    return new TerraformError();
  }
  // Descriptor plumbing: exposes the compiled message descriptor and the
  // field-accessor table generated in ConfigProto for reflective access.
  public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
    return com.google.cloud.config.v1.ConfigProto
        .internal_static_google_cloud_config_v1_TerraformError_descriptor;
  }
  @java.lang.Override
  protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internalGetFieldAccessorTable() {
    return com.google.cloud.config.v1.ConfigProto
        .internal_static_google_cloud_config_v1_TerraformError_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            com.google.cloud.config.v1.TerraformError.class,
            com.google.cloud.config.v1.TerraformError.Builder.class);
  }
  // bitField0_ tracks has-bits for message-typed fields (bit 0x1 = error).
  private int bitField0_;
  public static final int RESOURCE_ADDRESS_FIELD_NUMBER = 1;
  // Holds either a String or a ByteString; the getters convert lazily and
  // cache the converted form (hence volatile for safe publication).
  @SuppressWarnings("serial")
  private volatile java.lang.Object resourceAddress_ = "";
  /**
   *
   *
   * <pre>
   * Address of the resource associated with the error,
   * e.g. `google_compute_network.vpc_network`.
   * </pre>
   *
   * <code>string resource_address = 1;</code>
   *
   * @return The resourceAddress.
   */
  @java.lang.Override
  public java.lang.String getResourceAddress() {
    java.lang.Object ref = resourceAddress_;
    if (ref instanceof java.lang.String) {
      return (java.lang.String) ref;
    } else {
      com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
      java.lang.String s = bs.toStringUtf8();
      resourceAddress_ = s;
      return s;
    }
  }
  /**
   *
   *
   * <pre>
   * Address of the resource associated with the error,
   * e.g. `google_compute_network.vpc_network`.
   * </pre>
   *
   * <code>string resource_address = 1;</code>
   *
   * @return The bytes for resourceAddress.
   */
  @java.lang.Override
  public com.google.protobuf.ByteString getResourceAddressBytes() {
    java.lang.Object ref = resourceAddress_;
    if (ref instanceof java.lang.String) {
      com.google.protobuf.ByteString b =
          com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
      resourceAddress_ = b;
      return b;
    } else {
      return (com.google.protobuf.ByteString) ref;
    }
  }
  public static final int HTTP_RESPONSE_CODE_FIELD_NUMBER = 2;
  // proto3 scalar: 0 doubles as both the default and the "unset" value.
  private int httpResponseCode_ = 0;
  /**
   *
   *
   * <pre>
   * HTTP response code returned from Google Cloud Platform APIs when Terraform
   * fails to provision the resource. If unset or 0, no HTTP response code was
   * returned by Terraform.
   * </pre>
   *
   * <code>int32 http_response_code = 2;</code>
   *
   * @return The httpResponseCode.
   */
  @java.lang.Override
  public int getHttpResponseCode() {
    return httpResponseCode_;
  }
  public static final int ERROR_DESCRIPTION_FIELD_NUMBER = 3;
  // String/ByteString dual representation with lazy conversion caching, same
  // scheme as resourceAddress_.
  @SuppressWarnings("serial")
  private volatile java.lang.Object errorDescription_ = "";
  /**
   *
   *
   * <pre>
   * A human-readable error description.
   * </pre>
   *
   * <code>string error_description = 3;</code>
   *
   * @return The errorDescription.
   */
  @java.lang.Override
  public java.lang.String getErrorDescription() {
    java.lang.Object ref = errorDescription_;
    if (ref instanceof java.lang.String) {
      return (java.lang.String) ref;
    } else {
      com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
      java.lang.String s = bs.toStringUtf8();
      errorDescription_ = s;
      return s;
    }
  }
  /**
   *
   *
   * <pre>
   * A human-readable error description.
   * </pre>
   *
   * <code>string error_description = 3;</code>
   *
   * @return The bytes for errorDescription.
   */
  @java.lang.Override
  public com.google.protobuf.ByteString getErrorDescriptionBytes() {
    java.lang.Object ref = errorDescription_;
    if (ref instanceof java.lang.String) {
      com.google.protobuf.ByteString b =
          com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
      errorDescription_ = b;
      return b;
    } else {
      return (com.google.protobuf.ByteString) ref;
    }
  }
  public static final int ERROR_FIELD_NUMBER = 4;
  // Message-typed field: presence is tracked via bit 0x1 of bitField0_, and
  // the getters substitute Status.getDefaultInstance() when unset.
  private com.google.rpc.Status error_;
  /**
   *
   *
   * <pre>
   * Output only. Original error response from underlying Google API, if
   * available.
   * </pre>
   *
   * <code>.google.rpc.Status error = 4 [(.google.api.field_behavior) = OUTPUT_ONLY];</code>
   *
   * @return Whether the error field is set.
   */
  @java.lang.Override
  public boolean hasError() {
    return ((bitField0_ & 0x00000001) != 0);
  }
  /**
   *
   *
   * <pre>
   * Output only. Original error response from underlying Google API, if
   * available.
   * </pre>
   *
   * <code>.google.rpc.Status error = 4 [(.google.api.field_behavior) = OUTPUT_ONLY];</code>
   *
   * @return The error.
   */
  @java.lang.Override
  public com.google.rpc.Status getError() {
    return error_ == null ? com.google.rpc.Status.getDefaultInstance() : error_;
  }
  /**
   *
   *
   * <pre>
   * Output only. Original error response from underlying Google API, if
   * available.
   * </pre>
   *
   * <code>.google.rpc.Status error = 4 [(.google.api.field_behavior) = OUTPUT_ONLY];</code>
   */
  @java.lang.Override
  public com.google.rpc.StatusOrBuilder getErrorOrBuilder() {
    return error_ == null ? com.google.rpc.Status.getDefaultInstance() : error_;
  }
  // Memoized initialization state (-1 unknown, 0 false, 1 true); proto3 has
  // no required fields, so this always resolves to true.
  private byte memoizedIsInitialized = -1;
  @java.lang.Override
  public final boolean isInitialized() {
    byte isInitialized = memoizedIsInitialized;
    if (isInitialized == 1) return true;
    if (isInitialized == 0) return false;
    memoizedIsInitialized = 1;
    return true;
  }
  // Serializes set fields in field-number order (1..4), eliding proto3
  // defaults; the error message is written only when its has-bit is set.
  @java.lang.Override
  public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(resourceAddress_)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 1, resourceAddress_);
    }
    if (httpResponseCode_ != 0) {
      output.writeInt32(2, httpResponseCode_);
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(errorDescription_)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 3, errorDescription_);
    }
    if (((bitField0_ & 0x00000001) != 0)) {
      output.writeMessage(4, getError());
    }
    getUnknownFields().writeTo(output);
  }
  // Computes (and memoizes; -1 = not computed) the byte count writeTo will
  // emit, mirroring its field conditions exactly.
  @java.lang.Override
  public int getSerializedSize() {
    int size = memoizedSize;
    if (size != -1) return size;
    size = 0;
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(resourceAddress_)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, resourceAddress_);
    }
    if (httpResponseCode_ != 0) {
      size += com.google.protobuf.CodedOutputStream.computeInt32Size(2, httpResponseCode_);
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(errorDescription_)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(3, errorDescription_);
    }
    if (((bitField0_ & 0x00000001) != 0)) {
      size += com.google.protobuf.CodedOutputStream.computeMessageSize(4, getError());
    }
    size += getUnknownFields().getSerializedSize();
    memoizedSize = size;
    return size;
  }
  // Value equality over all four fields plus unknown fields; the message
  // field compares presence first, then value only when both are present.
  @java.lang.Override
  public boolean equals(final java.lang.Object obj) {
    if (obj == this) {
      return true;
    }
    if (!(obj instanceof com.google.cloud.config.v1.TerraformError)) {
      return super.equals(obj);
    }
    com.google.cloud.config.v1.TerraformError other =
        (com.google.cloud.config.v1.TerraformError) obj;
    if (!getResourceAddress().equals(other.getResourceAddress())) return false;
    if (getHttpResponseCode() != other.getHttpResponseCode()) return false;
    if (!getErrorDescription().equals(other.getErrorDescription())) return false;
    if (hasError() != other.hasError()) return false;
    if (hasError()) {
      if (!getError().equals(other.getError())) return false;
    }
    if (!getUnknownFields().equals(other.getUnknownFields())) return false;
    return true;
  }
/** Memoized hash consistent with equals(); uses protoc's standard 19/37/53 mixing scheme. */
@java.lang.Override
public int hashCode() {
  if (memoizedHashCode != 0) {
    return memoizedHashCode;
  }
  int hash = 41;
  hash = (19 * hash) + getDescriptor().hashCode();
  hash = (37 * hash) + RESOURCE_ADDRESS_FIELD_NUMBER;
  hash = (53 * hash) + getResourceAddress().hashCode();
  hash = (37 * hash) + HTTP_RESPONSE_CODE_FIELD_NUMBER;
  hash = (53 * hash) + getHttpResponseCode();
  hash = (37 * hash) + ERROR_DESCRIPTION_FIELD_NUMBER;
  hash = (53 * hash) + getErrorDescription().hashCode();
  // Only fold in `error` when present, matching the presence check in equals().
  if (hasError()) {
    hash = (37 * hash) + ERROR_FIELD_NUMBER;
    hash = (53 * hash) + getError().hashCode();
  }
  hash = (29 * hash) + getUnknownFields().hashCode();
  memoizedHashCode = hash;
  return hash;
}
// ---------------------------------------------------------------------------
// Standard generated parse entry points. The ByteBuffer/ByteString/byte[]
// overloads delegate to PARSER directly; the stream overloads go through
// GeneratedMessageV3 helpers that translate protobuf exceptions to IOException
// semantics. "Delimited" variants read a varint length prefix first.
// ---------------------------------------------------------------------------
public static com.google.cloud.config.v1.TerraformError parseFrom(java.nio.ByteBuffer data)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data);
}

public static com.google.cloud.config.v1.TerraformError parseFrom(
    java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data, extensionRegistry);
}

public static com.google.cloud.config.v1.TerraformError parseFrom(
    com.google.protobuf.ByteString data)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data);
}

public static com.google.cloud.config.v1.TerraformError parseFrom(
    com.google.protobuf.ByteString data,
    com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data, extensionRegistry);
}

public static com.google.cloud.config.v1.TerraformError parseFrom(byte[] data)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data);
}

public static com.google.cloud.config.v1.TerraformError parseFrom(
    byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data, extensionRegistry);
}

public static com.google.cloud.config.v1.TerraformError parseFrom(java.io.InputStream input)
    throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}

public static com.google.cloud.config.v1.TerraformError parseFrom(
    java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
      PARSER, input, extensionRegistry);
}

public static com.google.cloud.config.v1.TerraformError parseDelimitedFrom(
    java.io.InputStream input) throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
}

public static com.google.cloud.config.v1.TerraformError parseDelimitedFrom(
    java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
      PARSER, input, extensionRegistry);
}

public static com.google.cloud.config.v1.TerraformError parseFrom(
    com.google.protobuf.CodedInputStream input) throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}

public static com.google.cloud.config.v1.TerraformError parseFrom(
    com.google.protobuf.CodedInputStream input,
    com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
      PARSER, input, extensionRegistry);
}
// Builder factories. toBuilder() on the default instance returns a fresh Builder
// rather than merging, avoiding a needless copy of all-default fields.
@java.lang.Override
public Builder newBuilderForType() {
  return newBuilder();
}

public static Builder newBuilder() {
  return DEFAULT_INSTANCE.toBuilder();
}

public static Builder newBuilder(com.google.cloud.config.v1.TerraformError prototype) {
  return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}

@java.lang.Override
public Builder toBuilder() {
  return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
}

@java.lang.Override
protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
  Builder builder = new Builder(parent);
  return builder;
}
/**
 * Builder for protobuf type {@code google.cloud.config.v1.TerraformError}:
 * errors encountered during actuation using Terraform.
 *
 * <p>Field presence is tracked in {@code bitField0_}: bit 0 = resource_address,
 * bit 1 = http_response_code, bit 2 = error_description, bit 3 = error.
 */
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
    implements
    // @@protoc_insertion_point(builder_implements:google.cloud.config.v1.TerraformError)
    com.google.cloud.config.v1.TerraformErrorOrBuilder {

  public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
    return com.google.cloud.config.v1.ConfigProto
        .internal_static_google_cloud_config_v1_TerraformError_descriptor;
  }

  @java.lang.Override
  protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internalGetFieldAccessorTable() {
    return com.google.cloud.config.v1.ConfigProto
        .internal_static_google_cloud_config_v1_TerraformError_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            com.google.cloud.config.v1.TerraformError.class,
            com.google.cloud.config.v1.TerraformError.Builder.class);
  }

  // Construct using com.google.cloud.config.v1.TerraformError.newBuilder()
  private Builder() {
    maybeForceBuilderInitialization();
  }

  private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
    super(parent);
    maybeForceBuilderInitialization();
  }

  private void maybeForceBuilderInitialization() {
    // Eagerly create the nested-message field builder when the runtime requires it.
    if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) {
      getErrorFieldBuilder();
    }
  }

  /** Resets all fields to their defaults and disposes any live nested builder. */
  @java.lang.Override
  public Builder clear() {
    super.clear();
    bitField0_ = 0;
    resourceAddress_ = "";
    httpResponseCode_ = 0;
    errorDescription_ = "";
    error_ = null;
    if (errorBuilder_ != null) {
      errorBuilder_.dispose();
      errorBuilder_ = null;
    }
    return this;
  }

  @java.lang.Override
  public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
    return com.google.cloud.config.v1.ConfigProto
        .internal_static_google_cloud_config_v1_TerraformError_descriptor;
  }

  @java.lang.Override
  public com.google.cloud.config.v1.TerraformError getDefaultInstanceForType() {
    return com.google.cloud.config.v1.TerraformError.getDefaultInstance();
  }

  @java.lang.Override
  public com.google.cloud.config.v1.TerraformError build() {
    com.google.cloud.config.v1.TerraformError result = buildPartial();
    if (!result.isInitialized()) {
      throw newUninitializedMessageException(result);
    }
    return result;
  }

  @java.lang.Override
  public com.google.cloud.config.v1.TerraformError buildPartial() {
    com.google.cloud.config.v1.TerraformError result =
        new com.google.cloud.config.v1.TerraformError(this);
    if (bitField0_ != 0) {
      buildPartial0(result);
    }
    onBuilt();
    return result;
  }

  // Copies only the fields whose presence bits are set into the result message.
  // Note the builder's bit 3 (error) maps to the message's bit 0.
  private void buildPartial0(com.google.cloud.config.v1.TerraformError result) {
    int from_bitField0_ = bitField0_;
    if (((from_bitField0_ & 0x00000001) != 0)) {
      result.resourceAddress_ = resourceAddress_;
    }
    if (((from_bitField0_ & 0x00000002) != 0)) {
      result.httpResponseCode_ = httpResponseCode_;
    }
    if (((from_bitField0_ & 0x00000004) != 0)) {
      result.errorDescription_ = errorDescription_;
    }
    int to_bitField0_ = 0;
    if (((from_bitField0_ & 0x00000008) != 0)) {
      result.error_ = errorBuilder_ == null ? error_ : errorBuilder_.build();
      to_bitField0_ |= 0x00000001;
    }
    result.bitField0_ |= to_bitField0_;
  }

  @java.lang.Override
  public Builder clone() {
    return super.clone();
  }

  @java.lang.Override
  public Builder setField(
      com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
    return super.setField(field, value);
  }

  @java.lang.Override
  public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
    return super.clearField(field);
  }

  @java.lang.Override
  public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
    return super.clearOneof(oneof);
  }

  @java.lang.Override
  public Builder setRepeatedField(
      com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
    return super.setRepeatedField(field, index, value);
  }

  @java.lang.Override
  public Builder addRepeatedField(
      com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
    return super.addRepeatedField(field, value);
  }

  @java.lang.Override
  public Builder mergeFrom(com.google.protobuf.Message other) {
    if (other instanceof com.google.cloud.config.v1.TerraformError) {
      return mergeFrom((com.google.cloud.config.v1.TerraformError) other);
    } else {
      super.mergeFrom(other);
      return this;
    }
  }

  // Field-by-field merge: non-default scalars in `other` overwrite ours; a set
  // `error` is merged (not replaced), per proto message-merge semantics.
  public Builder mergeFrom(com.google.cloud.config.v1.TerraformError other) {
    if (other == com.google.cloud.config.v1.TerraformError.getDefaultInstance()) return this;
    if (!other.getResourceAddress().isEmpty()) {
      resourceAddress_ = other.resourceAddress_;
      bitField0_ |= 0x00000001;
      onChanged();
    }
    if (other.getHttpResponseCode() != 0) {
      setHttpResponseCode(other.getHttpResponseCode());
    }
    if (!other.getErrorDescription().isEmpty()) {
      errorDescription_ = other.errorDescription_;
      bitField0_ |= 0x00000004;
      onChanged();
    }
    if (other.hasError()) {
      mergeError(other.getError());
    }
    this.mergeUnknownFields(other.getUnknownFields());
    onChanged();
    return this;
  }

  @java.lang.Override
  public final boolean isInitialized() {
    return true;
  }

  // Streaming parse. Wire tags 10/16/26/34 are fields 1-4 (tag = field# << 3 | wire type).
  @java.lang.Override
  public Builder mergeFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    if (extensionRegistry == null) {
      throw new java.lang.NullPointerException();
    }
    try {
      boolean done = false;
      while (!done) {
        int tag = input.readTag();
        switch (tag) {
          case 0:
            done = true;
            break;
          case 10:
            {
              resourceAddress_ = input.readStringRequireUtf8();
              bitField0_ |= 0x00000001;
              break;
            } // case 10
          case 16:
            {
              httpResponseCode_ = input.readInt32();
              bitField0_ |= 0x00000002;
              break;
            } // case 16
          case 26:
            {
              errorDescription_ = input.readStringRequireUtf8();
              bitField0_ |= 0x00000004;
              break;
            } // case 26
          case 34:
            {
              input.readMessage(getErrorFieldBuilder().getBuilder(), extensionRegistry);
              bitField0_ |= 0x00000008;
              break;
            } // case 34
          default:
            {
              if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                done = true; // was an endgroup tag
              }
              break;
            } // default:
        } // switch (tag)
      } // while (!done)
    } catch (com.google.protobuf.InvalidProtocolBufferException e) {
      throw e.unwrapIOException();
    } finally {
      onChanged();
    } // finally
    return this;
  }

  private int bitField0_;

  // Stored as String or ByteString; lazily converted on access (standard protobuf idiom).
  private java.lang.Object resourceAddress_ = "";

  /**
   * Returns the resource address (field {@code string resource_address = 1}),
   * e.g. {@code google_compute_network.vpc_network}.
   */
  public java.lang.String getResourceAddress() {
    java.lang.Object ref = resourceAddress_;
    if (!(ref instanceof java.lang.String)) {
      com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
      java.lang.String s = bs.toStringUtf8();
      resourceAddress_ = s;
      return s;
    } else {
      return (java.lang.String) ref;
    }
  }

  /** Returns the UTF-8 bytes of {@code resource_address}. */
  public com.google.protobuf.ByteString getResourceAddressBytes() {
    java.lang.Object ref = resourceAddress_;
    if (ref instanceof String) {
      com.google.protobuf.ByteString b =
          com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
      resourceAddress_ = b;
      return b;
    } else {
      return (com.google.protobuf.ByteString) ref;
    }
  }

  /** Sets {@code resource_address}; {@code value} must be non-null. */
  public Builder setResourceAddress(java.lang.String value) {
    if (value == null) {
      throw new NullPointerException();
    }
    resourceAddress_ = value;
    bitField0_ |= 0x00000001;
    onChanged();
    return this;
  }

  /** Resets {@code resource_address} to its default (empty string). */
  public Builder clearResourceAddress() {
    resourceAddress_ = getDefaultInstance().getResourceAddress();
    bitField0_ = (bitField0_ & ~0x00000001);
    onChanged();
    return this;
  }

  /** Sets {@code resource_address} from UTF-8 bytes; validates the encoding. */
  public Builder setResourceAddressBytes(com.google.protobuf.ByteString value) {
    if (value == null) {
      throw new NullPointerException();
    }
    checkByteStringIsUtf8(value);
    resourceAddress_ = value;
    bitField0_ |= 0x00000001;
    onChanged();
    return this;
  }

  private int httpResponseCode_;

  /**
   * Returns {@code int32 http_response_code = 2}; 0 means no HTTP response code
   * was returned by Terraform.
   */
  @java.lang.Override
  public int getHttpResponseCode() {
    return httpResponseCode_;
  }

  /** Sets {@code http_response_code}. */
  public Builder setHttpResponseCode(int value) {
    httpResponseCode_ = value;
    bitField0_ |= 0x00000002;
    onChanged();
    return this;
  }

  /** Resets {@code http_response_code} to 0. */
  public Builder clearHttpResponseCode() {
    bitField0_ = (bitField0_ & ~0x00000002);
    httpResponseCode_ = 0;
    onChanged();
    return this;
  }

  // Stored as String or ByteString; lazily converted on access.
  private java.lang.Object errorDescription_ = "";

  /** Returns the human-readable description (field {@code string error_description = 3}). */
  public java.lang.String getErrorDescription() {
    java.lang.Object ref = errorDescription_;
    if (!(ref instanceof java.lang.String)) {
      com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
      java.lang.String s = bs.toStringUtf8();
      errorDescription_ = s;
      return s;
    } else {
      return (java.lang.String) ref;
    }
  }

  /** Returns the UTF-8 bytes of {@code error_description}. */
  public com.google.protobuf.ByteString getErrorDescriptionBytes() {
    java.lang.Object ref = errorDescription_;
    if (ref instanceof String) {
      com.google.protobuf.ByteString b =
          com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
      errorDescription_ = b;
      return b;
    } else {
      return (com.google.protobuf.ByteString) ref;
    }
  }

  /** Sets {@code error_description}; {@code value} must be non-null. */
  public Builder setErrorDescription(java.lang.String value) {
    if (value == null) {
      throw new NullPointerException();
    }
    errorDescription_ = value;
    bitField0_ |= 0x00000004;
    onChanged();
    return this;
  }

  /** Resets {@code error_description} to its default (empty string). */
  public Builder clearErrorDescription() {
    errorDescription_ = getDefaultInstance().getErrorDescription();
    bitField0_ = (bitField0_ & ~0x00000004);
    onChanged();
    return this;
  }

  /** Sets {@code error_description} from UTF-8 bytes; validates the encoding. */
  public Builder setErrorDescriptionBytes(com.google.protobuf.ByteString value) {
    if (value == null) {
      throw new NullPointerException();
    }
    checkByteStringIsUtf8(value);
    errorDescription_ = value;
    bitField0_ |= 0x00000004;
    onChanged();
    return this;
  }

  // Field 4: `.google.rpc.Status error` (OUTPUT_ONLY). Once errorBuilder_ exists,
  // it owns the value and error_ is nulled out.
  private com.google.rpc.Status error_;
  private com.google.protobuf.SingleFieldBuilderV3<
          com.google.rpc.Status, com.google.rpc.Status.Builder, com.google.rpc.StatusOrBuilder>
      errorBuilder_;

  /** Returns whether the {@code error} field is set. */
  public boolean hasError() {
    return ((bitField0_ & 0x00000008) != 0);
  }

  /** Returns {@code error}, or the Status default instance when unset. */
  public com.google.rpc.Status getError() {
    if (errorBuilder_ == null) {
      return error_ == null ? com.google.rpc.Status.getDefaultInstance() : error_;
    } else {
      return errorBuilder_.getMessage();
    }
  }

  /** Sets {@code error}; {@code value} must be non-null. */
  public Builder setError(com.google.rpc.Status value) {
    if (errorBuilder_ == null) {
      if (value == null) {
        throw new NullPointerException();
      }
      error_ = value;
    } else {
      errorBuilder_.setMessage(value);
    }
    bitField0_ |= 0x00000008;
    onChanged();
    return this;
  }

  /** Sets {@code error} from a Status builder. */
  public Builder setError(com.google.rpc.Status.Builder builderForValue) {
    if (errorBuilder_ == null) {
      error_ = builderForValue.build();
    } else {
      errorBuilder_.setMessage(builderForValue.build());
    }
    bitField0_ |= 0x00000008;
    onChanged();
    return this;
  }

  /** Merges {@code value} into the current {@code error} (proto message-merge semantics). */
  public Builder mergeError(com.google.rpc.Status value) {
    if (errorBuilder_ == null) {
      // Merge only when an existing non-default value is present; otherwise replace.
      if (((bitField0_ & 0x00000008) != 0)
          && error_ != null
          && error_ != com.google.rpc.Status.getDefaultInstance()) {
        getErrorBuilder().mergeFrom(value);
      } else {
        error_ = value;
      }
    } else {
      errorBuilder_.mergeFrom(value);
    }
    if (error_ != null) {
      bitField0_ |= 0x00000008;
      onChanged();
    }
    return this;
  }

  /** Clears {@code error} and disposes any live nested builder. */
  public Builder clearError() {
    bitField0_ = (bitField0_ & ~0x00000008);
    error_ = null;
    if (errorBuilder_ != null) {
      errorBuilder_.dispose();
      errorBuilder_ = null;
    }
    onChanged();
    return this;
  }

  /** Returns a mutable builder for {@code error}, marking the field as set. */
  public com.google.rpc.Status.Builder getErrorBuilder() {
    bitField0_ |= 0x00000008;
    onChanged();
    return getErrorFieldBuilder().getBuilder();
  }

  /** Returns a read-only view of {@code error} without forcing builder creation. */
  public com.google.rpc.StatusOrBuilder getErrorOrBuilder() {
    if (errorBuilder_ != null) {
      return errorBuilder_.getMessageOrBuilder();
    } else {
      return error_ == null ? com.google.rpc.Status.getDefaultInstance() : error_;
    }
  }

  // Lazily creates the single-field builder for `error`; it takes ownership of
  // the current value, so error_ is nulled afterwards.
  private com.google.protobuf.SingleFieldBuilderV3<
          com.google.rpc.Status, com.google.rpc.Status.Builder, com.google.rpc.StatusOrBuilder>
      getErrorFieldBuilder() {
    if (errorBuilder_ == null) {
      errorBuilder_ =
          new com.google.protobuf.SingleFieldBuilderV3<
              com.google.rpc.Status,
              com.google.rpc.Status.Builder,
              com.google.rpc.StatusOrBuilder>(getError(), getParentForChildren(), isClean());
      error_ = null;
    }
    return errorBuilder_;
  }

  @java.lang.Override
  public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
    return super.setUnknownFields(unknownFields);
  }

  @java.lang.Override
  public final Builder mergeUnknownFields(
      final com.google.protobuf.UnknownFieldSet unknownFields) {
    return super.mergeUnknownFields(unknownFields);
  }

  // @@protoc_insertion_point(builder_scope:google.cloud.config.v1.TerraformError)
}
// @@protoc_insertion_point(class_scope:google.cloud.config.v1.TerraformError)

// Singleton all-defaults instance; also the factory for new builders.
private static final com.google.cloud.config.v1.TerraformError DEFAULT_INSTANCE;

static {
  DEFAULT_INSTANCE = new com.google.cloud.config.v1.TerraformError();
}

public static com.google.cloud.config.v1.TerraformError getDefaultInstance() {
  return DEFAULT_INSTANCE;
}

// Parser delegate used by all parseFrom() overloads; on failure it attaches the
// partially-built message to the thrown InvalidProtocolBufferException.
private static final com.google.protobuf.Parser<TerraformError> PARSER =
    new com.google.protobuf.AbstractParser<TerraformError>() {
      @java.lang.Override
      public TerraformError parsePartialFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        Builder builder = newBuilder();
        try {
          builder.mergeFrom(input, extensionRegistry);
        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
          throw e.setUnfinishedMessage(builder.buildPartial());
        } catch (com.google.protobuf.UninitializedMessageException e) {
          throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
        } catch (java.io.IOException e) {
          throw new com.google.protobuf.InvalidProtocolBufferException(e)
              .setUnfinishedMessage(builder.buildPartial());
        }
        return builder.buildPartial();
      }
    };

public static com.google.protobuf.Parser<TerraformError> parser() {
  return PARSER;
}

@java.lang.Override
public com.google.protobuf.Parser<TerraformError> getParserForType() {
  return PARSER;
}

@java.lang.Override
public com.google.cloud.config.v1.TerraformError getDefaultInstanceForType() {
  return DEFAULT_INSTANCE;
}
}
|
google/startup-os | 36,568 | common/tests/FileUtilsTest.java | /*
* Copyright 2018 The StartupOS Authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.startupos.common.tests;
import static java.nio.charset.StandardCharsets.UTF_8;
import static junit.framework.TestCase.assertEquals;
import static junit.framework.TestCase.assertTrue;
import static org.junit.Assert.assertFalse;
import com.google.common.collect.ImmutableList;
import com.google.common.jimfs.Configuration;
import com.google.common.jimfs.Jimfs;
import com.google.startupos.common.CommonComponent;
import com.google.startupos.common.CommonModule;
import com.google.startupos.common.DaggerCommonComponent;
import com.google.startupos.common.FileUtils;
import com.google.startupos.common.tests.Protos.TestMessage;
import dagger.Provides;
import java.io.IOException;
import java.nio.file.FileSystem;
import java.nio.file.Files;
import java.nio.file.NoSuchFileException;
import java.nio.file.Path;
import java.util.Arrays;
import java.util.Collection;
import javax.inject.Singleton;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.junit.runners.Parameterized;
import org.junit.runners.Parameterized.Parameters;
@RunWith(Parameterized.class)
public class FileUtilsTest {
// Paths used by the tests; all live on the in-memory Jimfs file system built in setup().
private static final String TEST_DIR_PATH = "/root/foo";
// Deliberately has a file-like extension; also used as a directory name in the mkdirs tests.
private static final String TEST_FILE_PATH = "/root/foo.txt";
private static final String TEST_PROTO_BINARY_FILE_PATH = "/root/foo.pb";
private static final String TEST_PROTOTXT_FILE_PATH = "/root/test.prototxt";
/** Supplies one {Jimfs configuration, display name} pair per simulated platform. */
@Parameters(name = "{1}")
public static Collection<Object[]> parameters() {
  Object[][] platformConfigurations = {
    {Configuration.unix(), "Unix"},
    {Configuration.osX(), "OSX"},
    {Configuration.windows(), "Windows"}
  };
  return Arrays.asList(platformConfigurations);
}
// Injected per test-class instance by the Parameterized runner from parameters().
private final Configuration fileSystemConfig;
private final String fileSystemName;
// Recreated for every test method in setup().
private FileSystem fileSystem;
private FileUtils fileUtils;

public FileUtilsTest(Configuration fileSystemConfig, String fileSystemName) {
  this.fileSystemConfig = fileSystemConfig;
  this.fileSystemName = fileSystemName;
}
/**
 * Builds a fresh in-memory Jimfs file system and a FileUtils wired to it via Dagger,
 * so the code under test never touches the real disk.
 */
@Before
public void setup() {
  fileSystem = Jimfs.newFileSystem(fileSystemConfig);
  // Override the module's default FileSystem provider with the Jimfs instance.
  CommonComponent commonComponent =
      DaggerCommonComponent.builder()
          .commonModule(
              new CommonModule() {
                @Provides
                @Singleton
                @Override
                public FileSystem provideDefaultFileSystem() {
                  return fileSystem;
                }
              })
          .build();
  fileUtils = commonComponent.getFileUtils();
}
// --- folderExists / fileExists / fileOrFolderExists ---
// TODO: Add tests for Windows. Currently, jimfs says: "Jimfs does not currently support the
// "Windows syntax for an absolute path on the current drive".

/** folderExists() is true for a real directory. */
@Test
public void testFolderExistsIsTrueWhenFolder() throws IOException {
  if ("Windows".equals(fileSystemName)) {
    return;
  }
  Path dir = fileSystem.getPath(TEST_DIR_PATH);
  Files.createDirectories(dir);
  assertTrue(fileUtils.folderExists(TEST_DIR_PATH));
}

/** folderExists() is false when the path points at a regular file. */
@Test
public void testFolderExistsIsFalseWhenFile() throws IOException {
  if ("Windows".equals(fileSystemName)) {
    return;
  }
  Path file = fileSystem.getPath(TEST_FILE_PATH);
  Files.createDirectories(file.getParent());
  Files.write(file, "hello world".getBytes(UTF_8));
  assertFalse(fileUtils.folderExists(TEST_FILE_PATH));
}

/** folderExists() is false when nothing exists at the path. */
@Test
public void testFolderExistsIsFalseWhenNothing() {
  if ("Windows".equals(fileSystemName)) {
    return;
  }
  assertFalse(fileUtils.folderExists(TEST_FILE_PATH));
}

/** fileExists() is true for a real regular file. */
@Test
public void testFileExistsIsTrueWhenFile() throws IOException {
  if ("Windows".equals(fileSystemName)) {
    return;
  }
  Path file = fileSystem.getPath(TEST_FILE_PATH);
  Files.createDirectories(file.getParent());
  Files.write(file, "hello world".getBytes(UTF_8));
  assertTrue(fileUtils.fileExists(TEST_FILE_PATH));
}

/** fileExists() is false when the path points at a directory. */
@Test
public void testFileExistsIsFalseWhenFolder() throws IOException {
  if ("Windows".equals(fileSystemName)) {
    return;
  }
  Path dir = fileSystem.getPath(TEST_DIR_PATH);
  Files.createDirectories(dir);
  // Bug fix: the original asserted on TEST_FILE_PATH, which was never created,
  // so the assertion passed vacuously and never exercised the directory case.
  // Query the directory that was actually created.
  assertFalse(fileUtils.fileExists(TEST_DIR_PATH));
}

/** fileExists() is false when nothing exists at the path. */
@Test
public void testFileExistsIsFalseWhenNothing() {
  if ("Windows".equals(fileSystemName)) {
    return;
  }
  assertFalse(fileUtils.fileExists(TEST_FILE_PATH));
}

/** fileOrFolderExists() is true for a regular file. */
@Test
public void testFileOrFolderExistsIsTrueWhenFile() throws IOException {
  if ("Windows".equals(fileSystemName)) {
    return;
  }
  Path file = fileSystem.getPath(TEST_FILE_PATH);
  Files.createDirectories(file.getParent());
  Files.write(file, "hello world".getBytes(UTF_8));
  assertTrue(fileUtils.fileOrFolderExists(TEST_FILE_PATH));
}

/** fileOrFolderExists() is true for a directory. */
@Test
public void testFileOrFolderExistsIsTrueWhenFolder() throws IOException {
  if ("Windows".equals(fileSystemName)) {
    return;
  }
  Path dir = fileSystem.getPath(TEST_DIR_PATH);
  Files.createDirectories(dir);
  assertTrue(fileUtils.fileOrFolderExists(TEST_DIR_PATH));
}

/** fileOrFolderExists() is false when nothing exists at either path. */
@Test
public void testFileOrFolderExistsIsFalseWhenNothing() {
  if ("Windows".equals(fileSystemName)) {
    return;
  }
  assertFalse(fileUtils.fileOrFolderExists(TEST_DIR_PATH));
  assertFalse(fileUtils.fileOrFolderExists(TEST_FILE_PATH));
}
/** mkdirs() creates a directory at a plain folder path. */
@Test
public void testMkdirsWhenFolder() {
  if ("Windows".equals(fileSystemName)) {
    return;
  }
  fileUtils.mkdirs(TEST_DIR_PATH);
  Path created = fileSystem.getPath(TEST_DIR_PATH);
  assertTrue(Files.isDirectory(created));
}

/** mkdirs() treats a dotted name as a directory, not a file. */
@Test
public void testMkdirsWhenFolderNameContainsDot() {
  if ("Windows".equals(fileSystemName)) {
    return;
  }
  fileUtils.mkdirs(TEST_FILE_PATH);
  Path created = fileSystem.getPath(TEST_FILE_PATH);
  assertTrue(Files.isDirectory(created));
}
/** A leading tilde is replaced with the user's home directory. */
@Test
public void testExpandHomeDirectoryWhenPathStartsWithTilde() {
  if ("Windows".equals(fileSystemName)) {
    return;
  }
  String expanded = fileUtils.expandHomeDirectory("~" + TEST_DIR_PATH);
  String home = System.getProperty("user.home");
  assertEquals(home + TEST_DIR_PATH, expanded);
}

/** A path without a tilde passes through unchanged. */
@Test
public void testExpandHomeDirectoryWhenPathDoesNotStartWithTilde() {
  if ("Windows".equals(fileSystemName)) {
    return;
  }
  String expanded = fileUtils.expandHomeDirectory(TEST_DIR_PATH);
  assertEquals(TEST_DIR_PATH, expanded);
}

/** The empty string passes through unchanged. */
@Test
public void testExpandHomeDirectoryWhenPathIsEmptyString() {
  if ("Windows".equals(fileSystemName)) {
    return;
  }
  String expanded = fileUtils.expandHomeDirectory("");
  assertEquals("", expanded);
}
@Test
public void testWriteStringWhenFileIsEmpty() throws IOException {
if (fileSystemName.equals("Windows")) {
return;
}
Path testPath = fileSystem.getPath(TEST_FILE_PATH);
fileUtils.writeString("hello world", TEST_FILE_PATH);
assertEquals("hello world", new String(Files.readAllBytes(testPath), UTF_8));
}
@Test
public void testWriteStringOneLineWithNewLine() throws IOException {
if (fileSystemName.equals("Windows")) {
return;
}
Path testPath = fileSystem.getPath(TEST_FILE_PATH);
fileUtils.writeString("hello world\n", TEST_FILE_PATH);
assertEquals("hello world\n", new String(Files.readAllBytes(testPath), UTF_8));
}
@Test
public void testWriteStringOneLineWithTwoNewLinesInTheEnd() throws IOException {
if (fileSystemName.equals("Windows")) {
return;
}
Path testPath = fileSystem.getPath(TEST_FILE_PATH);
fileUtils.writeString("hello world\n\n", TEST_FILE_PATH);
assertEquals("hello world\n\n", new String(Files.readAllBytes(testPath), UTF_8));
}
@Test
public void testWriteStringTwoLinesWithNewLines() throws IOException {
if (fileSystemName.equals("Windows")) {
return;
}
Path testPath = fileSystem.getPath(TEST_FILE_PATH);
Files.createDirectories(testPath.getParent());
fileUtils.writeString("first line\nsecond line\n", TEST_FILE_PATH);
assertEquals("first line\nsecond line\n", new String(Files.readAllBytes(testPath), UTF_8));
}
@Test
public void testWriteStringWhenFileIsNotEmpty() throws IOException {
if (fileSystemName.equals("Windows")) {
return;
}
Path testPath = fileSystem.getPath(TEST_FILE_PATH);
Files.createDirectories(testPath.getParent());
Files.write(testPath, "first line".getBytes(UTF_8));
fileUtils.writeString("second line", TEST_FILE_PATH);
assertEquals("second line", new String(Files.readAllBytes(testPath), UTF_8));
}
@Test
public void testWriteStringWhenFileWithoutExtension() throws IOException {
if (fileSystemName.equals("Windows")) {
return;
}
Path testPath = fileSystem.getPath(TEST_DIR_PATH);
fileUtils.writeString("hello world", TEST_DIR_PATH);
assertEquals("hello world", new String(Files.readAllBytes(testPath), UTF_8));
}
@Test
public void testWriteStringWhenPathWithoutParentFolder() throws IOException {
if (fileSystemName.equals("Windows")) {
return;
}
Path testPath = fileSystem.getPath("/foo.txt");
fileUtils.writeString("hello world", "/foo.txt");
assertEquals("hello world", new String(Files.readAllBytes(testPath), UTF_8));
}
  // The unchecked variant is expected to surface failures (here: an empty path)
  // as a RuntimeException. On Windows the test throws directly so the
  // `expected` clause is still satisfied when the body is effectively skipped.
  @Test(expected = RuntimeException.class)
  public void testWriteStringUncheckedWithException() {
    if (fileSystemName.equals("Windows")) {
      throw new RuntimeException();
    }
    fileUtils.writeStringUnchecked("hello world", "");
  }
  // readFile returns the file's full contents verbatim.
  @Test
  public void testReadFileWhenOneLine() throws IOException {
    if (fileSystemName.equals("Windows")) {
      return;
    }
    Path testPath = fileSystem.getPath(TEST_FILE_PATH);
    Files.createDirectories(testPath.getParent());
    Files.write(testPath, "first line".getBytes(UTF_8));
    String content = fileUtils.readFile(TEST_FILE_PATH);
    assertEquals("first line", content);
  }
  // A single trailing newline must be preserved, not stripped.
  @Test
  public void testReadFileWhenOneLineWithNewLineInTheEnd() throws IOException {
    if (fileSystemName.equals("Windows")) {
      return;
    }
    Path testPath = fileSystem.getPath(TEST_FILE_PATH);
    Files.createDirectories(testPath.getParent());
    Files.write(testPath, "first line\n".getBytes(UTF_8));
    String content = fileUtils.readFile(TEST_FILE_PATH);
    assertEquals("first line\n", content);
  }
  // A final blank line (two trailing '\n') must also survive the read.
  @Test
  public void testReadFileWhenOneLineWithTwoNewLinesInTheEnd() throws IOException {
    if (fileSystemName.equals("Windows")) {
      return;
    }
    Path testPath = fileSystem.getPath(TEST_FILE_PATH);
    Files.createDirectories(testPath.getParent());
    Files.write(testPath, "first line\n\n".getBytes(UTF_8));
    String content = fileUtils.readFile(TEST_FILE_PATH);
    assertEquals("first line\n\n", content);
  }
  // Interior newlines between lines are preserved as-is.
  @Test
  public void testReadFileWhenTwoLine() throws IOException {
    if (fileSystemName.equals("Windows")) {
      return;
    }
    Path testPath = fileSystem.getPath(TEST_FILE_PATH);
    Files.createDirectories(testPath.getParent());
    Files.write(testPath, "first line\nsecond line".getBytes(UTF_8));
    String content = fileUtils.readFile(TEST_FILE_PATH);
    assertEquals("first line\nsecond line", content);
  }
  @Test
  public void testReadFileWhenTwoLineWithNewLineInTheEnd() throws IOException {
    if (fileSystemName.equals("Windows")) {
      return;
    }
    Path testPath = fileSystem.getPath(TEST_FILE_PATH);
    Files.createDirectories(testPath.getParent());
    Files.write(testPath, "first line\nsecond line\n".getBytes(UTF_8));
    String content = fileUtils.readFile(TEST_FILE_PATH);
    assertEquals("first line\nsecond line\n", content);
  }
  // An empty file reads back as the empty string, not null.
  @Test
  public void testReadFileWhenIsEmpty() throws IOException {
    if (fileSystemName.equals("Windows")) {
      return;
    }
    Path testPath = fileSystem.getPath(TEST_FILE_PATH);
    Files.createDirectories(testPath.getParent());
    Files.write(testPath, "".getBytes(UTF_8));
    String content = fileUtils.readFile(TEST_FILE_PATH);
    assertEquals("", content);
  }
  // A file containing only a newline reads back as exactly "\n".
  @Test
  public void testReadFileWhenIsEmptyWithNewLine() throws IOException {
    if (fileSystemName.equals("Windows")) {
      return;
    }
    Path testPath = fileSystem.getPath(TEST_FILE_PATH);
    Files.createDirectories(testPath.getParent());
    Files.write(testPath, "\n".getBytes(UTF_8));
    String content = fileUtils.readFile(TEST_FILE_PATH);
    assertEquals("\n", content);
  }
  // The unchecked variant surfaces failures (empty path) as a RuntimeException;
  // thrown directly on Windows to satisfy `expected`.
  @Test(expected = RuntimeException.class)
  public void testReadFileUncheckedWithException() {
    if (fileSystemName.equals("Windows")) {
      throw new RuntimeException();
    }
    fileUtils.readFileUnchecked("");
  }
  // listContents returns only the names (not full paths) of a directory's
  // immediate children. TEST_FILE_PATH evidently lives directly under "/root"
  // and is named "foo.txt" -- the assertion below pins that.
  @Test
  public void testListContentsWhenOneFile() throws IOException {
    if (fileSystemName.equals("Windows")) {
      return;
    }
    Path testPath = fileSystem.getPath(TEST_FILE_PATH);
    Files.createDirectories(testPath.getParent());
    Files.createFile(testPath);
    ImmutableList<String> names = fileUtils.listContents("/root");
    assertEquals(ImmutableList.of("foo.txt"), names);
  }
  // Subdirectories are listed by name just like files.
  @Test
  public void testListContentsWhenOneFolder() throws IOException {
    if (fileSystemName.equals("Windows")) {
      return;
    }
    Path testPath = fileSystem.getPath(TEST_DIR_PATH);
    Files.createDirectories(testPath);
    ImmutableList<String> names = fileUtils.listContents("/root");
    assertEquals(ImmutableList.of("foo"), names);
  }
  // Entries without a file extension are returned as-is. Here TEST_FILE_PATH is
  // (unusually) created as a directory containing a file named "foo".
  @Test
  public void testListContentsWhenFileWithoutExtension() throws IOException {
    if (fileSystemName.equals("Windows")) {
      return;
    }
    Path testPath = fileSystem.getPath(TEST_FILE_PATH);
    Files.createDirectories(testPath);
    Files.createFile(fileSystem.getPath(TEST_FILE_PATH + "/foo"));
    ImmutableList<String> names = fileUtils.listContents(TEST_FILE_PATH);
    assertEquals(ImmutableList.of("foo"), names);
  }
  @Test
  public void testListContentsWhenTwoFiles() throws IOException {
    if (fileSystemName.equals("Windows")) {
      return;
    }
    Files.createDirectories(fileSystem.getPath("/foo"));
    Files.createFile(fileSystem.getPath("/foo/first_file.txt"));
    Files.createFile(fileSystem.getPath("/foo/second_file.txt"));
    ImmutableList<String> names = fileUtils.listContents("/foo");
    assertEquals(ImmutableList.of("first_file.txt", "second_file.txt"), names);
  }
  @Test
  public void testListContentsWhenTwoFolders() throws IOException {
    if (fileSystemName.equals("Windows")) {
      return;
    }
    Files.createDirectories(fileSystem.getPath("/foo/first_folder"));
    Files.createDirectories(fileSystem.getPath("/foo/second_folder"));
    ImmutableList<String> names = fileUtils.listContents("/foo");
    assertEquals(ImmutableList.of("first_folder", "second_folder"), names);
  }
  // Mixed files and folders come back in a single list; the expected order here
  // is lexicographic by name.
  @Test
  public void testListContentsWhenTwoFoldersAndTwoFiles() throws IOException {
    if (fileSystemName.equals("Windows")) {
      return;
    }
    Files.createDirectories(fileSystem.getPath("/foo/first_folder"));
    Files.createDirectories(fileSystem.getPath("/foo/second_folder"));
    Files.createFile(fileSystem.getPath("/foo/first_file.txt"));
    Files.createFile(fileSystem.getPath("/foo/second_file.txt"));
    ImmutableList<String> names = fileUtils.listContents("/foo");
    assertEquals(
        ImmutableList.of("first_file.txt", "first_folder", "second_file.txt", "second_folder"),
        names);
  }
  // An empty directory yields an empty (non-null) list.
  @Test
  public void testListContentsWhenPathDoesNotContainFilesAndFolders() throws IOException {
    if (fileSystemName.equals("Windows")) {
      return;
    }
    Files.createDirectories(fileSystem.getPath(TEST_DIR_PATH));
    ImmutableList<String> names = fileUtils.listContents(TEST_DIR_PATH);
    assertEquals(ImmutableList.of(), names);
  }
  // listContents is shallow: contents of subdirectories are not expanded.
  @Test
  public void testListContentsWhenFileInSubdirectory() throws IOException {
    if (fileSystemName.equals("Windows")) {
      return;
    }
    Files.createDirectories(fileSystem.getPath(TEST_DIR_PATH + "/subdirectory"));
    Files.createFile(fileSystem.getPath(TEST_DIR_PATH + "/subdirectory" + "/foo.txt"));
    ImmutableList<String> names = fileUtils.listContents(TEST_DIR_PATH);
    assertEquals(ImmutableList.of("subdirectory"), names);
  }
  // listContentsRecursively returns absolute paths for the whole tree, including
  // the starting directory itself. The extra "/work" entry is the in-memory file
  // system's default working directory (presumably Jimfs -- confirm).
  @Test
  public void testListContentsRecursively() throws IOException {
    if (fileSystemName.equals("Windows")) {
      return;
    }
    Files.createDirectories(fileSystem.getPath("/empty_folder"));
    Files.createDirectories(fileSystem.getPath("/path/to/folder"));
    Files.createFile(fileSystem.getPath("/first_file.txt"));
    Files.createFile(fileSystem.getPath("/path/to/folder/second_file.txt"));
    Files.createFile(fileSystem.getPath("/path/to/folder/third_file.txt"));
    ImmutableList<String> paths = fileUtils.listContentsRecursively("/");
    assertEquals(
        ImmutableList.of(
            "/",
            "/empty_folder",
            "/first_file.txt",
            "/path",
            "/path/to",
            "/path/to/folder",
            "/path/to/folder/second_file.txt",
            "/path/to/folder/third_file.txt",
            "/work"),
        paths);
  }
  @Test
  public void testListContentsRecursivelyWhenEmpty() throws IOException {
    if (fileSystemName.equals("Windows")) {
      return;
    }
    ImmutableList<String> paths = fileUtils.listContentsRecursively("/");
    assertEquals(ImmutableList.of("/", "/work"), paths);
  }
  // A missing directory propagates NoSuchFileException unwrapped; thrown
  // directly on Windows to satisfy `expected`.
  @Test(expected = NoSuchFileException.class)
  public void testListContentsRecursivelyWhenDirectoryNotExists() throws IOException {
    if (fileSystemName.equals("Windows")) {
      throw new NoSuchFileException("");
    }
    ImmutableList<String> paths = fileUtils.listContentsRecursively(TEST_DIR_PATH);
    assertEquals(ImmutableList.of(), paths);
  }
  // readPrototxt parses a text-format proto file into the supplied builder's
  // message type (returned as Message, hence the cast).
  @Test
  public void testReadPrototxt() throws IOException {
    if (fileSystemName.equals("Windows")) {
      return;
    }
    Path testPath = fileSystem.getPath(TEST_PROTOTXT_FILE_PATH);
    Files.createDirectories(testPath.getParent());
    Files.write(
        testPath,
        ("int32_field: 123\n"
                + "string_field: \"foo\"\n"
                + "map_field {\n"
                + "key: \"foo\"\n"
                + "value: 123\n"
                + "}\n"
                + "enum_field: YES")
            .getBytes(UTF_8));
    TestMessage actual =
        (TestMessage) fileUtils.readPrototxt(TEST_PROTOTXT_FILE_PATH, TestMessage.newBuilder());
    TestMessage expected =
        TestMessage.newBuilder()
            .setInt32Field(123)
            .setStringField("foo")
            .putMapField("foo", 123)
            .setEnumField(TestMessage.BooleanEnum.YES)
            .build();
    assertEquals(expected, actual);
  }
  // The unchecked variant surfaces failures (empty path) as a RuntimeException;
  // thrown directly on Windows to satisfy `expected`.
  @Test(expected = RuntimeException.class)
  public void testReadPrototxtUncheckedWithException() {
    if (fileSystemName.equals("Windows")) {
      throw new RuntimeException();
    }
    fileUtils.readPrototxtUnchecked("", TestMessage.newBuilder());
  }
  // writePrototxt renders the message in proto text format; note the two-space
  // indentation inside map_field and the trailing newline pinned by `expected`.
  @Test
  public void testWritePrototxt() throws IOException {
    if (fileSystemName.equals("Windows")) {
      return;
    }
    TestMessage message =
        TestMessage.newBuilder()
            .setInt32Field(123)
            .setStringField("foo")
            .putMapField("foo", 123)
            .setEnumField(TestMessage.BooleanEnum.YES)
            .build();
    fileUtils.writePrototxt(message, TEST_PROTOTXT_FILE_PATH);
    String actual =
        new String(Files.readAllBytes(fileSystem.getPath(TEST_PROTOTXT_FILE_PATH)), UTF_8);
    String expected =
        "int32_field: 123\n"
            + "string_field: \"foo\"\n"
            + "map_field {\n"
            + "  key: \"foo\"\n"
            + "  value: 123\n"
            + "}\n"
            + "enum_field: YES\n";
    assertEquals(expected, actual);
  }
  @Test(expected = RuntimeException.class)
  public void testWritePrototxtUncheckedWithException() {
    if (fileSystemName.equals("Windows")) {
      throw new RuntimeException();
    }
    fileUtils.writePrototxtUnchecked(TestMessage.getDefaultInstance(), "");
  }
@Test
public void testReadProtoBinary() throws IOException {
if (fileSystemName.equals("Windows")) {
return;
}
TestMessage expected =
TestMessage.newBuilder()
.setInt32Field(123)
.setStringField("foo")
.putMapField("foo", 123)
.setEnumField(TestMessage.BooleanEnum.YES)
.build();
Path testPath = fileSystem.getPath(TEST_PROTO_BINARY_FILE_PATH);
Files.createDirectories(testPath.getParent());
expected.writeTo(Files.newOutputStream(testPath));
TestMessage actual =
(TestMessage)
fileUtils.readProtoBinary(TEST_PROTO_BINARY_FILE_PATH, TestMessage.newBuilder());
assertEquals(expected, actual);
}
  // The unchecked variant surfaces failures (empty path) as a RuntimeException;
  // thrown directly on Windows to satisfy `expected`.
  @Test(expected = RuntimeException.class)
  public void testReadProtoBinaryUncheckedWithException() {
    if (fileSystemName.equals("Windows")) {
      throw new RuntimeException();
    }
    fileUtils.readProtoBinaryUnchecked("", TestMessage.newBuilder());
  }
@Test
public void testWriteProtoBinary() throws IOException {
if (fileSystemName.equals("Windows")) {
return;
}
TestMessage expected =
TestMessage.newBuilder()
.setInt32Field(123)
.setStringField("foo")
.putMapField("foo", 123)
.setEnumField(TestMessage.BooleanEnum.YES)
.build();
fileUtils.writeProtoBinary(expected, TEST_PROTO_BINARY_FILE_PATH);
Path testPath = fileSystem.getPath(TEST_PROTO_BINARY_FILE_PATH);
TestMessage actual =
TestMessage.newBuilder()
.build()
.getParserForType()
.parseFrom(Files.newInputStream(testPath));
assertEquals(expected, actual);
}
  // The unchecked variant surfaces failures (empty path) as a RuntimeException;
  // thrown directly on Windows to satisfy `expected`.
  @Test(expected = RuntimeException.class)
  public void testWriteProtoBinaryUncheckedWithException() {
    if (fileSystemName.equals("Windows")) {
      throw new RuntimeException();
    }
    fileUtils.writeProtoBinaryUnchecked(TestMessage.getDefaultInstance(), "");
  }
  // deleteDirectory removes the tree recursively, including the root of the
  // tree itself (last assertion).
  @Test
  public void testDeleteDirectory() throws IOException {
    if (fileSystemName.equals("Windows")) {
      return;
    }
    Files.createDirectories(fileSystem.getPath("/foo/empty_folder"));
    Files.createDirectories(fileSystem.getPath("/foo/path/to/folder"));
    Files.createFile(fileSystem.getPath("/foo/first_file.txt"));
    Files.createFile(fileSystem.getPath("/foo/path/to/folder/second_file.txt"));
    Files.createFile(fileSystem.getPath("/foo/path/to/folder/third_file.txt"));
    fileUtils.deleteDirectory("/foo");
    assertFalse(Files.isDirectory(fileSystem.getPath("/foo/empty_folder")));
    assertFalse(Files.isDirectory(fileSystem.getPath("/foo/path/to/folder")));
    assertFalse(Files.isRegularFile(fileSystem.getPath("/foo/first_file.txt")));
    assertFalse(Files.isRegularFile(fileSystem.getPath("/foo/path/to/folder/second_file.txt")));
    assertFalse(Files.isRegularFile(fileSystem.getPath("/foo/path/to/folder/third_file.txt")));
    assertFalse(Files.isDirectory(fileSystem.getPath("/foo")));
  }
  // The unchecked variant surfaces failures (empty path) as a RuntimeException;
  // thrown directly on Windows to satisfy `expected`.
  @Test(expected = RuntimeException.class)
  public void testDeleteDirectoryUnchecked() {
    if (fileSystemName.equals("Windows")) {
      throw new RuntimeException();
    }
    fileUtils.deleteDirectoryUnchecked("");
  }
  // deleteFileOrDirectoryIfExists accepts both a directory and a regular file.
  @Test
  public void testDeleteFileOrDirectoryIfExists() throws IOException {
    if (fileSystemName.equals("Windows")) {
      return;
    }
    Files.createDirectories(fileSystem.getPath("/foo/folder"));
    Files.createFile(fileSystem.getPath("/foo/file.txt"));
    fileUtils.deleteFileOrDirectoryIfExists("/foo/folder");
    fileUtils.deleteFileOrDirectoryIfExists("/foo/file.txt");
    assertFalse(Files.isDirectory(fileSystem.getPath("/foo/folder")));
    assertFalse(Files.isRegularFile(fileSystem.getPath("/foo/file.txt")));
  }
  @Test(expected = RuntimeException.class)
  public void testDeleteFileOrDirectoryIfExistsUnchecked() {
    if (fileSystemName.equals("Windows")) {
      throw new RuntimeException();
    }
    fileUtils.deleteFileOrDirectoryIfExistsUnchecked("");
  }
  // clearDirectory empties the directory but keeps the directory itself
  // (last assertion in each test).
  @Test
  public void testClearDirectoryWhenFolders() throws IOException {
    if (fileSystemName.equals("Windows")) {
      return;
    }
    Files.createDirectories(fileSystem.getPath(TEST_DIR_PATH));
    Files.createDirectories(fileSystem.getPath(TEST_DIR_PATH + "/first_folder"));
    Files.createDirectories(fileSystem.getPath(TEST_DIR_PATH + "/path/to/folder"));
    fileUtils.clearDirectory(TEST_DIR_PATH);
    assertFalse(Files.isDirectory(fileSystem.getPath(TEST_DIR_PATH + "/first_folder")));
    assertFalse(Files.isDirectory(fileSystem.getPath(TEST_DIR_PATH + "/path/to/folder")));
    assertTrue(Files.isDirectory(fileSystem.getPath(TEST_DIR_PATH)));
  }
  @Test
  public void testClearDirectoryWhenFiles() throws IOException {
    if (fileSystemName.equals("Windows")) {
      return;
    }
    Files.createDirectories(fileSystem.getPath(TEST_DIR_PATH));
    Files.createFile(fileSystem.getPath(TEST_DIR_PATH + "/first_file.txt"));
    Files.createFile(fileSystem.getPath(TEST_DIR_PATH + "/second_file.txt"));
    fileUtils.clearDirectory(TEST_DIR_PATH);
    assertFalse(Files.isRegularFile(fileSystem.getPath(TEST_DIR_PATH + "/first_file.txt")));
    assertFalse(Files.isRegularFile(fileSystem.getPath(TEST_DIR_PATH + "/second_file.txt")));
    assertTrue(Files.isDirectory(fileSystem.getPath(TEST_DIR_PATH)));
  }
  // Mixed nested files and folders are all removed in one call.
  @Test
  public void testClearDirectoryWhenFilesAndFolders() throws IOException {
    if (fileSystemName.equals("Windows")) {
      return;
    }
    Files.createDirectories(fileSystem.getPath(TEST_DIR_PATH));
    Files.createDirectories(fileSystem.getPath(TEST_DIR_PATH + "/empty_folder"));
    Files.createDirectories(fileSystem.getPath(TEST_DIR_PATH + "/path/to/folder"));
    Files.createFile(fileSystem.getPath(TEST_DIR_PATH + "/first_file.txt"));
    Files.createFile(fileSystem.getPath(TEST_DIR_PATH + "/path/to/folder/second_file.txt"));
    Files.createFile(fileSystem.getPath(TEST_DIR_PATH + "/path/to/folder/third_file.txt"));
    fileUtils.clearDirectory(TEST_DIR_PATH);
    assertFalse(Files.isDirectory(fileSystem.getPath(TEST_DIR_PATH + "/empty_folder")));
    assertFalse(Files.isDirectory(fileSystem.getPath(TEST_DIR_PATH + "/path/to/folder")));
    assertFalse(Files.isRegularFile(fileSystem.getPath(TEST_DIR_PATH + "/first_file.txt")));
    assertFalse(
        Files.isRegularFile(fileSystem.getPath(TEST_DIR_PATH + "/path/to/folder/second_file.txt")));
    assertFalse(
        Files.isRegularFile(fileSystem.getPath(TEST_DIR_PATH + "/path/to/folder/third_file.txt")));
    assertTrue(Files.isDirectory(fileSystem.getPath(TEST_DIR_PATH)));
  }
  // Clearing a nonexistent path is reported as a RuntimeException by the
  // unchecked variant; thrown directly on Windows to satisfy `expected`.
  @Test(expected = RuntimeException.class)
  public void testClearDirectoryUnchecked() {
    if (fileSystemName.equals("Windows")) {
      throw new RuntimeException();
    }
    fileUtils.clearDirectoryUnchecked("/nonexistent_path");
  }
  // copyDirectoryToDirectory copies the whole tree; the destination (a relative
  // path here) is created as needed.
  @Test
  public void testCopyDirectoryToDirectoryWithoutIgnored() throws IOException {
    if (fileSystemName.equals("Windows")) {
      return;
    }
    Files.createDirectories(fileSystem.getPath(TEST_DIR_PATH));
    Files.createFile(fileSystem.getPath(TEST_DIR_PATH + "/file1.txt"));
    Files.createDirectories(fileSystem.getPath(TEST_DIR_PATH + "/path/to"));
    Files.createFile(fileSystem.getPath(TEST_DIR_PATH + "/path/to/file2.txt"));
    fileUtils.copyDirectoryToDirectory(TEST_DIR_PATH, "destination_folder");
    assertTrue(Files.isRegularFile(fileSystem.getPath("destination_folder" + "/file1.txt")));
    assertTrue(
        Files.isRegularFile(fileSystem.getPath("destination_folder" + "/path/to/file2.txt")));
  }
  // Trailing varargs name entries to skip while copying: the skipped file must
  // not appear in the destination.
  @Test
  public void testCopyDirectoryToDirectoryWhenIgnoredOneFile() throws IOException {
    if (fileSystemName.equals("Windows")) {
      return;
    }
    Files.createDirectories(fileSystem.getPath(TEST_DIR_PATH));
    Files.createFile(fileSystem.getPath(TEST_DIR_PATH + "/some_file.txt"));
    Files.createFile(fileSystem.getPath(TEST_DIR_PATH + "/file_for_ignore.txt"));
    fileUtils.copyDirectoryToDirectory(TEST_DIR_PATH, "destination_folder", "file_for_ignore.txt");
    assertTrue(Files.isRegularFile(fileSystem.getPath("destination_folder" + "/some_file.txt")));
    assertFalse(
        Files.isRegularFile(fileSystem.getPath("destination_folder" + "/file_for_ignore.txt")));
  }
@Test
public void testCopyDirectoryToDirectoryWhenIgnoredTwoFiles() throws IOException {
if (fileSystemName.equals("Windows")) {
return;
}
Files.createDirectories(fileSystem.getPath(TEST_DIR_PATH));
Files.createFile(fileSystem.getPath(TEST_DIR_PATH + "/some_file.txt"));
Files.createFile(fileSystem.getPath(TEST_DIR_PATH + "/file_for_ignore1.txt"));
Files.createDirectories(fileSystem.getPath(TEST_DIR_PATH + "/path/to"));
Files.createFile(fileSystem.getPath(TEST_DIR_PATH + "/path/to/file_for_ignore2.txt"));
fileUtils.copyDirectoryToDirectory(
TEST_DIR_PATH,
"destination_folder",
"file_for_ignore1.txt",
"path/to/file_for_ignore2.txt");
assertTrue(Files.isRegularFile(fileSystem.getPath("destination_folder" + "/some_file.txt")));
assertFalse(
Files.isRegularFile(fileSystem.getPath("destination_folder" + "/file_for_ignore1.txt")));
assertFalse(
Files.isRegularFile(
fileSystem.getPath("destination_folder" + "path/to/file_for_ignore2.txt")));
}
  // Ignoring a folder skips the folder, everything inside it, and its nested
  // subfolders, while still copying the (non-ignored) parent path components.
  @Test
  public void testCopyDirectoryToDirectoryWhenIgnoredOneFolder() throws IOException {
    if (fileSystemName.equals("Windows")) {
      return;
    }
    Files.createDirectories(fileSystem.getPath(TEST_DIR_PATH));
    Files.createDirectories(fileSystem.getPath(TEST_DIR_PATH + "/some_folder"));
    Files.createFile(fileSystem.getPath(TEST_DIR_PATH + "/some_folder/some_file.txt"));
    Files.createDirectories(fileSystem.getPath(TEST_DIR_PATH + "/path/to/folder_for_ignore"));
    Files.createDirectories(
        fileSystem.getPath(TEST_DIR_PATH + "/path/to/folder_for_ignore/internal_folder"));
    Files.createFile(fileSystem.getPath(TEST_DIR_PATH + "/path/to/folder_for_ignore/file2.txt"));
    fileUtils.copyDirectoryToDirectory(
        TEST_DIR_PATH, "destination_folder", "path/to/folder_for_ignore");
    assertTrue(Files.isDirectory(fileSystem.getPath("destination_folder" + "/some_folder")));
    assertTrue(
        Files.isRegularFile(
            fileSystem.getPath("destination_folder" + "/some_folder/some_file.txt")));
    assertTrue(Files.isDirectory(fileSystem.getPath("destination_folder" + "/path")));
    assertTrue(Files.isDirectory(fileSystem.getPath("destination_folder" + "/path/to")));
    assertFalse(
        Files.isDirectory(
            fileSystem.getPath(
                "destination_folder" + "/path/to/folder_for_ignore/internal_folder")));
    assertFalse(
        Files.isDirectory(fileSystem.getPath("destination_folder" + "/path/to/folder_for_ignore")));
    assertFalse(
        Files.isRegularFile(
            fileSystem.getPath("destination_folder" + "/path/to/folder_for_ignore/file2.txt")));
  }
  // Two ignored folders: one top-level, one nested; both must be excluded.
  @Test
  public void testCopyDirectoryToDirectoryWhenIgnoredTwoFolder() throws IOException {
    if (fileSystemName.equals("Windows")) {
      return;
    }
    Files.createDirectories(fileSystem.getPath(TEST_DIR_PATH));
    Files.createDirectories(fileSystem.getPath(TEST_DIR_PATH + "/some_folder"));
    Files.createFile(fileSystem.getPath(TEST_DIR_PATH + "/some_folder/some_file.txt"));
    Files.createDirectories(fileSystem.getPath(TEST_DIR_PATH + "/folder_for_ignore1"));
    Files.createFile(fileSystem.getPath(TEST_DIR_PATH + "/folder_for_ignore1/file1.txt"));
    Files.createDirectories(fileSystem.getPath(TEST_DIR_PATH + "/path/to/folder_for_ignore2"));
    Files.createFile(fileSystem.getPath(TEST_DIR_PATH + "/path/to/folder_for_ignore2/file2.txt"));
    fileUtils.copyDirectoryToDirectory(
        TEST_DIR_PATH, "destination_folder", "folder_for_ignore1", "path/to/folder_for_ignore2");
    assertTrue(Files.isDirectory(fileSystem.getPath("destination_folder" + "/some_folder")));
    assertTrue(
        Files.isRegularFile(
            fileSystem.getPath("destination_folder" + "/some_folder/some_file.txt")));
    assertTrue(Files.isDirectory(fileSystem.getPath("destination_folder" + "/path")));
    assertTrue(Files.isDirectory(fileSystem.getPath("destination_folder" + "/path/to")));
    assertFalse(
        Files.isDirectory(fileSystem.getPath("destination_folder" + "/folder_for_ignore1")));
    assertFalse(
        Files.isDirectory(
            fileSystem.getPath("destination_folder" + "/path/to/folder_for_ignore2")));
    assertFalse(
        Files.isRegularFile(
            fileSystem.getPath("destination_folder" + "/path/to/folder_for_ignore2/file2.txt")));
  }
  // Ignore entries are treated as regex patterns: "folder_for_ignore.*" matches
  // both folders below regardless of where they sit in the tree.
  @Test
  public void testCopyDirectoryToDirectoryWhenIgnoredTwoFolderWithRegex() throws IOException {
    if (fileSystemName.equals("Windows")) {
      return;
    }
    Files.createDirectories(fileSystem.getPath(TEST_DIR_PATH));
    Files.createDirectories(fileSystem.getPath(TEST_DIR_PATH + "/some_folder"));
    Files.createFile(fileSystem.getPath(TEST_DIR_PATH + "/some_folder/some_file.txt"));
    Files.createDirectories(fileSystem.getPath(TEST_DIR_PATH + "/folder_for_ignore"));
    Files.createFile(fileSystem.getPath(TEST_DIR_PATH + "/folder_for_ignore/file1.txt"));
    Files.createDirectories(
        fileSystem.getPath(TEST_DIR_PATH + "/path/to/folder_for_ignore_with_regex"));
    Files.createFile(
        fileSystem.getPath(TEST_DIR_PATH + "/path/to/folder_for_ignore_with_regex/file2.txt"));
    fileUtils.copyDirectoryToDirectory(TEST_DIR_PATH, "destination_folder", "folder_for_ignore.*");
    assertTrue(Files.isDirectory(fileSystem.getPath("destination_folder" + "/some_folder")));
    assertTrue(
        Files.isRegularFile(
            fileSystem.getPath("destination_folder" + "/some_folder/some_file.txt")));
    assertTrue(Files.isDirectory(fileSystem.getPath("destination_folder" + "/path")));
    assertTrue(Files.isDirectory(fileSystem.getPath("destination_folder" + "/path/to")));
    assertFalse(Files.isDirectory(fileSystem.getPath("destination_folder" + "/folder_for_ignore")));
    assertFalse(
        Files.isDirectory(
            fileSystem.getPath("destination_folder" + "/path/to/folder_for_ignore_with_regex")));
    assertFalse(
        Files.isRegularFile(
            fileSystem.getPath(
                "destination_folder" + "/path/to/folder_for_ignore_with_regex/file2.txt")));
  }
  // A file and a folder ignored in the same call.
  @Test
  public void testCopyDirectoryToDirectoryWhenIgnoredFileAndFolder() throws IOException {
    if (fileSystemName.equals("Windows")) {
      return;
    }
    Files.createDirectories(fileSystem.getPath(TEST_DIR_PATH));
    Files.createDirectories(fileSystem.getPath(TEST_DIR_PATH + "/some_folder"));
    Files.createFile(fileSystem.getPath(TEST_DIR_PATH + "/some_folder/some_file.txt"));
    Files.createFile(fileSystem.getPath(TEST_DIR_PATH + "/file1.txt"));
    Files.createDirectories(fileSystem.getPath(TEST_DIR_PATH + "/path/to/folder_for_ignore"));
    Files.createFile(fileSystem.getPath(TEST_DIR_PATH + "/path/to/folder_for_ignore/file2.txt"));
    fileUtils.copyDirectoryToDirectory(
        TEST_DIR_PATH, "destination_folder", "file1.txt", "path/to/folder_for_ignore");
    assertTrue(Files.isDirectory(fileSystem.getPath("destination_folder" + "/some_folder")));
    assertTrue(
        Files.isRegularFile(
            fileSystem.getPath("destination_folder" + "/some_folder/some_file.txt")));
    assertTrue(Files.isDirectory(fileSystem.getPath("destination_folder" + "/path")));
    assertTrue(Files.isDirectory(fileSystem.getPath("destination_folder" + "/path/to")));
    assertFalse(Files.isRegularFile(fileSystem.getPath("destination_folder" + "/file1.txt")));
    assertFalse(
        Files.isDirectory(fileSystem.getPath("destination_folder" + "/path/to/folder_for_ignore")));
    assertFalse(
        Files.isRegularFile(
            fileSystem.getPath("destination_folder" + "/path/to/folder_for_ignore/file2.txt")));
  }
}
|
googleads/google-ads-java | 37,156 | google-ads-stubs-v19/src/main/java/com/google/ads/googleads/v19/errors/ConversionAdjustmentUploadErrorEnum.java | // Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/ads/googleads/v19/errors/conversion_adjustment_upload_error.proto
// Protobuf Java Version: 3.25.7
package com.google.ads.googleads.v19.errors;
/**
* <pre>
* Container for enum describing possible conversion adjustment upload errors.
* </pre>
*
* Protobuf type {@code google.ads.googleads.v19.errors.ConversionAdjustmentUploadErrorEnum}
*/
public final class ConversionAdjustmentUploadErrorEnum extends
com.google.protobuf.GeneratedMessageV3 implements
// @@protoc_insertion_point(message_implements:google.ads.googleads.v19.errors.ConversionAdjustmentUploadErrorEnum)
ConversionAdjustmentUploadErrorEnumOrBuilder {
private static final long serialVersionUID = 0L;
// Use ConversionAdjustmentUploadErrorEnum.newBuilder() to construct.
private ConversionAdjustmentUploadErrorEnum(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private ConversionAdjustmentUploadErrorEnum() {
}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(
UnusedPrivateParameter unused) {
return new ConversionAdjustmentUploadErrorEnum();
}
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return com.google.ads.googleads.v19.errors.ConversionAdjustmentUploadErrorProto.internal_static_google_ads_googleads_v19_errors_ConversionAdjustmentUploadErrorEnum_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.ads.googleads.v19.errors.ConversionAdjustmentUploadErrorProto.internal_static_google_ads_googleads_v19_errors_ConversionAdjustmentUploadErrorEnum_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.ads.googleads.v19.errors.ConversionAdjustmentUploadErrorEnum.class, com.google.ads.googleads.v19.errors.ConversionAdjustmentUploadErrorEnum.Builder.class);
}
/**
* <pre>
* Enum describing possible conversion adjustment upload errors.
* </pre>
*
* Protobuf enum {@code google.ads.googleads.v19.errors.ConversionAdjustmentUploadErrorEnum.ConversionAdjustmentUploadError}
*/
public enum ConversionAdjustmentUploadError
implements com.google.protobuf.ProtocolMessageEnum {
/**
* <pre>
* Not specified.
* </pre>
*
* <code>UNSPECIFIED = 0;</code>
*/
UNSPECIFIED(0),
/**
* <pre>
* Used for return value only. Represents value unknown in this version.
* </pre>
*
* <code>UNKNOWN = 1;</code>
*/
UNKNOWN(1),
/**
* <pre>
* Can't import events to a conversion action that was just created. Try
* importing again in 6 hours.
* </pre>
*
* <code>TOO_RECENT_CONVERSION_ACTION = 2;</code>
*/
TOO_RECENT_CONVERSION_ACTION(2),
/**
* <pre>
* The conversion was already retracted. This adjustment was not processed.
* </pre>
*
* <code>CONVERSION_ALREADY_RETRACTED = 4;</code>
*/
CONVERSION_ALREADY_RETRACTED(4),
/**
* <pre>
* The conversion for this conversion action and conversion identifier can't
* be found. Make sure your conversion identifiers are associated with the
* correct conversion action and try again.
* </pre>
*
* <code>CONVERSION_NOT_FOUND = 5;</code>
*/
CONVERSION_NOT_FOUND(5),
/**
* <pre>
* Adjustment can't be made to a conversion that occurred more than 54 days
* ago.
* </pre>
*
* <code>CONVERSION_EXPIRED = 6;</code>
*/
CONVERSION_EXPIRED(6),
/**
* <pre>
* Adjustment has an `adjustment_date_time` that occurred before the
* associated conversion. Make sure your `adjustment_date_time` is correct
* and try again.
* </pre>
*
* <code>ADJUSTMENT_PRECEDES_CONVERSION = 7;</code>
*/
ADJUSTMENT_PRECEDES_CONVERSION(7),
/**
* <pre>
* More recent adjustment `adjustment_date_time` has already been reported
* for the associated conversion. Make sure your adjustment
* `adjustment_date_time` is correct and try again.
* </pre>
*
* <code>MORE_RECENT_RESTATEMENT_FOUND = 8;</code>
*/
MORE_RECENT_RESTATEMENT_FOUND(8),
/**
* <pre>
* Adjustment can't be recorded because the conversion occurred too
* recently. Try adjusting a conversion that occurred at least 24 hours ago.
* </pre>
*
* <code>TOO_RECENT_CONVERSION = 9;</code>
*/
TOO_RECENT_CONVERSION(9),
/**
* <pre>
* Can't make an adjustment to a conversion that is set up to use the
* default value. Check your conversion action value setting and try again.
* </pre>
*
* <code>CANNOT_RESTATE_CONVERSION_ACTION_THAT_ALWAYS_USES_DEFAULT_CONVERSION_VALUE = 10;</code>
*/
CANNOT_RESTATE_CONVERSION_ACTION_THAT_ALWAYS_USES_DEFAULT_CONVERSION_VALUE(10),
/**
* <pre>
* Try uploading fewer than 2001 adjustments in a single API request.
* </pre>
*
* <code>TOO_MANY_ADJUSTMENTS_IN_REQUEST = 11;</code>
*/
TOO_MANY_ADJUSTMENTS_IN_REQUEST(11),
/**
* <pre>
* The conversion has already been adjusted the maximum number of times.
* Make sure you're only making necessary adjustment to existing conversion.
* </pre>
*
* <code>TOO_MANY_ADJUSTMENTS = 12;</code>
*/
TOO_MANY_ADJUSTMENTS(12),
/**
* <pre>
* The conversion has prior a restatement with the same
* `adjustment_date_time`. Make sure your adjustment has the correct and
* unique `adjustment_date_time` and try again.
* </pre>
*
* <code>RESTATEMENT_ALREADY_EXISTS = 13;</code>
*/
RESTATEMENT_ALREADY_EXISTS(13),
/**
* <pre>
* Imported adjustment has a duplicate conversion adjustment with same
* `adjustment_date_time`. Make sure your adjustment has the correct
* `adjustment_date_time` and try again.
* </pre>
*
* <code>DUPLICATE_ADJUSTMENT_IN_REQUEST = 14;</code>
*/
DUPLICATE_ADJUSTMENT_IN_REQUEST(14),
/**
* <pre>
* Make sure you agree to the customer data processing terms in conversion
* settings and try again.
* </pre>
*
* <code>CUSTOMER_NOT_ACCEPTED_CUSTOMER_DATA_TERMS = 15;</code>
*/
CUSTOMER_NOT_ACCEPTED_CUSTOMER_DATA_TERMS(15),
/**
* <pre>
* Can't use enhanced conversions with the specified conversion action.
* </pre>
*
* <code>CONVERSION_ACTION_NOT_ELIGIBLE_FOR_ENHANCEMENT = 16;</code>
*/
CONVERSION_ACTION_NOT_ELIGIBLE_FOR_ENHANCEMENT(16),
/**
* <pre>
* Make sure you hash user provided data using SHA-256 and ensure you are
* normalizing according to the guidelines.
* </pre>
*
* <code>INVALID_USER_IDENTIFIER = 17;</code>
*/
INVALID_USER_IDENTIFIER(17),
/**
* <pre>
* Use user provided data such as emails or phone numbers hashed using
* SHA-256 and try again.
* </pre>
*
* <code>UNSUPPORTED_USER_IDENTIFIER = 18;</code>
*/
UNSUPPORTED_USER_IDENTIFIER(18),
/**
* <pre>
* Cannot set both gclid_date_time_pair and order_id. Use only 1 type and
* try again.
* </pre>
*
* <code>GCLID_DATE_TIME_PAIR_AND_ORDER_ID_BOTH_SET = 20;</code>
*/
GCLID_DATE_TIME_PAIR_AND_ORDER_ID_BOTH_SET(20),
/**
* <pre>
* Conversion already has enhancements with the same Order ID and conversion
* action. Make sure your data is correctly configured and try again.
* </pre>
*
* <code>CONVERSION_ALREADY_ENHANCED = 21;</code>
*/
CONVERSION_ALREADY_ENHANCED(21),
/**
* <pre>
* Multiple enhancements have the same conversion action and Order ID. Make
* sure your data is correctly configured and try again.
* </pre>
*
* <code>DUPLICATE_ENHANCEMENT_IN_REQUEST = 22;</code>
*/
DUPLICATE_ENHANCEMENT_IN_REQUEST(22),
/**
* <pre>
* Enhanced conversions can't be used for this account because of Google
* customer data policies. Contact your Google representative.
* </pre>
*
* <code>CUSTOMER_DATA_POLICY_PROHIBITS_ENHANCEMENT = 23;</code>
*/
CUSTOMER_DATA_POLICY_PROHIBITS_ENHANCEMENT(23),
/**
* <pre>
* Adjustment for website conversion requires Order ID (ie, transaction ID).
* Make sure your website tags capture Order IDs and you send the same Order
* IDs with your adjustment.
* </pre>
*
* <code>MISSING_ORDER_ID_FOR_WEBPAGE = 24;</code>
*/
MISSING_ORDER_ID_FOR_WEBPAGE(24),
/**
* <pre>
* Can't use adjustment with Order IDs containing personally-identifiable
* information (PII).
* </pre>
*
* <code>ORDER_ID_CONTAINS_PII = 25;</code>
*/
ORDER_ID_CONTAINS_PII(25),
/**
* <pre>
* The provided job id in the request is not within the allowed range. A job
* ID must be a positive integer in the range [1, 2^31).
* </pre>
*
* <code>INVALID_JOB_ID = 26;</code>
*/
INVALID_JOB_ID(26),
/**
* <pre>
* The conversion action specified in the adjustment request cannot be
* found. Make sure it's available in this account.
* </pre>
*
* <code>NO_CONVERSION_ACTION_FOUND = 27;</code>
*/
NO_CONVERSION_ACTION_FOUND(27),
/**
* <pre>
* The type of the conversion action specified in the adjustment request
* isn't supported for uploading adjustments. A conversion adjustment of
* type `RETRACTION` or `RESTATEMENT` is only permitted for conversion
* actions of type `SALESFORCE`, `UPLOAD_CLICK` or `WEBPAGE`. A conversion
* adjustment of type `ENHANCEMENT` is only permitted for conversion
* actions of type `WEBPAGE`.
* </pre>
*
* <code>INVALID_CONVERSION_ACTION_TYPE = 28;</code>
*/
INVALID_CONVERSION_ACTION_TYPE(28),
UNRECOGNIZED(-1),
;
/**
* <pre>
* Not specified.
* </pre>
*
* <code>UNSPECIFIED = 0;</code>
*/
public static final int UNSPECIFIED_VALUE = 0;
/**
* <pre>
* Used for return value only. Represents value unknown in this version.
* </pre>
*
* <code>UNKNOWN = 1;</code>
*/
public static final int UNKNOWN_VALUE = 1;
/**
* <pre>
* Can't import events to a conversion action that was just created. Try
* importing again in 6 hours.
* </pre>
*
* <code>TOO_RECENT_CONVERSION_ACTION = 2;</code>
*/
public static final int TOO_RECENT_CONVERSION_ACTION_VALUE = 2;
/**
* <pre>
* The conversion was already retracted. This adjustment was not processed.
* </pre>
*
* <code>CONVERSION_ALREADY_RETRACTED = 4;</code>
*/
public static final int CONVERSION_ALREADY_RETRACTED_VALUE = 4;
/**
* <pre>
* The conversion for this conversion action and conversion identifier can't
* be found. Make sure your conversion identifiers are associated with the
* correct conversion action and try again.
* </pre>
*
* <code>CONVERSION_NOT_FOUND = 5;</code>
*/
public static final int CONVERSION_NOT_FOUND_VALUE = 5;
/**
* <pre>
* Adjustment can't be made to a conversion that occurred more than 54 days
* ago.
* </pre>
*
* <code>CONVERSION_EXPIRED = 6;</code>
*/
public static final int CONVERSION_EXPIRED_VALUE = 6;
/**
* <pre>
* Adjustment has an `adjustment_date_time` that occurred before the
* associated conversion. Make sure your `adjustment_date_time` is correct
* and try again.
* </pre>
*
* <code>ADJUSTMENT_PRECEDES_CONVERSION = 7;</code>
*/
public static final int ADJUSTMENT_PRECEDES_CONVERSION_VALUE = 7;
/**
* <pre>
* More recent adjustment `adjustment_date_time` has already been reported
* for the associated conversion. Make sure your adjustment
* `adjustment_date_time` is correct and try again.
* </pre>
*
* <code>MORE_RECENT_RESTATEMENT_FOUND = 8;</code>
*/
public static final int MORE_RECENT_RESTATEMENT_FOUND_VALUE = 8;
/**
* <pre>
* Adjustment can't be recorded because the conversion occurred too
* recently. Try adjusting a conversion that occurred at least 24 hours ago.
* </pre>
*
* <code>TOO_RECENT_CONVERSION = 9;</code>
*/
public static final int TOO_RECENT_CONVERSION_VALUE = 9;
/**
* <pre>
* Can't make an adjustment to a conversion that is set up to use the
* default value. Check your conversion action value setting and try again.
* </pre>
*
* <code>CANNOT_RESTATE_CONVERSION_ACTION_THAT_ALWAYS_USES_DEFAULT_CONVERSION_VALUE = 10;</code>
*/
public static final int CANNOT_RESTATE_CONVERSION_ACTION_THAT_ALWAYS_USES_DEFAULT_CONVERSION_VALUE_VALUE = 10;
/**
* <pre>
* Try uploading fewer than 2001 adjustments in a single API request.
* </pre>
*
* <code>TOO_MANY_ADJUSTMENTS_IN_REQUEST = 11;</code>
*/
public static final int TOO_MANY_ADJUSTMENTS_IN_REQUEST_VALUE = 11;
/**
* <pre>
* The conversion has already been adjusted the maximum number of times.
* Make sure you're only making necessary adjustment to existing conversion.
* </pre>
*
* <code>TOO_MANY_ADJUSTMENTS = 12;</code>
*/
public static final int TOO_MANY_ADJUSTMENTS_VALUE = 12;
/**
* <pre>
* The conversion has prior a restatement with the same
* `adjustment_date_time`. Make sure your adjustment has the correct and
* unique `adjustment_date_time` and try again.
* </pre>
*
* <code>RESTATEMENT_ALREADY_EXISTS = 13;</code>
*/
public static final int RESTATEMENT_ALREADY_EXISTS_VALUE = 13;
/**
* <pre>
* Imported adjustment has a duplicate conversion adjustment with same
* `adjustment_date_time`. Make sure your adjustment has the correct
* `adjustment_date_time` and try again.
* </pre>
*
* <code>DUPLICATE_ADJUSTMENT_IN_REQUEST = 14;</code>
*/
public static final int DUPLICATE_ADJUSTMENT_IN_REQUEST_VALUE = 14;
/**
* <pre>
* Make sure you agree to the customer data processing terms in conversion
* settings and try again.
* </pre>
*
* <code>CUSTOMER_NOT_ACCEPTED_CUSTOMER_DATA_TERMS = 15;</code>
*/
public static final int CUSTOMER_NOT_ACCEPTED_CUSTOMER_DATA_TERMS_VALUE = 15;
/**
* <pre>
* Can't use enhanced conversions with the specified conversion action.
* </pre>
*
* <code>CONVERSION_ACTION_NOT_ELIGIBLE_FOR_ENHANCEMENT = 16;</code>
*/
public static final int CONVERSION_ACTION_NOT_ELIGIBLE_FOR_ENHANCEMENT_VALUE = 16;
/**
* <pre>
* Make sure you hash user provided data using SHA-256 and ensure you are
* normalizing according to the guidelines.
* </pre>
*
* <code>INVALID_USER_IDENTIFIER = 17;</code>
*/
public static final int INVALID_USER_IDENTIFIER_VALUE = 17;
/**
* <pre>
* Use user provided data such as emails or phone numbers hashed using
* SHA-256 and try again.
* </pre>
*
* <code>UNSUPPORTED_USER_IDENTIFIER = 18;</code>
*/
public static final int UNSUPPORTED_USER_IDENTIFIER_VALUE = 18;
/**
* <pre>
* Cannot set both gclid_date_time_pair and order_id. Use only 1 type and
* try again.
* </pre>
*
* <code>GCLID_DATE_TIME_PAIR_AND_ORDER_ID_BOTH_SET = 20;</code>
*/
public static final int GCLID_DATE_TIME_PAIR_AND_ORDER_ID_BOTH_SET_VALUE = 20;
/**
* <pre>
* Conversion already has enhancements with the same Order ID and conversion
* action. Make sure your data is correctly configured and try again.
* </pre>
*
* <code>CONVERSION_ALREADY_ENHANCED = 21;</code>
*/
public static final int CONVERSION_ALREADY_ENHANCED_VALUE = 21;
/**
* <pre>
* Multiple enhancements have the same conversion action and Order ID. Make
* sure your data is correctly configured and try again.
* </pre>
*
* <code>DUPLICATE_ENHANCEMENT_IN_REQUEST = 22;</code>
*/
public static final int DUPLICATE_ENHANCEMENT_IN_REQUEST_VALUE = 22;
/**
* <pre>
* Enhanced conversions can't be used for this account because of Google
* customer data policies. Contact your Google representative.
* </pre>
*
* <code>CUSTOMER_DATA_POLICY_PROHIBITS_ENHANCEMENT = 23;</code>
*/
public static final int CUSTOMER_DATA_POLICY_PROHIBITS_ENHANCEMENT_VALUE = 23;
/**
* <pre>
* Adjustment for website conversion requires Order ID (ie, transaction ID).
* Make sure your website tags capture Order IDs and you send the same Order
* IDs with your adjustment.
* </pre>
*
* <code>MISSING_ORDER_ID_FOR_WEBPAGE = 24;</code>
*/
public static final int MISSING_ORDER_ID_FOR_WEBPAGE_VALUE = 24;
/**
* <pre>
* Can't use adjustment with Order IDs containing personally-identifiable
* information (PII).
* </pre>
*
* <code>ORDER_ID_CONTAINS_PII = 25;</code>
*/
public static final int ORDER_ID_CONTAINS_PII_VALUE = 25;
/**
* <pre>
* The provided job id in the request is not within the allowed range. A job
* ID must be a positive integer in the range [1, 2^31).
* </pre>
*
* <code>INVALID_JOB_ID = 26;</code>
*/
public static final int INVALID_JOB_ID_VALUE = 26;
/**
* <pre>
* The conversion action specified in the adjustment request cannot be
* found. Make sure it's available in this account.
* </pre>
*
* <code>NO_CONVERSION_ACTION_FOUND = 27;</code>
*/
public static final int NO_CONVERSION_ACTION_FOUND_VALUE = 27;
/**
* <pre>
* The type of the conversion action specified in the adjustment request
* isn't supported for uploading adjustments. A conversion adjustment of
* type `RETRACTION` or `RESTATEMENT` is only permitted for conversion
* actions of type `SALESFORCE`, `UPLOAD_CLICK` or `WEBPAGE`. A conversion
* adjustment of type `ENHANCEMENT` is only permitted for conversion
* actions of type `WEBPAGE`.
* </pre>
*
* <code>INVALID_CONVERSION_ACTION_TYPE = 28;</code>
*/
public static final int INVALID_CONVERSION_ACTION_TYPE_VALUE = 28;
public final int getNumber() {
if (this == UNRECOGNIZED) {
throw new java.lang.IllegalArgumentException(
"Can't get the number of an unknown enum value.");
}
return value;
}
/**
* @param value The numeric wire value of the corresponding enum entry.
* @return The enum associated with the given numeric wire value.
* @deprecated Use {@link #forNumber(int)} instead.
*/
@java.lang.Deprecated
public static ConversionAdjustmentUploadError valueOf(int value) {
return forNumber(value);
}
/**
* @param value The numeric wire value of the corresponding enum entry.
* @return The enum associated with the given numeric wire value.
*/
public static ConversionAdjustmentUploadError forNumber(int value) {
switch (value) {
case 0: return UNSPECIFIED;
case 1: return UNKNOWN;
case 2: return TOO_RECENT_CONVERSION_ACTION;
case 4: return CONVERSION_ALREADY_RETRACTED;
case 5: return CONVERSION_NOT_FOUND;
case 6: return CONVERSION_EXPIRED;
case 7: return ADJUSTMENT_PRECEDES_CONVERSION;
case 8: return MORE_RECENT_RESTATEMENT_FOUND;
case 9: return TOO_RECENT_CONVERSION;
case 10: return CANNOT_RESTATE_CONVERSION_ACTION_THAT_ALWAYS_USES_DEFAULT_CONVERSION_VALUE;
case 11: return TOO_MANY_ADJUSTMENTS_IN_REQUEST;
case 12: return TOO_MANY_ADJUSTMENTS;
case 13: return RESTATEMENT_ALREADY_EXISTS;
case 14: return DUPLICATE_ADJUSTMENT_IN_REQUEST;
case 15: return CUSTOMER_NOT_ACCEPTED_CUSTOMER_DATA_TERMS;
case 16: return CONVERSION_ACTION_NOT_ELIGIBLE_FOR_ENHANCEMENT;
case 17: return INVALID_USER_IDENTIFIER;
case 18: return UNSUPPORTED_USER_IDENTIFIER;
case 20: return GCLID_DATE_TIME_PAIR_AND_ORDER_ID_BOTH_SET;
case 21: return CONVERSION_ALREADY_ENHANCED;
case 22: return DUPLICATE_ENHANCEMENT_IN_REQUEST;
case 23: return CUSTOMER_DATA_POLICY_PROHIBITS_ENHANCEMENT;
case 24: return MISSING_ORDER_ID_FOR_WEBPAGE;
case 25: return ORDER_ID_CONTAINS_PII;
case 26: return INVALID_JOB_ID;
case 27: return NO_CONVERSION_ACTION_FOUND;
case 28: return INVALID_CONVERSION_ACTION_TYPE;
default: return null;
}
}
public static com.google.protobuf.Internal.EnumLiteMap<ConversionAdjustmentUploadError>
internalGetValueMap() {
return internalValueMap;
}
private static final com.google.protobuf.Internal.EnumLiteMap<
ConversionAdjustmentUploadError> internalValueMap =
new com.google.protobuf.Internal.EnumLiteMap<ConversionAdjustmentUploadError>() {
public ConversionAdjustmentUploadError findValueByNumber(int number) {
return ConversionAdjustmentUploadError.forNumber(number);
}
};
public final com.google.protobuf.Descriptors.EnumValueDescriptor
getValueDescriptor() {
if (this == UNRECOGNIZED) {
throw new java.lang.IllegalStateException(
"Can't get the descriptor of an unrecognized enum value.");
}
return getDescriptor().getValues().get(ordinal());
}
public final com.google.protobuf.Descriptors.EnumDescriptor
getDescriptorForType() {
return getDescriptor();
}
public static final com.google.protobuf.Descriptors.EnumDescriptor
getDescriptor() {
return com.google.ads.googleads.v19.errors.ConversionAdjustmentUploadErrorEnum.getDescriptor().getEnumTypes().get(0);
}
private static final ConversionAdjustmentUploadError[] VALUES = values();
public static ConversionAdjustmentUploadError valueOf(
com.google.protobuf.Descriptors.EnumValueDescriptor desc) {
if (desc.getType() != getDescriptor()) {
throw new java.lang.IllegalArgumentException(
"EnumValueDescriptor is not for this type.");
}
if (desc.getIndex() == -1) {
return UNRECOGNIZED;
}
return VALUES[desc.getIndex()];
}
private final int value;
private ConversionAdjustmentUploadError(int value) {
this.value = value;
}
// @@protoc_insertion_point(enum_scope:google.ads.googleads.v19.errors.ConversionAdjustmentUploadErrorEnum.ConversionAdjustmentUploadError)
}
  // Lazily-computed initialization state: -1 = not yet computed, 0 = not
  // initialized, 1 = initialized. (Generated code — do not hand-edit.)
  private byte memoizedIsInitialized = -1;
  @java.lang.Override
  public final boolean isInitialized() {
    byte isInitialized = memoizedIsInitialized;
    if (isInitialized == 1) return true;
    if (isInitialized == 0) return false;
    // This wrapper message declares no fields, so it is always initialized;
    // cache the result for subsequent calls.
    memoizedIsInitialized = 1;
    return true;
  }
  @java.lang.Override
  public void writeTo(com.google.protobuf.CodedOutputStream output)
      throws java.io.IOException {
    // No declared fields to serialize — only unknown fields (if any) are
    // written to the wire.
    getUnknownFields().writeTo(output);
  }
  @java.lang.Override
  public int getSerializedSize() {
    // Return the cached size when available; -1 means "not yet computed".
    int size = memoizedSize;
    if (size != -1) return size;
    size = 0;
    // The message declares no fields, so the serialized size is just the
    // size of any retained unknown fields.
    size += getUnknownFields().getSerializedSize();
    memoizedSize = size;
    return size;
  }
  @java.lang.Override
  public boolean equals(final java.lang.Object obj) {
    if (obj == this) {
     return true;
    }
    if (!(obj instanceof com.google.ads.googleads.v19.errors.ConversionAdjustmentUploadErrorEnum)) {
      return super.equals(obj);
    }
    com.google.ads.googleads.v19.errors.ConversionAdjustmentUploadErrorEnum other = (com.google.ads.googleads.v19.errors.ConversionAdjustmentUploadErrorEnum) obj;
    // With no declared fields, equality reduces to comparing unknown fields.
    if (!getUnknownFields().equals(other.getUnknownFields())) return false;
    return true;
  }
  @java.lang.Override
  public int hashCode() {
    // 0 is used as the "not yet computed" sentinel for the cached hash.
    if (memoizedHashCode != 0) {
      return memoizedHashCode;
    }
    int hash = 41;
    // Hash mixes the message descriptor and the unknown fields only, since
    // this message declares no fields of its own.
    hash = (19 * hash) + getDescriptor().hashCode();
    hash = (29 * hash) + getUnknownFields().hashCode();
    memoizedHashCode = hash;
    return hash;
  }
  // Standard generated static parse entry points. All overloads delegate to
  // PARSER (below), differing only in input source (ByteBuffer, ByteString,
  // byte[], InputStream, CodedInputStream) and whether an extension registry
  // is supplied. The *delimitedFrom variants read a length-prefixed message.
  public static com.google.ads.googleads.v19.errors.ConversionAdjustmentUploadErrorEnum parseFrom(
      java.nio.ByteBuffer data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.ads.googleads.v19.errors.ConversionAdjustmentUploadErrorEnum parseFrom(
      java.nio.ByteBuffer data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.ads.googleads.v19.errors.ConversionAdjustmentUploadErrorEnum parseFrom(
      com.google.protobuf.ByteString data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.ads.googleads.v19.errors.ConversionAdjustmentUploadErrorEnum parseFrom(
      com.google.protobuf.ByteString data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.ads.googleads.v19.errors.ConversionAdjustmentUploadErrorEnum parseFrom(byte[] data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.ads.googleads.v19.errors.ConversionAdjustmentUploadErrorEnum parseFrom(
      byte[] data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  // Stream-based overloads wrap IOExceptions via the GeneratedMessageV3
  // helpers rather than calling PARSER directly.
  public static com.google.ads.googleads.v19.errors.ConversionAdjustmentUploadErrorEnum parseFrom(java.io.InputStream input)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3
        .parseWithIOException(PARSER, input);
  }
  public static com.google.ads.googleads.v19.errors.ConversionAdjustmentUploadErrorEnum parseFrom(
      java.io.InputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3
        .parseWithIOException(PARSER, input, extensionRegistry);
  }
  public static com.google.ads.googleads.v19.errors.ConversionAdjustmentUploadErrorEnum parseDelimitedFrom(java.io.InputStream input)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3
        .parseDelimitedWithIOException(PARSER, input);
  }
  public static com.google.ads.googleads.v19.errors.ConversionAdjustmentUploadErrorEnum parseDelimitedFrom(
      java.io.InputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3
        .parseDelimitedWithIOException(PARSER, input, extensionRegistry);
  }
  public static com.google.ads.googleads.v19.errors.ConversionAdjustmentUploadErrorEnum parseFrom(
      com.google.protobuf.CodedInputStream input)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3
        .parseWithIOException(PARSER, input);
  }
  public static com.google.ads.googleads.v19.errors.ConversionAdjustmentUploadErrorEnum parseFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3
        .parseWithIOException(PARSER, input, extensionRegistry);
  }
  @java.lang.Override
  public Builder newBuilderForType() { return newBuilder(); }
  // Creates a fresh builder for this message type.
  public static Builder newBuilder() {
    return DEFAULT_INSTANCE.toBuilder();
  }
  // Creates a builder pre-populated from {@code prototype}.
  public static Builder newBuilder(com.google.ads.googleads.v19.errors.ConversionAdjustmentUploadErrorEnum prototype) {
    return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
  }
  @java.lang.Override
  public Builder toBuilder() {
    // Skip the mergeFrom call when converting the default instance — there is
    // nothing to copy.
    return this == DEFAULT_INSTANCE
        ? new Builder() : new Builder().mergeFrom(this);
  }
  @java.lang.Override
  protected Builder newBuilderForType(
      com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
    Builder builder = new Builder(parent);
    return builder;
  }
  /**
   * <pre>
   * Container for enum describing possible conversion adjustment upload errors.
   * </pre>
   *
   * Protobuf type {@code google.ads.googleads.v19.errors.ConversionAdjustmentUploadErrorEnum}
   */
  public static final class Builder extends
      com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements
      // @@protoc_insertion_point(builder_implements:google.ads.googleads.v19.errors.ConversionAdjustmentUploadErrorEnum)
      com.google.ads.googleads.v19.errors.ConversionAdjustmentUploadErrorEnumOrBuilder {
    // Builder for a field-less wrapper message: it carries no state of its
    // own beyond unknown fields, so most methods simply delegate to the
    // GeneratedMessageV3.Builder superclass.
    public static final com.google.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return com.google.ads.googleads.v19.errors.ConversionAdjustmentUploadErrorProto.internal_static_google_ads_googleads_v19_errors_ConversionAdjustmentUploadErrorEnum_descriptor;
    }
    @java.lang.Override
    protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return com.google.ads.googleads.v19.errors.ConversionAdjustmentUploadErrorProto.internal_static_google_ads_googleads_v19_errors_ConversionAdjustmentUploadErrorEnum_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              com.google.ads.googleads.v19.errors.ConversionAdjustmentUploadErrorEnum.class, com.google.ads.googleads.v19.errors.ConversionAdjustmentUploadErrorEnum.Builder.class);
    }
    // Construct using com.google.ads.googleads.v19.errors.ConversionAdjustmentUploadErrorEnum.newBuilder()
    private Builder() {
    }
    private Builder(
        com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
      super(parent);
    }
    @java.lang.Override
    public Builder clear() {
      // No fields to reset; only superclass state (unknown fields) is cleared.
      super.clear();
      return this;
    }
    @java.lang.Override
    public com.google.protobuf.Descriptors.Descriptor
        getDescriptorForType() {
      return com.google.ads.googleads.v19.errors.ConversionAdjustmentUploadErrorProto.internal_static_google_ads_googleads_v19_errors_ConversionAdjustmentUploadErrorEnum_descriptor;
    }
    @java.lang.Override
    public com.google.ads.googleads.v19.errors.ConversionAdjustmentUploadErrorEnum getDefaultInstanceForType() {
      return com.google.ads.googleads.v19.errors.ConversionAdjustmentUploadErrorEnum.getDefaultInstance();
    }
    @java.lang.Override
    public com.google.ads.googleads.v19.errors.ConversionAdjustmentUploadErrorEnum build() {
      com.google.ads.googleads.v19.errors.ConversionAdjustmentUploadErrorEnum result = buildPartial();
      if (!result.isInitialized()) {
        throw newUninitializedMessageException(result);
      }
      return result;
    }
    @java.lang.Override
    public com.google.ads.googleads.v19.errors.ConversionAdjustmentUploadErrorEnum buildPartial() {
      // No field values to copy into the result; just construct and notify.
      com.google.ads.googleads.v19.errors.ConversionAdjustmentUploadErrorEnum result = new com.google.ads.googleads.v19.errors.ConversionAdjustmentUploadErrorEnum(this);
      onBuilt();
      return result;
    }
    @java.lang.Override
    public Builder clone() {
      return super.clone();
    }
    @java.lang.Override
    public Builder setField(
        com.google.protobuf.Descriptors.FieldDescriptor field,
        java.lang.Object value) {
      return super.setField(field, value);
    }
    @java.lang.Override
    public Builder clearField(
        com.google.protobuf.Descriptors.FieldDescriptor field) {
      return super.clearField(field);
    }
    @java.lang.Override
    public Builder clearOneof(
        com.google.protobuf.Descriptors.OneofDescriptor oneof) {
      return super.clearOneof(oneof);
    }
    @java.lang.Override
    public Builder setRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field,
        int index, java.lang.Object value) {
      return super.setRepeatedField(field, index, value);
    }
    @java.lang.Override
    public Builder addRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field,
        java.lang.Object value) {
      return super.addRepeatedField(field, value);
    }
    @java.lang.Override
    public Builder mergeFrom(com.google.protobuf.Message other) {
      // Dispatch to the type-specific merge when possible.
      if (other instanceof com.google.ads.googleads.v19.errors.ConversionAdjustmentUploadErrorEnum) {
        return mergeFrom((com.google.ads.googleads.v19.errors.ConversionAdjustmentUploadErrorEnum)other);
      } else {
        super.mergeFrom(other);
        return this;
      }
    }
    public Builder mergeFrom(com.google.ads.googleads.v19.errors.ConversionAdjustmentUploadErrorEnum other) {
      if (other == com.google.ads.googleads.v19.errors.ConversionAdjustmentUploadErrorEnum.getDefaultInstance()) return this;
      // Only unknown fields can differ, so merging those is sufficient.
      this.mergeUnknownFields(other.getUnknownFields());
      onChanged();
      return this;
    }
    @java.lang.Override
    public final boolean isInitialized() {
      return true;
    }
    @java.lang.Override
    public Builder mergeFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      if (extensionRegistry == null) {
        throw new java.lang.NullPointerException();
      }
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          // No declared fields: every tag either ends the stream (0) or is
          // routed to the unknown-field set by the default case.
          switch (tag) {
            case 0:
              done = true;
              break;
            default: {
              if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                done = true; // was an endgroup tag
              }
              break;
            } // default:
          } // switch (tag)
        } // while (!done)
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.unwrapIOException();
      } finally {
        onChanged();
      } // finally
      return this;
    }
    @java.lang.Override
    public final Builder setUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.setUnknownFields(unknownFields);
    }
    @java.lang.Override
    public final Builder mergeUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.mergeUnknownFields(unknownFields);
    }

    // @@protoc_insertion_point(builder_scope:google.ads.googleads.v19.errors.ConversionAdjustmentUploadErrorEnum)
  }
  // @@protoc_insertion_point(class_scope:google.ads.googleads.v19.errors.ConversionAdjustmentUploadErrorEnum)
  // Singleton default instance shared by all callers; created eagerly at
  // class-load time.
  private static final com.google.ads.googleads.v19.errors.ConversionAdjustmentUploadErrorEnum DEFAULT_INSTANCE;
  static {
    DEFAULT_INSTANCE = new com.google.ads.googleads.v19.errors.ConversionAdjustmentUploadErrorEnum();
  }

  public static com.google.ads.googleads.v19.errors.ConversionAdjustmentUploadErrorEnum getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }

  // Wire parser backing all the static parseFrom overloads above. On parse
  // failure the partially-built message is attached to the thrown
  // InvalidProtocolBufferException.
  private static final com.google.protobuf.Parser<ConversionAdjustmentUploadErrorEnum>
      PARSER = new com.google.protobuf.AbstractParser<ConversionAdjustmentUploadErrorEnum>() {
    @java.lang.Override
    public ConversionAdjustmentUploadErrorEnum parsePartialFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      Builder builder = newBuilder();
      try {
        builder.mergeFrom(input, extensionRegistry);
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.setUnfinishedMessage(builder.buildPartial());
      } catch (com.google.protobuf.UninitializedMessageException e) {
        throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
      } catch (java.io.IOException e) {
        throw new com.google.protobuf.InvalidProtocolBufferException(e)
            .setUnfinishedMessage(builder.buildPartial());
      }
      return builder.buildPartial();
    }
  };

  public static com.google.protobuf.Parser<ConversionAdjustmentUploadErrorEnum> parser() {
    return PARSER;
  }

  @java.lang.Override
  public com.google.protobuf.Parser<ConversionAdjustmentUploadErrorEnum> getParserForType() {
    return PARSER;
  }

  @java.lang.Override
  public com.google.ads.googleads.v19.errors.ConversionAdjustmentUploadErrorEnum getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }
}
|
googleads/google-ads-java | 37,156 | google-ads-stubs-v20/src/main/java/com/google/ads/googleads/v20/errors/ConversionAdjustmentUploadErrorEnum.java | // Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/ads/googleads/v20/errors/conversion_adjustment_upload_error.proto
// Protobuf Java Version: 3.25.7
package com.google.ads.googleads.v20.errors;
/**
* <pre>
* Container for enum describing possible conversion adjustment upload errors.
* </pre>
*
* Protobuf type {@code google.ads.googleads.v20.errors.ConversionAdjustmentUploadErrorEnum}
*/
public final class ConversionAdjustmentUploadErrorEnum extends
com.google.protobuf.GeneratedMessageV3 implements
// @@protoc_insertion_point(message_implements:google.ads.googleads.v20.errors.ConversionAdjustmentUploadErrorEnum)
ConversionAdjustmentUploadErrorEnumOrBuilder {
  private static final long serialVersionUID = 0L;
  // Use ConversionAdjustmentUploadErrorEnum.newBuilder() to construct.
  private ConversionAdjustmentUploadErrorEnum(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
    super(builder);
  }
  // No-arg constructor used for the default instance; the message declares
  // no fields, so there is nothing to initialize.
  private ConversionAdjustmentUploadErrorEnum() {
  }

  @java.lang.Override
  @SuppressWarnings({"unused"})
  protected java.lang.Object newInstance(
      UnusedPrivateParameter unused) {
    // Factory hook used internally by the protobuf runtime.
    return new ConversionAdjustmentUploadErrorEnum();
  }
  // Returns the descriptor for this message type, as registered in the
  // generated ConversionAdjustmentUploadErrorProto holder class.
  public static final com.google.protobuf.Descriptors.Descriptor
      getDescriptor() {
    return com.google.ads.googleads.v20.errors.ConversionAdjustmentUploadErrorProto.internal_static_google_ads_googleads_v20_errors_ConversionAdjustmentUploadErrorEnum_descriptor;
  }
  @java.lang.Override
  protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internalGetFieldAccessorTable() {
    return com.google.ads.googleads.v20.errors.ConversionAdjustmentUploadErrorProto.internal_static_google_ads_googleads_v20_errors_ConversionAdjustmentUploadErrorEnum_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            com.google.ads.googleads.v20.errors.ConversionAdjustmentUploadErrorEnum.class, com.google.ads.googleads.v20.errors.ConversionAdjustmentUploadErrorEnum.Builder.class);
  }
/**
* <pre>
* Enum describing possible conversion adjustment upload errors.
* </pre>
*
* Protobuf enum {@code google.ads.googleads.v20.errors.ConversionAdjustmentUploadErrorEnum.ConversionAdjustmentUploadError}
*/
public enum ConversionAdjustmentUploadError
implements com.google.protobuf.ProtocolMessageEnum {
/**
* <pre>
* Not specified.
* </pre>
*
* <code>UNSPECIFIED = 0;</code>
*/
UNSPECIFIED(0),
/**
* <pre>
* Used for return value only. Represents value unknown in this version.
* </pre>
*
* <code>UNKNOWN = 1;</code>
*/
UNKNOWN(1),
/**
* <pre>
* Can't import events to a conversion action that was just created. Try
* importing again in 6 hours.
* </pre>
*
* <code>TOO_RECENT_CONVERSION_ACTION = 2;</code>
*/
TOO_RECENT_CONVERSION_ACTION(2),
/**
* <pre>
* The conversion was already retracted. This adjustment was not processed.
* </pre>
*
* <code>CONVERSION_ALREADY_RETRACTED = 4;</code>
*/
CONVERSION_ALREADY_RETRACTED(4),
/**
* <pre>
* The conversion for this conversion action and conversion identifier can't
* be found. Make sure your conversion identifiers are associated with the
* correct conversion action and try again.
* </pre>
*
* <code>CONVERSION_NOT_FOUND = 5;</code>
*/
CONVERSION_NOT_FOUND(5),
/**
* <pre>
* Adjustment can't be made to a conversion that occurred more than 54 days
* ago.
* </pre>
*
* <code>CONVERSION_EXPIRED = 6;</code>
*/
CONVERSION_EXPIRED(6),
/**
* <pre>
* Adjustment has an `adjustment_date_time` that occurred before the
* associated conversion. Make sure your `adjustment_date_time` is correct
* and try again.
* </pre>
*
* <code>ADJUSTMENT_PRECEDES_CONVERSION = 7;</code>
*/
ADJUSTMENT_PRECEDES_CONVERSION(7),
/**
* <pre>
* More recent adjustment `adjustment_date_time` has already been reported
* for the associated conversion. Make sure your adjustment
* `adjustment_date_time` is correct and try again.
* </pre>
*
* <code>MORE_RECENT_RESTATEMENT_FOUND = 8;</code>
*/
MORE_RECENT_RESTATEMENT_FOUND(8),
/**
* <pre>
* Adjustment can't be recorded because the conversion occurred too
* recently. Try adjusting a conversion that occurred at least 24 hours ago.
* </pre>
*
* <code>TOO_RECENT_CONVERSION = 9;</code>
*/
TOO_RECENT_CONVERSION(9),
/**
* <pre>
* Can't make an adjustment to a conversion that is set up to use the
* default value. Check your conversion action value setting and try again.
* </pre>
*
* <code>CANNOT_RESTATE_CONVERSION_ACTION_THAT_ALWAYS_USES_DEFAULT_CONVERSION_VALUE = 10;</code>
*/
CANNOT_RESTATE_CONVERSION_ACTION_THAT_ALWAYS_USES_DEFAULT_CONVERSION_VALUE(10),
/**
* <pre>
* Try uploading fewer than 2001 adjustments in a single API request.
* </pre>
*
* <code>TOO_MANY_ADJUSTMENTS_IN_REQUEST = 11;</code>
*/
TOO_MANY_ADJUSTMENTS_IN_REQUEST(11),
/**
* <pre>
* The conversion has already been adjusted the maximum number of times.
* Make sure you're only making necessary adjustment to existing conversion.
* </pre>
*
* <code>TOO_MANY_ADJUSTMENTS = 12;</code>
*/
TOO_MANY_ADJUSTMENTS(12),
/**
* <pre>
* The conversion has prior a restatement with the same
* `adjustment_date_time`. Make sure your adjustment has the correct and
* unique `adjustment_date_time` and try again.
* </pre>
*
* <code>RESTATEMENT_ALREADY_EXISTS = 13;</code>
*/
RESTATEMENT_ALREADY_EXISTS(13),
/**
* <pre>
* Imported adjustment has a duplicate conversion adjustment with same
* `adjustment_date_time`. Make sure your adjustment has the correct
* `adjustment_date_time` and try again.
* </pre>
*
* <code>DUPLICATE_ADJUSTMENT_IN_REQUEST = 14;</code>
*/
DUPLICATE_ADJUSTMENT_IN_REQUEST(14),
/**
* <pre>
* Make sure you agree to the customer data processing terms in conversion
* settings and try again.
* </pre>
*
* <code>CUSTOMER_NOT_ACCEPTED_CUSTOMER_DATA_TERMS = 15;</code>
*/
CUSTOMER_NOT_ACCEPTED_CUSTOMER_DATA_TERMS(15),
/**
* <pre>
* Can't use enhanced conversions with the specified conversion action.
* </pre>
*
* <code>CONVERSION_ACTION_NOT_ELIGIBLE_FOR_ENHANCEMENT = 16;</code>
*/
CONVERSION_ACTION_NOT_ELIGIBLE_FOR_ENHANCEMENT(16),
/**
* <pre>
* Make sure you hash user provided data using SHA-256 and ensure you are
* normalizing according to the guidelines.
* </pre>
*
* <code>INVALID_USER_IDENTIFIER = 17;</code>
*/
INVALID_USER_IDENTIFIER(17),
/**
* <pre>
* Use user provided data such as emails or phone numbers hashed using
* SHA-256 and try again.
* </pre>
*
* <code>UNSUPPORTED_USER_IDENTIFIER = 18;</code>
*/
UNSUPPORTED_USER_IDENTIFIER(18),
/**
* <pre>
* Cannot set both gclid_date_time_pair and order_id. Use only 1 type and
* try again.
* </pre>
*
* <code>GCLID_DATE_TIME_PAIR_AND_ORDER_ID_BOTH_SET = 20;</code>
*/
GCLID_DATE_TIME_PAIR_AND_ORDER_ID_BOTH_SET(20),
/**
* <pre>
* Conversion already has enhancements with the same Order ID and conversion
* action. Make sure your data is correctly configured and try again.
* </pre>
*
* <code>CONVERSION_ALREADY_ENHANCED = 21;</code>
*/
CONVERSION_ALREADY_ENHANCED(21),
/**
* <pre>
* Multiple enhancements have the same conversion action and Order ID. Make
* sure your data is correctly configured and try again.
* </pre>
*
* <code>DUPLICATE_ENHANCEMENT_IN_REQUEST = 22;</code>
*/
DUPLICATE_ENHANCEMENT_IN_REQUEST(22),
/**
* <pre>
* Enhanced conversions can't be used for this account because of Google
* customer data policies. Contact your Google representative.
* </pre>
*
* <code>CUSTOMER_DATA_POLICY_PROHIBITS_ENHANCEMENT = 23;</code>
*/
CUSTOMER_DATA_POLICY_PROHIBITS_ENHANCEMENT(23),
/**
* <pre>
* Adjustment for website conversion requires Order ID (ie, transaction ID).
* Make sure your website tags capture Order IDs and you send the same Order
* IDs with your adjustment.
* </pre>
*
* <code>MISSING_ORDER_ID_FOR_WEBPAGE = 24;</code>
*/
MISSING_ORDER_ID_FOR_WEBPAGE(24),
/**
* <pre>
* Can't use adjustment with Order IDs containing personally-identifiable
* information (PII).
* </pre>
*
* <code>ORDER_ID_CONTAINS_PII = 25;</code>
*/
ORDER_ID_CONTAINS_PII(25),
/**
* <pre>
* The provided job id in the request is not within the allowed range. A job
* ID must be a positive integer in the range [1, 2^31).
* </pre>
*
* <code>INVALID_JOB_ID = 26;</code>
*/
INVALID_JOB_ID(26),
/**
* <pre>
* The conversion action specified in the adjustment request cannot be
* found. Make sure it's available in this account.
* </pre>
*
* <code>NO_CONVERSION_ACTION_FOUND = 27;</code>
*/
NO_CONVERSION_ACTION_FOUND(27),
/**
* <pre>
* The type of the conversion action specified in the adjustment request
* isn't supported for uploading adjustments. A conversion adjustment of
* type `RETRACTION` or `RESTATEMENT` is only permitted for conversion
* actions of type `SALESFORCE`, `UPLOAD_CLICK` or `WEBPAGE`. A conversion
* adjustment of type `ENHANCEMENT` is only permitted for conversion
* actions of type `WEBPAGE`.
* </pre>
*
* <code>INVALID_CONVERSION_ACTION_TYPE = 28;</code>
*/
INVALID_CONVERSION_ACTION_TYPE(28),
UNRECOGNIZED(-1),
;
/**
* <pre>
* Not specified.
* </pre>
*
* <code>UNSPECIFIED = 0;</code>
*/
public static final int UNSPECIFIED_VALUE = 0;
/**
* <pre>
* Used for return value only. Represents value unknown in this version.
* </pre>
*
* <code>UNKNOWN = 1;</code>
*/
public static final int UNKNOWN_VALUE = 1;
/**
* <pre>
* Can't import events to a conversion action that was just created. Try
* importing again in 6 hours.
* </pre>
*
* <code>TOO_RECENT_CONVERSION_ACTION = 2;</code>
*/
public static final int TOO_RECENT_CONVERSION_ACTION_VALUE = 2;
/**
* <pre>
* The conversion was already retracted. This adjustment was not processed.
* </pre>
*
* <code>CONVERSION_ALREADY_RETRACTED = 4;</code>
*/
public static final int CONVERSION_ALREADY_RETRACTED_VALUE = 4;
/**
* <pre>
* The conversion for this conversion action and conversion identifier can't
* be found. Make sure your conversion identifiers are associated with the
* correct conversion action and try again.
* </pre>
*
* <code>CONVERSION_NOT_FOUND = 5;</code>
*/
public static final int CONVERSION_NOT_FOUND_VALUE = 5;
/**
* <pre>
* Adjustment can't be made to a conversion that occurred more than 54 days
* ago.
* </pre>
*
* <code>CONVERSION_EXPIRED = 6;</code>
*/
public static final int CONVERSION_EXPIRED_VALUE = 6;
/**
* <pre>
* Adjustment has an `adjustment_date_time` that occurred before the
* associated conversion. Make sure your `adjustment_date_time` is correct
* and try again.
* </pre>
*
* <code>ADJUSTMENT_PRECEDES_CONVERSION = 7;</code>
*/
public static final int ADJUSTMENT_PRECEDES_CONVERSION_VALUE = 7;
/**
* <pre>
* More recent adjustment `adjustment_date_time` has already been reported
* for the associated conversion. Make sure your adjustment
* `adjustment_date_time` is correct and try again.
* </pre>
*
* <code>MORE_RECENT_RESTATEMENT_FOUND = 8;</code>
*/
public static final int MORE_RECENT_RESTATEMENT_FOUND_VALUE = 8;
/**
* <pre>
* Adjustment can't be recorded because the conversion occurred too
* recently. Try adjusting a conversion that occurred at least 24 hours ago.
* </pre>
*
* <code>TOO_RECENT_CONVERSION = 9;</code>
*/
public static final int TOO_RECENT_CONVERSION_VALUE = 9;
/**
* <pre>
* Can't make an adjustment to a conversion that is set up to use the
* default value. Check your conversion action value setting and try again.
* </pre>
*
* <code>CANNOT_RESTATE_CONVERSION_ACTION_THAT_ALWAYS_USES_DEFAULT_CONVERSION_VALUE = 10;</code>
*/
public static final int CANNOT_RESTATE_CONVERSION_ACTION_THAT_ALWAYS_USES_DEFAULT_CONVERSION_VALUE_VALUE = 10;
/**
* <pre>
* Try uploading fewer than 2001 adjustments in a single API request.
* </pre>
*
* <code>TOO_MANY_ADJUSTMENTS_IN_REQUEST = 11;</code>
*/
public static final int TOO_MANY_ADJUSTMENTS_IN_REQUEST_VALUE = 11;
/**
* <pre>
* The conversion has already been adjusted the maximum number of times.
* Make sure you're only making necessary adjustment to existing conversion.
* </pre>
*
* <code>TOO_MANY_ADJUSTMENTS = 12;</code>
*/
public static final int TOO_MANY_ADJUSTMENTS_VALUE = 12;
/**
* <pre>
* The conversion has prior a restatement with the same
* `adjustment_date_time`. Make sure your adjustment has the correct and
* unique `adjustment_date_time` and try again.
* </pre>
*
* <code>RESTATEMENT_ALREADY_EXISTS = 13;</code>
*/
public static final int RESTATEMENT_ALREADY_EXISTS_VALUE = 13;
/**
* <pre>
* Imported adjustment has a duplicate conversion adjustment with same
* `adjustment_date_time`. Make sure your adjustment has the correct
* `adjustment_date_time` and try again.
* </pre>
*
* <code>DUPLICATE_ADJUSTMENT_IN_REQUEST = 14;</code>
*/
public static final int DUPLICATE_ADJUSTMENT_IN_REQUEST_VALUE = 14;
/**
* <pre>
* Make sure you agree to the customer data processing terms in conversion
* settings and try again.
* </pre>
*
* <code>CUSTOMER_NOT_ACCEPTED_CUSTOMER_DATA_TERMS = 15;</code>
*/
public static final int CUSTOMER_NOT_ACCEPTED_CUSTOMER_DATA_TERMS_VALUE = 15;
/**
* <pre>
* Can't use enhanced conversions with the specified conversion action.
* </pre>
*
* <code>CONVERSION_ACTION_NOT_ELIGIBLE_FOR_ENHANCEMENT = 16;</code>
*/
public static final int CONVERSION_ACTION_NOT_ELIGIBLE_FOR_ENHANCEMENT_VALUE = 16;
/**
* <pre>
* Make sure you hash user provided data using SHA-256 and ensure you are
* normalizing according to the guidelines.
* </pre>
*
* <code>INVALID_USER_IDENTIFIER = 17;</code>
*/
public static final int INVALID_USER_IDENTIFIER_VALUE = 17;
/**
* <pre>
* Use user provided data such as emails or phone numbers hashed using
* SHA-256 and try again.
* </pre>
*
* <code>UNSUPPORTED_USER_IDENTIFIER = 18;</code>
*/
public static final int UNSUPPORTED_USER_IDENTIFIER_VALUE = 18;
/**
* <pre>
* Cannot set both gclid_date_time_pair and order_id. Use only 1 type and
* try again.
* </pre>
*
* <code>GCLID_DATE_TIME_PAIR_AND_ORDER_ID_BOTH_SET = 20;</code>
*/
public static final int GCLID_DATE_TIME_PAIR_AND_ORDER_ID_BOTH_SET_VALUE = 20;
/**
* <pre>
* Conversion already has enhancements with the same Order ID and conversion
* action. Make sure your data is correctly configured and try again.
* </pre>
*
* <code>CONVERSION_ALREADY_ENHANCED = 21;</code>
*/
public static final int CONVERSION_ALREADY_ENHANCED_VALUE = 21;
/**
* <pre>
* Multiple enhancements have the same conversion action and Order ID. Make
* sure your data is correctly configured and try again.
* </pre>
*
* <code>DUPLICATE_ENHANCEMENT_IN_REQUEST = 22;</code>
*/
public static final int DUPLICATE_ENHANCEMENT_IN_REQUEST_VALUE = 22;
/**
* <pre>
* Enhanced conversions can't be used for this account because of Google
* customer data policies. Contact your Google representative.
* </pre>
*
* <code>CUSTOMER_DATA_POLICY_PROHIBITS_ENHANCEMENT = 23;</code>
*/
public static final int CUSTOMER_DATA_POLICY_PROHIBITS_ENHANCEMENT_VALUE = 23;
/**
* <pre>
* Adjustment for website conversion requires Order ID (ie, transaction ID).
* Make sure your website tags capture Order IDs and you send the same Order
* IDs with your adjustment.
* </pre>
*
* <code>MISSING_ORDER_ID_FOR_WEBPAGE = 24;</code>
*/
public static final int MISSING_ORDER_ID_FOR_WEBPAGE_VALUE = 24;
/**
* <pre>
* Can't use adjustment with Order IDs containing personally-identifiable
* information (PII).
* </pre>
*
* <code>ORDER_ID_CONTAINS_PII = 25;</code>
*/
public static final int ORDER_ID_CONTAINS_PII_VALUE = 25;
/**
* <pre>
* The provided job id in the request is not within the allowed range. A job
* ID must be a positive integer in the range [1, 2^31).
* </pre>
*
* <code>INVALID_JOB_ID = 26;</code>
*/
public static final int INVALID_JOB_ID_VALUE = 26;
/**
* <pre>
* The conversion action specified in the adjustment request cannot be
* found. Make sure it's available in this account.
* </pre>
*
* <code>NO_CONVERSION_ACTION_FOUND = 27;</code>
*/
public static final int NO_CONVERSION_ACTION_FOUND_VALUE = 27;
/**
* <pre>
* The type of the conversion action specified in the adjustment request
* isn't supported for uploading adjustments. A conversion adjustment of
* type `RETRACTION` or `RESTATEMENT` is only permitted for conversion
* actions of type `SALESFORCE`, `UPLOAD_CLICK` or `WEBPAGE`. A conversion
* adjustment of type `ENHANCEMENT` is only permitted for conversion
* actions of type `WEBPAGE`.
* </pre>
*
* <code>INVALID_CONVERSION_ACTION_TYPE = 28;</code>
*/
public static final int INVALID_CONVERSION_ACTION_TYPE_VALUE = 28;
    /**
     * Returns the numeric wire value of this enum constant.
     *
     * @throws java.lang.IllegalArgumentException if this is {@link #UNRECOGNIZED},
     *     which has no defined wire value.
     */
    public final int getNumber() {
      if (this == UNRECOGNIZED) {
        throw new java.lang.IllegalArgumentException(
            "Can't get the number of an unknown enum value.");
      }
      return value;
    }
    /**
     * Returns the enum constant for the given numeric wire value.
     *
     * @param value The numeric wire value of the corresponding enum entry.
     * @return The enum associated with the given numeric wire value.
     * @deprecated Use {@link #forNumber(int)} instead.
     */
    @java.lang.Deprecated
    public static ConversionAdjustmentUploadError valueOf(int value) {
      return forNumber(value);
    }
    /**
     * Returns the enum constant for the given numeric wire value, or
     * {@code null} if the number is unknown. Note that the numbers 3 and 19
     * are deliberately absent — they have no constant in this enum.
     *
     * @param value The numeric wire value of the corresponding enum entry.
     * @return The enum associated with the given numeric wire value.
     */
    public static ConversionAdjustmentUploadError forNumber(int value) {
      switch (value) {
        case 0: return UNSPECIFIED;
        case 1: return UNKNOWN;
        case 2: return TOO_RECENT_CONVERSION_ACTION;
        case 4: return CONVERSION_ALREADY_RETRACTED;
        case 5: return CONVERSION_NOT_FOUND;
        case 6: return CONVERSION_EXPIRED;
        case 7: return ADJUSTMENT_PRECEDES_CONVERSION;
        case 8: return MORE_RECENT_RESTATEMENT_FOUND;
        case 9: return TOO_RECENT_CONVERSION;
        case 10: return CANNOT_RESTATE_CONVERSION_ACTION_THAT_ALWAYS_USES_DEFAULT_CONVERSION_VALUE;
        case 11: return TOO_MANY_ADJUSTMENTS_IN_REQUEST;
        case 12: return TOO_MANY_ADJUSTMENTS;
        case 13: return RESTATEMENT_ALREADY_EXISTS;
        case 14: return DUPLICATE_ADJUSTMENT_IN_REQUEST;
        case 15: return CUSTOMER_NOT_ACCEPTED_CUSTOMER_DATA_TERMS;
        case 16: return CONVERSION_ACTION_NOT_ELIGIBLE_FOR_ENHANCEMENT;
        case 17: return INVALID_USER_IDENTIFIER;
        case 18: return UNSUPPORTED_USER_IDENTIFIER;
        case 20: return GCLID_DATE_TIME_PAIR_AND_ORDER_ID_BOTH_SET;
        case 21: return CONVERSION_ALREADY_ENHANCED;
        case 22: return DUPLICATE_ENHANCEMENT_IN_REQUEST;
        case 23: return CUSTOMER_DATA_POLICY_PROHIBITS_ENHANCEMENT;
        case 24: return MISSING_ORDER_ID_FOR_WEBPAGE;
        case 25: return ORDER_ID_CONTAINS_PII;
        case 26: return INVALID_JOB_ID;
        case 27: return NO_CONVERSION_ACTION_FOUND;
        case 28: return INVALID_CONVERSION_ACTION_TYPE;
        default: return null;
      }
    }
    /** Returns the map the protobuf runtime uses to look up constants by wire number. */
    public static com.google.protobuf.Internal.EnumLiteMap<ConversionAdjustmentUploadError>
        internalGetValueMap() {
      return internalValueMap;
    }
    // Delegates to forNumber; yields null for numbers with no constant.
    private static final com.google.protobuf.Internal.EnumLiteMap<
        ConversionAdjustmentUploadError> internalValueMap =
          new com.google.protobuf.Internal.EnumLiteMap<ConversionAdjustmentUploadError>() {
            public ConversionAdjustmentUploadError findValueByNumber(int number) {
              return ConversionAdjustmentUploadError.forNumber(number);
            }
          };
    /**
     * Returns the descriptor for this enum value.
     *
     * @throws java.lang.IllegalStateException if this is {@link #UNRECOGNIZED},
     *     which has no descriptor.
     */
    public final com.google.protobuf.Descriptors.EnumValueDescriptor
        getValueDescriptor() {
      if (this == UNRECOGNIZED) {
        throw new java.lang.IllegalStateException(
            "Can't get the descriptor of an unrecognized enum value.");
      }
      return getDescriptor().getValues().get(ordinal());
    }
    /** Returns the enum type descriptor (same as {@link #getDescriptor()}). */
    public final com.google.protobuf.Descriptors.EnumDescriptor
        getDescriptorForType() {
      return getDescriptor();
    }
    /** Returns this enum's descriptor — the first nested enum of the enclosing message type. */
    public static final com.google.protobuf.Descriptors.EnumDescriptor
        getDescriptor() {
      return com.google.ads.googleads.v20.errors.ConversionAdjustmentUploadErrorEnum.getDescriptor().getEnumTypes().get(0);
    }
    // Cached copy of values(); avoids re-cloning the array on every descriptor lookup.
    private static final ConversionAdjustmentUploadError[] VALUES = values();
    /**
     * Returns the enum constant for the given value descriptor.
     *
     * @throws java.lang.IllegalArgumentException if the descriptor belongs to a
     *     different enum type.
     */
    public static ConversionAdjustmentUploadError valueOf(
        com.google.protobuf.Descriptors.EnumValueDescriptor desc) {
      if (desc.getType() != getDescriptor()) {
        throw new java.lang.IllegalArgumentException(
          "EnumValueDescriptor is not for this type.");
      }
      if (desc.getIndex() == -1) {
        return UNRECOGNIZED;
      }
      return VALUES[desc.getIndex()];
    }
    // Wire value of this constant; -1 for UNRECOGNIZED (see getNumber()).
    private final int value;
    private ConversionAdjustmentUploadError(int value) {
      this.value = value;
    }
// @@protoc_insertion_point(enum_scope:google.ads.googleads.v20.errors.ConversionAdjustmentUploadErrorEnum.ConversionAdjustmentUploadError)
}
  // Memoized result of isInitialized(): -1 = not yet computed, 0 = false, 1 = true.
  private byte memoizedIsInitialized = -1;
  /** Always returns {@code true}: this message declares no required fields. */
  @java.lang.Override
  public final boolean isInitialized() {
    byte isInitialized = memoizedIsInitialized;
    if (isInitialized == 1) return true;
    if (isInitialized == 0) return false;
    memoizedIsInitialized = 1;
    return true;
  }
  /** Serializes this message; it declares no fields, so only unknown fields are written. */
  @java.lang.Override
  public void writeTo(com.google.protobuf.CodedOutputStream output)
                      throws java.io.IOException {
    getUnknownFields().writeTo(output);
  }
  /** Returns the serialized byte size (unknown fields only), memoized in {@code memoizedSize}. */
  @java.lang.Override
  public int getSerializedSize() {
    int size = memoizedSize;
    if (size != -1) return size;
    size = 0;
    size += getUnknownFields().getSerializedSize();
    memoizedSize = size;
    return size;
  }
  /**
   * Compares by type and unknown fields only — this message has no declared
   * fields, so any two instances with equal unknown-field sets are equal.
   */
  @java.lang.Override
  public boolean equals(final java.lang.Object obj) {
    if (obj == this) {
     return true;
    }
    if (!(obj instanceof com.google.ads.googleads.v20.errors.ConversionAdjustmentUploadErrorEnum)) {
      return super.equals(obj);
    }
    com.google.ads.googleads.v20.errors.ConversionAdjustmentUploadErrorEnum other = (com.google.ads.googleads.v20.errors.ConversionAdjustmentUploadErrorEnum) obj;
    if (!getUnknownFields().equals(other.getUnknownFields())) return false;
    return true;
  }
  /** Hash over the type descriptor and unknown fields, memoized (0 = not yet computed). */
  @java.lang.Override
  public int hashCode() {
    if (memoizedHashCode != 0) {
      return memoizedHashCode;
    }
    int hash = 41;
    hash = (19 * hash) + getDescriptor().hashCode();
    hash = (29 * hash) + getUnknownFields().hashCode();
    memoizedHashCode = hash;
    return hash;
  }
  // ---------------------------------------------------------------------------
  // Standard generated parse methods. In-memory overloads (ByteBuffer,
  // ByteString, byte[]) delegate to PARSER directly; stream overloads delegate
  // to GeneratedMessageV3's IO helpers so IOExceptions propagate unchanged.
  // Each variant also has an overload accepting an ExtensionRegistryLite.
  // ---------------------------------------------------------------------------
  public static com.google.ads.googleads.v20.errors.ConversionAdjustmentUploadErrorEnum parseFrom(
      java.nio.ByteBuffer data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.ads.googleads.v20.errors.ConversionAdjustmentUploadErrorEnum parseFrom(
      java.nio.ByteBuffer data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.ads.googleads.v20.errors.ConversionAdjustmentUploadErrorEnum parseFrom(
      com.google.protobuf.ByteString data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.ads.googleads.v20.errors.ConversionAdjustmentUploadErrorEnum parseFrom(
      com.google.protobuf.ByteString data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.ads.googleads.v20.errors.ConversionAdjustmentUploadErrorEnum parseFrom(byte[] data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.ads.googleads.v20.errors.ConversionAdjustmentUploadErrorEnum parseFrom(
      byte[] data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.ads.googleads.v20.errors.ConversionAdjustmentUploadErrorEnum parseFrom(java.io.InputStream input)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3
        .parseWithIOException(PARSER, input);
  }
  public static com.google.ads.googleads.v20.errors.ConversionAdjustmentUploadErrorEnum parseFrom(
      java.io.InputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3
        .parseWithIOException(PARSER, input, extensionRegistry);
  }
  public static com.google.ads.googleads.v20.errors.ConversionAdjustmentUploadErrorEnum parseDelimitedFrom(java.io.InputStream input)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3
        .parseDelimitedWithIOException(PARSER, input);
  }
  public static com.google.ads.googleads.v20.errors.ConversionAdjustmentUploadErrorEnum parseDelimitedFrom(
      java.io.InputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3
        .parseDelimitedWithIOException(PARSER, input, extensionRegistry);
  }
  public static com.google.ads.googleads.v20.errors.ConversionAdjustmentUploadErrorEnum parseFrom(
      com.google.protobuf.CodedInputStream input)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3
        .parseWithIOException(PARSER, input);
  }
  public static com.google.ads.googleads.v20.errors.ConversionAdjustmentUploadErrorEnum parseFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3
        .parseWithIOException(PARSER, input, extensionRegistry);
  }
  @java.lang.Override
  public Builder newBuilderForType() { return newBuilder(); }
  /** Creates a builder seeded from the default instance. */
  public static Builder newBuilder() {
    return DEFAULT_INSTANCE.toBuilder();
  }
  /** Creates a builder pre-populated with {@code prototype}'s contents. */
  public static Builder newBuilder(com.google.ads.googleads.v20.errors.ConversionAdjustmentUploadErrorEnum prototype) {
    return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
  }
  /** Returns a builder with this message's contents; a fresh one if this is the default instance. */
  @java.lang.Override
  public Builder toBuilder() {
    return this == DEFAULT_INSTANCE
        ? new Builder() : new Builder().mergeFrom(this);
  }
  // Runtime hook for creating a builder attached to a parent (for nested-builder change propagation).
  @java.lang.Override
  protected Builder newBuilderForType(
      com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
    Builder builder = new Builder(parent);
    return builder;
  }
  /**
   * <pre>
   * Container for enum describing possible conversion adjustment upload errors.
   * </pre>
   *
   * Protobuf type {@code google.ads.googleads.v20.errors.ConversionAdjustmentUploadErrorEnum}
   *
   * <p>Builder for this field-less wrapper message; it only carries unknown
   * fields, so most methods simply delegate to the superclass.
   */
  public static final class Builder extends
      com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements
      // @@protoc_insertion_point(builder_implements:google.ads.googleads.v20.errors.ConversionAdjustmentUploadErrorEnum)
      com.google.ads.googleads.v20.errors.ConversionAdjustmentUploadErrorEnumOrBuilder {
    /** Returns the message descriptor for this type. */
    public static final com.google.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return com.google.ads.googleads.v20.errors.ConversionAdjustmentUploadErrorProto.internal_static_google_ads_googleads_v20_errors_ConversionAdjustmentUploadErrorEnum_descriptor;
    }
    @java.lang.Override
    protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return com.google.ads.googleads.v20.errors.ConversionAdjustmentUploadErrorProto.internal_static_google_ads_googleads_v20_errors_ConversionAdjustmentUploadErrorEnum_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              com.google.ads.googleads.v20.errors.ConversionAdjustmentUploadErrorEnum.class, com.google.ads.googleads.v20.errors.ConversionAdjustmentUploadErrorEnum.Builder.class);
    }
    // Construct using com.google.ads.googleads.v20.errors.ConversionAdjustmentUploadErrorEnum.newBuilder()
    private Builder() {
    }
    private Builder(
        com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
      super(parent);
    }
    /** Resets the builder; with no declared fields, only superclass state is cleared. */
    @java.lang.Override
    public Builder clear() {
      super.clear();
      return this;
    }
    @java.lang.Override
    public com.google.protobuf.Descriptors.Descriptor
        getDescriptorForType() {
      return com.google.ads.googleads.v20.errors.ConversionAdjustmentUploadErrorProto.internal_static_google_ads_googleads_v20_errors_ConversionAdjustmentUploadErrorEnum_descriptor;
    }
    @java.lang.Override
    public com.google.ads.googleads.v20.errors.ConversionAdjustmentUploadErrorEnum getDefaultInstanceForType() {
      return com.google.ads.googleads.v20.errors.ConversionAdjustmentUploadErrorEnum.getDefaultInstance();
    }
    /** Builds the message; never throws in practice since there are no required fields. */
    @java.lang.Override
    public com.google.ads.googleads.v20.errors.ConversionAdjustmentUploadErrorEnum build() {
      com.google.ads.googleads.v20.errors.ConversionAdjustmentUploadErrorEnum result = buildPartial();
      if (!result.isInitialized()) {
        throw newUninitializedMessageException(result);
      }
      return result;
    }
    @java.lang.Override
    public com.google.ads.googleads.v20.errors.ConversionAdjustmentUploadErrorEnum buildPartial() {
      com.google.ads.googleads.v20.errors.ConversionAdjustmentUploadErrorEnum result = new com.google.ads.googleads.v20.errors.ConversionAdjustmentUploadErrorEnum(this);
      onBuilt();
      return result;
    }
    @java.lang.Override
    public Builder clone() {
      return super.clone();
    }
    // The reflective field mutators below exist to narrow the return type to
    // Builder; they all forward directly to the superclass.
    @java.lang.Override
    public Builder setField(
        com.google.protobuf.Descriptors.FieldDescriptor field,
        java.lang.Object value) {
      return super.setField(field, value);
    }
    @java.lang.Override
    public Builder clearField(
        com.google.protobuf.Descriptors.FieldDescriptor field) {
      return super.clearField(field);
    }
    @java.lang.Override
    public Builder clearOneof(
        com.google.protobuf.Descriptors.OneofDescriptor oneof) {
      return super.clearOneof(oneof);
    }
    @java.lang.Override
    public Builder setRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field,
        int index, java.lang.Object value) {
      return super.setRepeatedField(field, index, value);
    }
    @java.lang.Override
    public Builder addRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field,
        java.lang.Object value) {
      return super.addRepeatedField(field, value);
    }
    @java.lang.Override
    public Builder mergeFrom(com.google.protobuf.Message other) {
      if (other instanceof com.google.ads.googleads.v20.errors.ConversionAdjustmentUploadErrorEnum) {
        return mergeFrom((com.google.ads.googleads.v20.errors.ConversionAdjustmentUploadErrorEnum)other);
      } else {
        super.mergeFrom(other);
        return this;
      }
    }
    // Typed merge: with no declared fields, merging only copies unknown fields.
    public Builder mergeFrom(com.google.ads.googleads.v20.errors.ConversionAdjustmentUploadErrorEnum other) {
      if (other == com.google.ads.googleads.v20.errors.ConversionAdjustmentUploadErrorEnum.getDefaultInstance()) return this;
      this.mergeUnknownFields(other.getUnknownFields());
      onChanged();
      return this;
    }
    @java.lang.Override
    public final boolean isInitialized() {
      return true;
    }
    // Streaming merge: every tag is unknown to this message, so everything is
    // routed to parseUnknownField until EOF (tag 0) or an end-group tag.
    @java.lang.Override
    public Builder mergeFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      if (extensionRegistry == null) {
        throw new java.lang.NullPointerException();
      }
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              done = true;
              break;
            default: {
              if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                done = true; // was an endgroup tag
              }
              break;
            } // default:
          } // switch (tag)
        } // while (!done)
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.unwrapIOException();
      } finally {
        onChanged();
      } // finally
      return this;
    }
    @java.lang.Override
    public final Builder setUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.setUnknownFields(unknownFields);
    }
    @java.lang.Override
    public final Builder mergeUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.mergeUnknownFields(unknownFields);
    }

    // @@protoc_insertion_point(builder_scope:google.ads.googleads.v20.errors.ConversionAdjustmentUploadErrorEnum)
  }
// @@protoc_insertion_point(class_scope:google.ads.googleads.v20.errors.ConversionAdjustmentUploadErrorEnum)
  // Singleton default instance shared by getDefaultInstance()/getDefaultInstanceForType().
  private static final com.google.ads.googleads.v20.errors.ConversionAdjustmentUploadErrorEnum DEFAULT_INSTANCE;
  static {
    DEFAULT_INSTANCE = new com.google.ads.googleads.v20.errors.ConversionAdjustmentUploadErrorEnum();
  }
  /** Returns the shared immutable default (empty) instance of this message. */
  public static com.google.ads.googleads.v20.errors.ConversionAdjustmentUploadErrorEnum getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }
  // Parser used by all parseFrom overloads. Parses via a Builder and, on any
  // failure, attaches the partially-built message to the thrown
  // InvalidProtocolBufferException so callers can inspect what was read.
  private static final com.google.protobuf.Parser<ConversionAdjustmentUploadErrorEnum>
      PARSER = new com.google.protobuf.AbstractParser<ConversionAdjustmentUploadErrorEnum>() {
    @java.lang.Override
    public ConversionAdjustmentUploadErrorEnum parsePartialFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      Builder builder = newBuilder();
      try {
        builder.mergeFrom(input, extensionRegistry);
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.setUnfinishedMessage(builder.buildPartial());
      } catch (com.google.protobuf.UninitializedMessageException e) {
        throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
      } catch (java.io.IOException e) {
        throw new com.google.protobuf.InvalidProtocolBufferException(e)
            .setUnfinishedMessage(builder.buildPartial());
      }
      return builder.buildPartial();
    }
  };
  /** Returns the shared parser for this message type. */
  public static com.google.protobuf.Parser<ConversionAdjustmentUploadErrorEnum> parser() {
    return PARSER;
  }
  @java.lang.Override
  public com.google.protobuf.Parser<ConversionAdjustmentUploadErrorEnum> getParserForType() {
    return PARSER;
  }
  /** Returns the shared default instance (see {@code getDefaultInstance()}). */
  @java.lang.Override
  public com.google.ads.googleads.v20.errors.ConversionAdjustmentUploadErrorEnum getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }
}
// Generated by the protocol buffer compiler.  DO NOT EDIT!
// source: google/ads/googleads/v21/errors/conversion_adjustment_upload_error.proto
// Protobuf Java Version: 3.25.7
package com.google.ads.googleads.v21.errors;
/**
* <pre>
* Container for enum describing possible conversion adjustment upload errors.
* </pre>
*
* Protobuf type {@code google.ads.googleads.v21.errors.ConversionAdjustmentUploadErrorEnum}
*/
public final class ConversionAdjustmentUploadErrorEnum extends
com.google.protobuf.GeneratedMessageV3 implements
// @@protoc_insertion_point(message_implements:google.ads.googleads.v21.errors.ConversionAdjustmentUploadErrorEnum)
ConversionAdjustmentUploadErrorEnumOrBuilder {
  private static final long serialVersionUID = 0L;
  // Use ConversionAdjustmentUploadErrorEnum.newBuilder() to construct.
  private ConversionAdjustmentUploadErrorEnum(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
    super(builder);
  }
  // No-arg constructor used for the default instance; no fields to initialize.
  private ConversionAdjustmentUploadErrorEnum() {
  }
  /** Called reflectively by the protobuf runtime to create fresh instances. */
  @java.lang.Override
  @SuppressWarnings({"unused"})
  protected java.lang.Object newInstance(
      UnusedPrivateParameter unused) {
    return new ConversionAdjustmentUploadErrorEnum();
  }
  /** Returns the message descriptor for this type. */
  public static final com.google.protobuf.Descriptors.Descriptor
      getDescriptor() {
    return com.google.ads.googleads.v21.errors.ConversionAdjustmentUploadErrorProto.internal_static_google_ads_googleads_v21_errors_ConversionAdjustmentUploadErrorEnum_descriptor;
  }
  // Wires up reflective field access for this message type (no declared fields).
  @java.lang.Override
  protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internalGetFieldAccessorTable() {
    return com.google.ads.googleads.v21.errors.ConversionAdjustmentUploadErrorProto.internal_static_google_ads_googleads_v21_errors_ConversionAdjustmentUploadErrorEnum_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            com.google.ads.googleads.v21.errors.ConversionAdjustmentUploadErrorEnum.class, com.google.ads.googleads.v21.errors.ConversionAdjustmentUploadErrorEnum.Builder.class);
  }
/**
* <pre>
* Enum describing possible conversion adjustment upload errors.
* </pre>
*
* Protobuf enum {@code google.ads.googleads.v21.errors.ConversionAdjustmentUploadErrorEnum.ConversionAdjustmentUploadError}
*/
public enum ConversionAdjustmentUploadError
implements com.google.protobuf.ProtocolMessageEnum {
/**
* <pre>
* Not specified.
* </pre>
*
* <code>UNSPECIFIED = 0;</code>
*/
UNSPECIFIED(0),
/**
* <pre>
* Used for return value only. Represents value unknown in this version.
* </pre>
*
* <code>UNKNOWN = 1;</code>
*/
UNKNOWN(1),
/**
* <pre>
* Can't import events to a conversion action that was just created. Try
* importing again in 6 hours.
* </pre>
*
* <code>TOO_RECENT_CONVERSION_ACTION = 2;</code>
*/
TOO_RECENT_CONVERSION_ACTION(2),
/**
* <pre>
* The conversion was already retracted. This adjustment was not processed.
* </pre>
*
* <code>CONVERSION_ALREADY_RETRACTED = 4;</code>
*/
CONVERSION_ALREADY_RETRACTED(4),
/**
* <pre>
* The conversion for this conversion action and conversion identifier can't
* be found. Make sure your conversion identifiers are associated with the
* correct conversion action and try again.
* </pre>
*
* <code>CONVERSION_NOT_FOUND = 5;</code>
*/
CONVERSION_NOT_FOUND(5),
/**
* <pre>
* Adjustment can't be made to a conversion that occurred more than 54 days
* ago.
* </pre>
*
* <code>CONVERSION_EXPIRED = 6;</code>
*/
CONVERSION_EXPIRED(6),
/**
* <pre>
* Adjustment has an `adjustment_date_time` that occurred before the
* associated conversion. Make sure your `adjustment_date_time` is correct
* and try again.
* </pre>
*
* <code>ADJUSTMENT_PRECEDES_CONVERSION = 7;</code>
*/
ADJUSTMENT_PRECEDES_CONVERSION(7),
/**
* <pre>
* More recent adjustment `adjustment_date_time` has already been reported
* for the associated conversion. Make sure your adjustment
* `adjustment_date_time` is correct and try again.
* </pre>
*
* <code>MORE_RECENT_RESTATEMENT_FOUND = 8;</code>
*/
MORE_RECENT_RESTATEMENT_FOUND(8),
/**
* <pre>
* Adjustment can't be recorded because the conversion occurred too
* recently. Try adjusting a conversion that occurred at least 24 hours ago.
* </pre>
*
* <code>TOO_RECENT_CONVERSION = 9;</code>
*/
TOO_RECENT_CONVERSION(9),
/**
* <pre>
* Can't make an adjustment to a conversion that is set up to use the
* default value. Check your conversion action value setting and try again.
* </pre>
*
* <code>CANNOT_RESTATE_CONVERSION_ACTION_THAT_ALWAYS_USES_DEFAULT_CONVERSION_VALUE = 10;</code>
*/
CANNOT_RESTATE_CONVERSION_ACTION_THAT_ALWAYS_USES_DEFAULT_CONVERSION_VALUE(10),
/**
* <pre>
* Try uploading fewer than 2001 adjustments in a single API request.
* </pre>
*
* <code>TOO_MANY_ADJUSTMENTS_IN_REQUEST = 11;</code>
*/
TOO_MANY_ADJUSTMENTS_IN_REQUEST(11),
/**
* <pre>
* The conversion has already been adjusted the maximum number of times.
* Make sure you're only making necessary adjustment to existing conversion.
* </pre>
*
* <code>TOO_MANY_ADJUSTMENTS = 12;</code>
*/
TOO_MANY_ADJUSTMENTS(12),
/**
* <pre>
* The conversion has prior a restatement with the same
* `adjustment_date_time`. Make sure your adjustment has the correct and
* unique `adjustment_date_time` and try again.
* </pre>
*
* <code>RESTATEMENT_ALREADY_EXISTS = 13;</code>
*/
RESTATEMENT_ALREADY_EXISTS(13),
/**
* <pre>
* Imported adjustment has a duplicate conversion adjustment with same
* `adjustment_date_time`. Make sure your adjustment has the correct
* `adjustment_date_time` and try again.
* </pre>
*
* <code>DUPLICATE_ADJUSTMENT_IN_REQUEST = 14;</code>
*/
DUPLICATE_ADJUSTMENT_IN_REQUEST(14),
/**
* <pre>
* Make sure you agree to the customer data processing terms in conversion
* settings and try again.
* </pre>
*
* <code>CUSTOMER_NOT_ACCEPTED_CUSTOMER_DATA_TERMS = 15;</code>
*/
CUSTOMER_NOT_ACCEPTED_CUSTOMER_DATA_TERMS(15),
/**
* <pre>
* Can't use enhanced conversions with the specified conversion action.
* </pre>
*
* <code>CONVERSION_ACTION_NOT_ELIGIBLE_FOR_ENHANCEMENT = 16;</code>
*/
CONVERSION_ACTION_NOT_ELIGIBLE_FOR_ENHANCEMENT(16),
/**
* <pre>
* Make sure you hash user provided data using SHA-256 and ensure you are
* normalizing according to the guidelines.
* </pre>
*
* <code>INVALID_USER_IDENTIFIER = 17;</code>
*/
INVALID_USER_IDENTIFIER(17),
/**
* <pre>
* Use user provided data such as emails or phone numbers hashed using
* SHA-256 and try again.
* </pre>
*
* <code>UNSUPPORTED_USER_IDENTIFIER = 18;</code>
*/
UNSUPPORTED_USER_IDENTIFIER(18),
/**
* <pre>
* Cannot set both gclid_date_time_pair and order_id. Use only 1 type and
* try again.
* </pre>
*
* <code>GCLID_DATE_TIME_PAIR_AND_ORDER_ID_BOTH_SET = 20;</code>
*/
GCLID_DATE_TIME_PAIR_AND_ORDER_ID_BOTH_SET(20),
/**
* <pre>
* Conversion already has enhancements with the same Order ID and conversion
* action. Make sure your data is correctly configured and try again.
* </pre>
*
* <code>CONVERSION_ALREADY_ENHANCED = 21;</code>
*/
CONVERSION_ALREADY_ENHANCED(21),
/**
* <pre>
* Multiple enhancements have the same conversion action and Order ID. Make
* sure your data is correctly configured and try again.
* </pre>
*
* <code>DUPLICATE_ENHANCEMENT_IN_REQUEST = 22;</code>
*/
DUPLICATE_ENHANCEMENT_IN_REQUEST(22),
/**
* <pre>
* Enhanced conversions can't be used for this account because of Google
* customer data policies. Contact your Google representative.
* </pre>
*
* <code>CUSTOMER_DATA_POLICY_PROHIBITS_ENHANCEMENT = 23;</code>
*/
CUSTOMER_DATA_POLICY_PROHIBITS_ENHANCEMENT(23),
/**
* <pre>
* Adjustment for website conversion requires Order ID (ie, transaction ID).
* Make sure your website tags capture Order IDs and you send the same Order
* IDs with your adjustment.
* </pre>
*
* <code>MISSING_ORDER_ID_FOR_WEBPAGE = 24;</code>
*/
MISSING_ORDER_ID_FOR_WEBPAGE(24),
/**
* <pre>
* Can't use adjustment with Order IDs containing personally-identifiable
* information (PII).
* </pre>
*
* <code>ORDER_ID_CONTAINS_PII = 25;</code>
*/
ORDER_ID_CONTAINS_PII(25),
/**
* <pre>
* The provided job id in the request is not within the allowed range. A job
* ID must be a positive integer in the range [1, 2^31).
* </pre>
*
* <code>INVALID_JOB_ID = 26;</code>
*/
INVALID_JOB_ID(26),
/**
* <pre>
* The conversion action specified in the adjustment request cannot be
* found. Make sure it's available in this account.
* </pre>
*
* <code>NO_CONVERSION_ACTION_FOUND = 27;</code>
*/
NO_CONVERSION_ACTION_FOUND(27),
/**
* <pre>
* The type of the conversion action specified in the adjustment request
* isn't supported for uploading adjustments. A conversion adjustment of
* type `RETRACTION` or `RESTATEMENT` is only permitted for conversion
* actions of type `SALESFORCE`, `UPLOAD_CLICK` or `WEBPAGE`. A conversion
* adjustment of type `ENHANCEMENT` is only permitted for conversion
* actions of type `WEBPAGE`.
* </pre>
*
* <code>INVALID_CONVERSION_ACTION_TYPE = 28;</code>
*/
INVALID_CONVERSION_ACTION_TYPE(28),
UNRECOGNIZED(-1),
;
/**
* <pre>
* Not specified.
* </pre>
*
* <code>UNSPECIFIED = 0;</code>
*/
public static final int UNSPECIFIED_VALUE = 0;
/**
* <pre>
* Used for return value only. Represents value unknown in this version.
* </pre>
*
* <code>UNKNOWN = 1;</code>
*/
public static final int UNKNOWN_VALUE = 1;
/**
* <pre>
* Can't import events to a conversion action that was just created. Try
* importing again in 6 hours.
* </pre>
*
* <code>TOO_RECENT_CONVERSION_ACTION = 2;</code>
*/
public static final int TOO_RECENT_CONVERSION_ACTION_VALUE = 2;
/**
* <pre>
* The conversion was already retracted. This adjustment was not processed.
* </pre>
*
* <code>CONVERSION_ALREADY_RETRACTED = 4;</code>
*/
public static final int CONVERSION_ALREADY_RETRACTED_VALUE = 4;
/**
* <pre>
* The conversion for this conversion action and conversion identifier can't
* be found. Make sure your conversion identifiers are associated with the
* correct conversion action and try again.
* </pre>
*
* <code>CONVERSION_NOT_FOUND = 5;</code>
*/
public static final int CONVERSION_NOT_FOUND_VALUE = 5;
/**
* <pre>
* Adjustment can't be made to a conversion that occurred more than 54 days
* ago.
* </pre>
*
* <code>CONVERSION_EXPIRED = 6;</code>
*/
public static final int CONVERSION_EXPIRED_VALUE = 6;
/**
* <pre>
* Adjustment has an `adjustment_date_time` that occurred before the
* associated conversion. Make sure your `adjustment_date_time` is correct
* and try again.
* </pre>
*
* <code>ADJUSTMENT_PRECEDES_CONVERSION = 7;</code>
*/
public static final int ADJUSTMENT_PRECEDES_CONVERSION_VALUE = 7;
/**
* <pre>
* More recent adjustment `adjustment_date_time` has already been reported
* for the associated conversion. Make sure your adjustment
* `adjustment_date_time` is correct and try again.
* </pre>
*
* <code>MORE_RECENT_RESTATEMENT_FOUND = 8;</code>
*/
public static final int MORE_RECENT_RESTATEMENT_FOUND_VALUE = 8;
/**
* <pre>
* Adjustment can't be recorded because the conversion occurred too
* recently. Try adjusting a conversion that occurred at least 24 hours ago.
* </pre>
*
* <code>TOO_RECENT_CONVERSION = 9;</code>
*/
public static final int TOO_RECENT_CONVERSION_VALUE = 9;
/**
* <pre>
* Can't make an adjustment to a conversion that is set up to use the
* default value. Check your conversion action value setting and try again.
* </pre>
*
* <code>CANNOT_RESTATE_CONVERSION_ACTION_THAT_ALWAYS_USES_DEFAULT_CONVERSION_VALUE = 10;</code>
*/
public static final int CANNOT_RESTATE_CONVERSION_ACTION_THAT_ALWAYS_USES_DEFAULT_CONVERSION_VALUE_VALUE = 10;
/**
* <pre>
* Try uploading fewer than 2001 adjustments in a single API request.
* </pre>
*
* <code>TOO_MANY_ADJUSTMENTS_IN_REQUEST = 11;</code>
*/
public static final int TOO_MANY_ADJUSTMENTS_IN_REQUEST_VALUE = 11;
/**
* <pre>
* The conversion has already been adjusted the maximum number of times.
* Make sure you're only making necessary adjustment to existing conversion.
* </pre>
*
* <code>TOO_MANY_ADJUSTMENTS = 12;</code>
*/
public static final int TOO_MANY_ADJUSTMENTS_VALUE = 12;
/**
* <pre>
* The conversion has prior a restatement with the same
* `adjustment_date_time`. Make sure your adjustment has the correct and
* unique `adjustment_date_time` and try again.
* </pre>
*
* <code>RESTATEMENT_ALREADY_EXISTS = 13;</code>
*/
public static final int RESTATEMENT_ALREADY_EXISTS_VALUE = 13;
/**
* <pre>
* Imported adjustment has a duplicate conversion adjustment with same
* `adjustment_date_time`. Make sure your adjustment has the correct
* `adjustment_date_time` and try again.
* </pre>
*
* <code>DUPLICATE_ADJUSTMENT_IN_REQUEST = 14;</code>
*/
public static final int DUPLICATE_ADJUSTMENT_IN_REQUEST_VALUE = 14;
/**
* <pre>
* Make sure you agree to the customer data processing terms in conversion
* settings and try again.
* </pre>
*
* <code>CUSTOMER_NOT_ACCEPTED_CUSTOMER_DATA_TERMS = 15;</code>
*/
public static final int CUSTOMER_NOT_ACCEPTED_CUSTOMER_DATA_TERMS_VALUE = 15;
/**
* <pre>
* Can't use enhanced conversions with the specified conversion action.
* </pre>
*
* <code>CONVERSION_ACTION_NOT_ELIGIBLE_FOR_ENHANCEMENT = 16;</code>
*/
public static final int CONVERSION_ACTION_NOT_ELIGIBLE_FOR_ENHANCEMENT_VALUE = 16;
/**
* <pre>
* Make sure you hash user provided data using SHA-256 and ensure you are
* normalizing according to the guidelines.
* </pre>
*
* <code>INVALID_USER_IDENTIFIER = 17;</code>
*/
public static final int INVALID_USER_IDENTIFIER_VALUE = 17;
/**
* <pre>
* Use user provided data such as emails or phone numbers hashed using
* SHA-256 and try again.
* </pre>
*
* <code>UNSUPPORTED_USER_IDENTIFIER = 18;</code>
*/
public static final int UNSUPPORTED_USER_IDENTIFIER_VALUE = 18;
/**
* <pre>
* Cannot set both gclid_date_time_pair and order_id. Use only 1 type and
* try again.
* </pre>
*
* <code>GCLID_DATE_TIME_PAIR_AND_ORDER_ID_BOTH_SET = 20;</code>
*/
public static final int GCLID_DATE_TIME_PAIR_AND_ORDER_ID_BOTH_SET_VALUE = 20;
/**
* <pre>
* Conversion already has enhancements with the same Order ID and conversion
* action. Make sure your data is correctly configured and try again.
* </pre>
*
* <code>CONVERSION_ALREADY_ENHANCED = 21;</code>
*/
public static final int CONVERSION_ALREADY_ENHANCED_VALUE = 21;
/**
* <pre>
* Multiple enhancements have the same conversion action and Order ID. Make
* sure your data is correctly configured and try again.
* </pre>
*
* <code>DUPLICATE_ENHANCEMENT_IN_REQUEST = 22;</code>
*/
public static final int DUPLICATE_ENHANCEMENT_IN_REQUEST_VALUE = 22;
/**
* <pre>
* Enhanced conversions can't be used for this account because of Google
* customer data policies. Contact your Google representative.
* </pre>
*
* <code>CUSTOMER_DATA_POLICY_PROHIBITS_ENHANCEMENT = 23;</code>
*/
public static final int CUSTOMER_DATA_POLICY_PROHIBITS_ENHANCEMENT_VALUE = 23;
/**
* <pre>
* Adjustment for website conversion requires Order ID (ie, transaction ID).
* Make sure your website tags capture Order IDs and you send the same Order
* IDs with your adjustment.
* </pre>
*
* <code>MISSING_ORDER_ID_FOR_WEBPAGE = 24;</code>
*/
public static final int MISSING_ORDER_ID_FOR_WEBPAGE_VALUE = 24;
/**
* <pre>
* Can't use adjustment with Order IDs containing personally-identifiable
* information (PII).
* </pre>
*
* <code>ORDER_ID_CONTAINS_PII = 25;</code>
*/
public static final int ORDER_ID_CONTAINS_PII_VALUE = 25;
/**
* <pre>
* The provided job id in the request is not within the allowed range. A job
* ID must be a positive integer in the range [1, 2^31).
* </pre>
*
* <code>INVALID_JOB_ID = 26;</code>
*/
public static final int INVALID_JOB_ID_VALUE = 26;
/**
* <pre>
* The conversion action specified in the adjustment request cannot be
* found. Make sure it's available in this account.
* </pre>
*
* <code>NO_CONVERSION_ACTION_FOUND = 27;</code>
*/
public static final int NO_CONVERSION_ACTION_FOUND_VALUE = 27;
/**
* <pre>
* The type of the conversion action specified in the adjustment request
* isn't supported for uploading adjustments. A conversion adjustment of
* type `RETRACTION` or `RESTATEMENT` is only permitted for conversion
* actions of type `SALESFORCE`, `UPLOAD_CLICK` or `WEBPAGE`. A conversion
* adjustment of type `ENHANCEMENT` is only permitted for conversion
* actions of type `WEBPAGE`.
* </pre>
*
* <code>INVALID_CONVERSION_ACTION_TYPE = 28;</code>
*/
public static final int INVALID_CONVERSION_ACTION_TYPE_VALUE = 28;
public final int getNumber() {
if (this == UNRECOGNIZED) {
throw new java.lang.IllegalArgumentException(
"Can't get the number of an unknown enum value.");
}
return value;
}
/**
* @param value The numeric wire value of the corresponding enum entry.
* @return The enum associated with the given numeric wire value.
* @deprecated Use {@link #forNumber(int)} instead.
*/
@java.lang.Deprecated
public static ConversionAdjustmentUploadError valueOf(int value) {
return forNumber(value);
}
    /**
     * Maps a numeric wire value to its enum entry.
     *
     * @param value The numeric wire value of the corresponding enum entry.
     * @return The enum associated with the given numeric wire value, or {@code null} if the
     *     number is not known to this version of the API.
     */
    public static ConversionAdjustmentUploadError forNumber(int value) {
      switch (value) {
        case 0: return UNSPECIFIED;
        case 1: return UNKNOWN;
        case 2: return TOO_RECENT_CONVERSION_ACTION;
        case 4: return CONVERSION_ALREADY_RETRACTED;
        case 5: return CONVERSION_NOT_FOUND;
        case 6: return CONVERSION_EXPIRED;
        case 7: return ADJUSTMENT_PRECEDES_CONVERSION;
        case 8: return MORE_RECENT_RESTATEMENT_FOUND;
        case 9: return TOO_RECENT_CONVERSION;
        case 10: return CANNOT_RESTATE_CONVERSION_ACTION_THAT_ALWAYS_USES_DEFAULT_CONVERSION_VALUE;
        case 11: return TOO_MANY_ADJUSTMENTS_IN_REQUEST;
        case 12: return TOO_MANY_ADJUSTMENTS;
        case 13: return RESTATEMENT_ALREADY_EXISTS;
        case 14: return DUPLICATE_ADJUSTMENT_IN_REQUEST;
        case 15: return CUSTOMER_NOT_ACCEPTED_CUSTOMER_DATA_TERMS;
        case 16: return CONVERSION_ACTION_NOT_ELIGIBLE_FOR_ENHANCEMENT;
        case 17: return INVALID_USER_IDENTIFIER;
        case 18: return UNSUPPORTED_USER_IDENTIFIER;
        case 20: return GCLID_DATE_TIME_PAIR_AND_ORDER_ID_BOTH_SET;
        case 21: return CONVERSION_ALREADY_ENHANCED;
        case 22: return DUPLICATE_ENHANCEMENT_IN_REQUEST;
        case 23: return CUSTOMER_DATA_POLICY_PROHIBITS_ENHANCEMENT;
        case 24: return MISSING_ORDER_ID_FOR_WEBPAGE;
        case 25: return ORDER_ID_CONTAINS_PII;
        case 26: return INVALID_JOB_ID;
        case 27: return NO_CONVERSION_ACTION_FOUND;
        case 28: return INVALID_CONVERSION_ACTION_TYPE;
        // NOTE(review): wire numbers 3 and 19 are intentionally absent — presumably
        // reserved/removed in the .proto definition (TODO confirm against the proto) —
        // so they fall through to the null default like any other unknown number.
        default: return null;
      }
    }
    /** Returns the shared number-to-enum map used by the protobuf runtime for this enum type. */
    public static com.google.protobuf.Internal.EnumLiteMap<ConversionAdjustmentUploadError>
        internalGetValueMap() {
      return internalValueMap;
    }
    // Single stateless mapper instance; lookups simply delegate to forNumber(int).
    private static final com.google.protobuf.Internal.EnumLiteMap<
        ConversionAdjustmentUploadError> internalValueMap =
          new com.google.protobuf.Internal.EnumLiteMap<ConversionAdjustmentUploadError>() {
            public ConversionAdjustmentUploadError findValueByNumber(int number) {
              return ConversionAdjustmentUploadError.forNumber(number);
            }
          };
    /**
     * Returns the reflective descriptor for this enum value.
     *
     * <p>Relies on {@code ordinal()} matching the value's declaration index inside the
     * descriptor, which holds for generated code but is why {@link #UNRECOGNIZED} (ordinal
     * past the declared values) must be rejected here.
     *
     * @throws java.lang.IllegalStateException if this is {@code UNRECOGNIZED}.
     */
    public final com.google.protobuf.Descriptors.EnumValueDescriptor
        getValueDescriptor() {
      if (this == UNRECOGNIZED) {
        throw new java.lang.IllegalStateException(
            "Can't get the descriptor of an unrecognized enum value.");
      }
      return getDescriptor().getValues().get(ordinal());
    }
    /** Returns the descriptor of this enum type (same as {@link #getDescriptor()}). */
    public final com.google.protobuf.Descriptors.EnumDescriptor
        getDescriptorForType() {
      return getDescriptor();
    }
    // The enum is the first (index 0) enum type declared inside the wrapper message.
    public static final com.google.protobuf.Descriptors.EnumDescriptor
        getDescriptor() {
      return com.google.ads.googleads.v21.errors.ConversionAdjustmentUploadErrorEnum.getDescriptor().getEnumTypes().get(0);
    }
    // Cached copy of values(); avoids re-cloning the array on every descriptor lookup.
    private static final ConversionAdjustmentUploadError[] VALUES = values();
    /**
     * Returns the enum value for a reflective descriptor.
     *
     * @throws java.lang.IllegalArgumentException if the descriptor belongs to a different enum
     *     type.
     */
    public static ConversionAdjustmentUploadError valueOf(
        com.google.protobuf.Descriptors.EnumValueDescriptor desc) {
      if (desc.getType() != getDescriptor()) {
        throw new java.lang.IllegalArgumentException(
          "EnumValueDescriptor is not for this type.");
      }
      // Index -1 is the runtime's marker for a value not present in this descriptor version.
      if (desc.getIndex() == -1) {
        return UNRECOGNIZED;
      }
      return VALUES[desc.getIndex()];
    }
    // Numeric wire value backing this entry; -1 for UNRECOGNIZED.
    private final int value;
    private ConversionAdjustmentUploadError(int value) {
      this.value = value;
    }
// @@protoc_insertion_point(enum_scope:google.ads.googleads.v21.errors.ConversionAdjustmentUploadErrorEnum.ConversionAdjustmentUploadError)
}
  // Memoized initialization state: -1 = not yet computed, 0 = false, 1 = true.
  private byte memoizedIsInitialized = -1;
  /**
   * Always returns {@code true}: this wrapper message declares no required fields, so every
   * instance is initialized. The result is memoized per the generated-message protocol.
   */
  @java.lang.Override
  public final boolean isInitialized() {
    byte isInitialized = memoizedIsInitialized;
    if (isInitialized == 1) return true;
    if (isInitialized == 0) return false;
    memoizedIsInitialized = 1;
    return true;
  }
  /**
   * Serializes this message. The message has no declared fields, so only unknown fields
   * (if any were parsed) are written.
   */
  @java.lang.Override
  public void writeTo(com.google.protobuf.CodedOutputStream output)
                      throws java.io.IOException {
    getUnknownFields().writeTo(output);
  }
  /**
   * Returns the serialized size in bytes, memoized after the first computation
   * ({@code -1} is the "not yet computed" sentinel). Only unknown fields contribute.
   */
  @java.lang.Override
  public int getSerializedSize() {
    int size = memoizedSize;
    if (size != -1) return size;

    size = 0;
    size += getUnknownFields().getSerializedSize();
    memoizedSize = size;
    return size;
  }
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof com.google.ads.googleads.v21.errors.ConversionAdjustmentUploadErrorEnum)) {
return super.equals(obj);
}
com.google.ads.googleads.v21.errors.ConversionAdjustmentUploadErrorEnum other = (com.google.ads.googleads.v21.errors.ConversionAdjustmentUploadErrorEnum) obj;
if (!getUnknownFields().equals(other.getUnknownFields())) return false;
return true;
}
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
  // ---------------------------------------------------------------------------------------
  // Standard generated parsing entry points. Every overload delegates to the singleton
  // PARSER (or the GeneratedMessageV3 I/O helpers, which translate parser errors into
  // IOException for stream inputs); they differ only in the accepted input representation.
  // ---------------------------------------------------------------------------------------
  public static com.google.ads.googleads.v21.errors.ConversionAdjustmentUploadErrorEnum parseFrom(
      java.nio.ByteBuffer data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.ads.googleads.v21.errors.ConversionAdjustmentUploadErrorEnum parseFrom(
      java.nio.ByteBuffer data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.ads.googleads.v21.errors.ConversionAdjustmentUploadErrorEnum parseFrom(
      com.google.protobuf.ByteString data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.ads.googleads.v21.errors.ConversionAdjustmentUploadErrorEnum parseFrom(
      com.google.protobuf.ByteString data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.ads.googleads.v21.errors.ConversionAdjustmentUploadErrorEnum parseFrom(byte[] data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.ads.googleads.v21.errors.ConversionAdjustmentUploadErrorEnum parseFrom(
      byte[] data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.ads.googleads.v21.errors.ConversionAdjustmentUploadErrorEnum parseFrom(java.io.InputStream input)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3
        .parseWithIOException(PARSER, input);
  }
  public static com.google.ads.googleads.v21.errors.ConversionAdjustmentUploadErrorEnum parseFrom(
      java.io.InputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3
        .parseWithIOException(PARSER, input, extensionRegistry);
  }
  public static com.google.ads.googleads.v21.errors.ConversionAdjustmentUploadErrorEnum parseDelimitedFrom(java.io.InputStream input)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3
        .parseDelimitedWithIOException(PARSER, input);
  }
  public static com.google.ads.googleads.v21.errors.ConversionAdjustmentUploadErrorEnum parseDelimitedFrom(
      java.io.InputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3
        .parseDelimitedWithIOException(PARSER, input, extensionRegistry);
  }
  public static com.google.ads.googleads.v21.errors.ConversionAdjustmentUploadErrorEnum parseFrom(
      com.google.protobuf.CodedInputStream input)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3
        .parseWithIOException(PARSER, input);
  }
  public static com.google.ads.googleads.v21.errors.ConversionAdjustmentUploadErrorEnum parseFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3
        .parseWithIOException(PARSER, input, extensionRegistry);
  }
  /** Returns a fresh builder for this message type. */
  @java.lang.Override
  public Builder newBuilderForType() { return newBuilder(); }
  /** Returns a builder initialized from the default (empty) instance. */
  public static Builder newBuilder() {
    return DEFAULT_INSTANCE.toBuilder();
  }
  /** Returns a builder pre-populated with {@code prototype}'s contents. */
  public static Builder newBuilder(com.google.ads.googleads.v21.errors.ConversionAdjustmentUploadErrorEnum prototype) {
    return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
  }
  @java.lang.Override
  public Builder toBuilder() {
    // Avoid a needless mergeFrom when converting the canonical empty instance.
    return this == DEFAULT_INSTANCE
        ? new Builder() : new Builder().mergeFrom(this);
  }
  @java.lang.Override
  protected Builder newBuilderForType(
      com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
    Builder builder = new Builder(parent);
    return builder;
  }
  /**
   * <pre>
   * Container for enum describing possible conversion adjustment upload errors.
   * </pre>
   *
   * Protobuf type {@code google.ads.googleads.v21.errors.ConversionAdjustmentUploadErrorEnum}
   *
   * <p>Builder for the field-less wrapper message: all state lives in the inherited
   * unknown-field set, so most methods simply delegate to the generated superclass.
   */
  public static final class Builder extends
      com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements
      // @@protoc_insertion_point(builder_implements:google.ads.googleads.v21.errors.ConversionAdjustmentUploadErrorEnum)
      com.google.ads.googleads.v21.errors.ConversionAdjustmentUploadErrorEnumOrBuilder {
    public static final com.google.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return com.google.ads.googleads.v21.errors.ConversionAdjustmentUploadErrorProto.internal_static_google_ads_googleads_v21_errors_ConversionAdjustmentUploadErrorEnum_descriptor;
    }

    @java.lang.Override
    protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return com.google.ads.googleads.v21.errors.ConversionAdjustmentUploadErrorProto.internal_static_google_ads_googleads_v21_errors_ConversionAdjustmentUploadErrorEnum_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              com.google.ads.googleads.v21.errors.ConversionAdjustmentUploadErrorEnum.class, com.google.ads.googleads.v21.errors.ConversionAdjustmentUploadErrorEnum.Builder.class);
    }

    // Construct using com.google.ads.googleads.v21.errors.ConversionAdjustmentUploadErrorEnum.newBuilder()
    private Builder() {

    }

    private Builder(
        com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
      super(parent);

    }
    @java.lang.Override
    public Builder clear() {
      super.clear();
      return this;
    }

    @java.lang.Override
    public com.google.protobuf.Descriptors.Descriptor
        getDescriptorForType() {
      return com.google.ads.googleads.v21.errors.ConversionAdjustmentUploadErrorProto.internal_static_google_ads_googleads_v21_errors_ConversionAdjustmentUploadErrorEnum_descriptor;
    }

    @java.lang.Override
    public com.google.ads.googleads.v21.errors.ConversionAdjustmentUploadErrorEnum getDefaultInstanceForType() {
      return com.google.ads.googleads.v21.errors.ConversionAdjustmentUploadErrorEnum.getDefaultInstance();
    }

    @java.lang.Override
    public com.google.ads.googleads.v21.errors.ConversionAdjustmentUploadErrorEnum build() {
      com.google.ads.googleads.v21.errors.ConversionAdjustmentUploadErrorEnum result = buildPartial();
      if (!result.isInitialized()) {
        throw newUninitializedMessageException(result);
      }
      return result;
    }

    @java.lang.Override
    public com.google.ads.googleads.v21.errors.ConversionAdjustmentUploadErrorEnum buildPartial() {
      // No declared fields to copy; the constructor captures the unknown fields.
      com.google.ads.googleads.v21.errors.ConversionAdjustmentUploadErrorEnum result = new com.google.ads.googleads.v21.errors.ConversionAdjustmentUploadErrorEnum(this);
      onBuilt();
      return result;
    }

    @java.lang.Override
    public Builder clone() {
      return super.clone();
    }
    @java.lang.Override
    public Builder setField(
        com.google.protobuf.Descriptors.FieldDescriptor field,
        java.lang.Object value) {
      return super.setField(field, value);
    }
    @java.lang.Override
    public Builder clearField(
        com.google.protobuf.Descriptors.FieldDescriptor field) {
      return super.clearField(field);
    }
    @java.lang.Override
    public Builder clearOneof(
        com.google.protobuf.Descriptors.OneofDescriptor oneof) {
      return super.clearOneof(oneof);
    }
    @java.lang.Override
    public Builder setRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field,
        int index, java.lang.Object value) {
      return super.setRepeatedField(field, index, value);
    }
    @java.lang.Override
    public Builder addRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field,
        java.lang.Object value) {
      return super.addRepeatedField(field, value);
    }
    @java.lang.Override
    public Builder mergeFrom(com.google.protobuf.Message other) {
      if (other instanceof com.google.ads.googleads.v21.errors.ConversionAdjustmentUploadErrorEnum) {
        return mergeFrom((com.google.ads.googleads.v21.errors.ConversionAdjustmentUploadErrorEnum)other);
      } else {
        super.mergeFrom(other);
        return this;
      }
    }

    public Builder mergeFrom(com.google.ads.googleads.v21.errors.ConversionAdjustmentUploadErrorEnum other) {
      if (other == com.google.ads.googleads.v21.errors.ConversionAdjustmentUploadErrorEnum.getDefaultInstance()) return this;
      // Only unknown fields can carry data for this message type.
      this.mergeUnknownFields(other.getUnknownFields());
      onChanged();
      return this;
    }

    @java.lang.Override
    public final boolean isInitialized() {
      return true;
    }

    @java.lang.Override
    public Builder mergeFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      if (extensionRegistry == null) {
        throw new java.lang.NullPointerException();
      }
      try {
        // No declared fields: the loop only drains the stream, collecting everything
        // into the unknown-field set until EOF (tag 0) or an end-group tag.
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              done = true;
              break;
            default: {
              if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                done = true; // was an endgroup tag
              }
              break;
            } // default:
          } // switch (tag)
        } // while (!done)
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.unwrapIOException();
      } finally {
        onChanged();
      } // finally
      return this;
    }
    @java.lang.Override
    public final Builder setUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.setUnknownFields(unknownFields);
    }

    @java.lang.Override
    public final Builder mergeUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.mergeUnknownFields(unknownFields);
    }


    // @@protoc_insertion_point(builder_scope:google.ads.googleads.v21.errors.ConversionAdjustmentUploadErrorEnum)
  }
// @@protoc_insertion_point(class_scope:google.ads.googleads.v21.errors.ConversionAdjustmentUploadErrorEnum)
  // Canonical empty instance; initialized eagerly in the static block below.
  private static final com.google.ads.googleads.v21.errors.ConversionAdjustmentUploadErrorEnum DEFAULT_INSTANCE;
  static {
    DEFAULT_INSTANCE = new com.google.ads.googleads.v21.errors.ConversionAdjustmentUploadErrorEnum();
  }

  /** Returns the canonical default (empty) instance of this message type. */
  public static com.google.ads.googleads.v21.errors.ConversionAdjustmentUploadErrorEnum getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }

  // Shared parser: builds via a fresh Builder, attaching any partially-parsed message to
  // thrown InvalidProtocolBufferExceptions so callers can inspect what was read.
  private static final com.google.protobuf.Parser<ConversionAdjustmentUploadErrorEnum>
      PARSER = new com.google.protobuf.AbstractParser<ConversionAdjustmentUploadErrorEnum>() {
    @java.lang.Override
    public ConversionAdjustmentUploadErrorEnum parsePartialFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      Builder builder = newBuilder();
      try {
        builder.mergeFrom(input, extensionRegistry);
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.setUnfinishedMessage(builder.buildPartial());
      } catch (com.google.protobuf.UninitializedMessageException e) {
        throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
      } catch (java.io.IOException e) {
        throw new com.google.protobuf.InvalidProtocolBufferException(e)
            .setUnfinishedMessage(builder.buildPartial());
      }
      return builder.buildPartial();
    }
  };

  /** Returns the shared parser for this message type. */
  public static com.google.protobuf.Parser<ConversionAdjustmentUploadErrorEnum> parser() {
    return PARSER;
  }

  @java.lang.Override
  public com.google.protobuf.Parser<ConversionAdjustmentUploadErrorEnum> getParserForType() {
    return PARSER;
  }

  @java.lang.Override
  public com.google.ads.googleads.v21.errors.ConversionAdjustmentUploadErrorEnum getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }
}
|
googleapis/google-cloud-java | 37,346 | java-domains/google-cloud-domains/src/main/java/com/google/cloud/domains/v1/stub/GrpcDomainsStub.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.cloud.domains.v1.stub;
import static com.google.cloud.domains.v1.DomainsClient.ListRegistrationsPagedResponse;
import com.google.api.gax.core.BackgroundResource;
import com.google.api.gax.core.BackgroundResourceAggregation;
import com.google.api.gax.grpc.GrpcCallSettings;
import com.google.api.gax.grpc.GrpcStubCallableFactory;
import com.google.api.gax.rpc.ClientContext;
import com.google.api.gax.rpc.OperationCallable;
import com.google.api.gax.rpc.RequestParamsBuilder;
import com.google.api.gax.rpc.UnaryCallable;
import com.google.cloud.domains.v1.AuthorizationCode;
import com.google.cloud.domains.v1.ConfigureContactSettingsRequest;
import com.google.cloud.domains.v1.ConfigureDnsSettingsRequest;
import com.google.cloud.domains.v1.ConfigureManagementSettingsRequest;
import com.google.cloud.domains.v1.DeleteRegistrationRequest;
import com.google.cloud.domains.v1.ExportRegistrationRequest;
import com.google.cloud.domains.v1.GetRegistrationRequest;
import com.google.cloud.domains.v1.ListRegistrationsRequest;
import com.google.cloud.domains.v1.ListRegistrationsResponse;
import com.google.cloud.domains.v1.OperationMetadata;
import com.google.cloud.domains.v1.RegisterDomainRequest;
import com.google.cloud.domains.v1.Registration;
import com.google.cloud.domains.v1.ResetAuthorizationCodeRequest;
import com.google.cloud.domains.v1.RetrieveAuthorizationCodeRequest;
import com.google.cloud.domains.v1.RetrieveRegisterParametersRequest;
import com.google.cloud.domains.v1.RetrieveRegisterParametersResponse;
import com.google.cloud.domains.v1.RetrieveTransferParametersRequest;
import com.google.cloud.domains.v1.RetrieveTransferParametersResponse;
import com.google.cloud.domains.v1.SearchDomainsRequest;
import com.google.cloud.domains.v1.SearchDomainsResponse;
import com.google.cloud.domains.v1.TransferDomainRequest;
import com.google.cloud.domains.v1.UpdateRegistrationRequest;
import com.google.longrunning.Operation;
import com.google.longrunning.stub.GrpcOperationsStub;
import com.google.protobuf.Empty;
import io.grpc.MethodDescriptor;
import io.grpc.protobuf.ProtoUtils;
import java.io.IOException;
import java.util.concurrent.TimeUnit;
import javax.annotation.Generated;
// AUTO-GENERATED DOCUMENTATION AND CLASS.
/**
* gRPC stub implementation for the Domains service API.
*
* <p>This class is for advanced usage and reflects the underlying API directly.
*/
@Generated("by gapic-generator-java")
public class GrpcDomainsStub extends DomainsStub {
  // ---------------------------------------------------------------------------------------
  // Static gRPC method descriptors for the Domains service. Each one describes a UNARY RPC:
  // its fully-qualified method name plus protobuf marshallers for the request and response
  // types. Long-running methods return google.longrunning.Operation. Built once at class
  // load and shared by all stub instances.
  // ---------------------------------------------------------------------------------------
  private static final MethodDescriptor<SearchDomainsRequest, SearchDomainsResponse>
      searchDomainsMethodDescriptor =
          MethodDescriptor.<SearchDomainsRequest, SearchDomainsResponse>newBuilder()
              .setType(MethodDescriptor.MethodType.UNARY)
              .setFullMethodName("google.cloud.domains.v1.Domains/SearchDomains")
              .setRequestMarshaller(
                  ProtoUtils.marshaller(SearchDomainsRequest.getDefaultInstance()))
              .setResponseMarshaller(
                  ProtoUtils.marshaller(SearchDomainsResponse.getDefaultInstance()))
              .setSampledToLocalTracing(true)
              .build();

  private static final MethodDescriptor<
          RetrieveRegisterParametersRequest, RetrieveRegisterParametersResponse>
      retrieveRegisterParametersMethodDescriptor =
          MethodDescriptor
              .<RetrieveRegisterParametersRequest, RetrieveRegisterParametersResponse>newBuilder()
              .setType(MethodDescriptor.MethodType.UNARY)
              .setFullMethodName("google.cloud.domains.v1.Domains/RetrieveRegisterParameters")
              .setRequestMarshaller(
                  ProtoUtils.marshaller(RetrieveRegisterParametersRequest.getDefaultInstance()))
              .setResponseMarshaller(
                  ProtoUtils.marshaller(RetrieveRegisterParametersResponse.getDefaultInstance()))
              .setSampledToLocalTracing(true)
              .build();

  private static final MethodDescriptor<RegisterDomainRequest, Operation>
      registerDomainMethodDescriptor =
          MethodDescriptor.<RegisterDomainRequest, Operation>newBuilder()
              .setType(MethodDescriptor.MethodType.UNARY)
              .setFullMethodName("google.cloud.domains.v1.Domains/RegisterDomain")
              .setRequestMarshaller(
                  ProtoUtils.marshaller(RegisterDomainRequest.getDefaultInstance()))
              .setResponseMarshaller(ProtoUtils.marshaller(Operation.getDefaultInstance()))
              .setSampledToLocalTracing(true)
              .build();

  private static final MethodDescriptor<
          RetrieveTransferParametersRequest, RetrieveTransferParametersResponse>
      retrieveTransferParametersMethodDescriptor =
          MethodDescriptor
              .<RetrieveTransferParametersRequest, RetrieveTransferParametersResponse>newBuilder()
              .setType(MethodDescriptor.MethodType.UNARY)
              .setFullMethodName("google.cloud.domains.v1.Domains/RetrieveTransferParameters")
              .setRequestMarshaller(
                  ProtoUtils.marshaller(RetrieveTransferParametersRequest.getDefaultInstance()))
              .setResponseMarshaller(
                  ProtoUtils.marshaller(RetrieveTransferParametersResponse.getDefaultInstance()))
              .setSampledToLocalTracing(true)
              .build();

  private static final MethodDescriptor<TransferDomainRequest, Operation>
      transferDomainMethodDescriptor =
          MethodDescriptor.<TransferDomainRequest, Operation>newBuilder()
              .setType(MethodDescriptor.MethodType.UNARY)
              .setFullMethodName("google.cloud.domains.v1.Domains/TransferDomain")
              .setRequestMarshaller(
                  ProtoUtils.marshaller(TransferDomainRequest.getDefaultInstance()))
              .setResponseMarshaller(ProtoUtils.marshaller(Operation.getDefaultInstance()))
              .setSampledToLocalTracing(true)
              .build();

  private static final MethodDescriptor<ListRegistrationsRequest, ListRegistrationsResponse>
      listRegistrationsMethodDescriptor =
          MethodDescriptor.<ListRegistrationsRequest, ListRegistrationsResponse>newBuilder()
              .setType(MethodDescriptor.MethodType.UNARY)
              .setFullMethodName("google.cloud.domains.v1.Domains/ListRegistrations")
              .setRequestMarshaller(
                  ProtoUtils.marshaller(ListRegistrationsRequest.getDefaultInstance()))
              .setResponseMarshaller(
                  ProtoUtils.marshaller(ListRegistrationsResponse.getDefaultInstance()))
              .setSampledToLocalTracing(true)
              .build();

  private static final MethodDescriptor<GetRegistrationRequest, Registration>
      getRegistrationMethodDescriptor =
          MethodDescriptor.<GetRegistrationRequest, Registration>newBuilder()
              .setType(MethodDescriptor.MethodType.UNARY)
              .setFullMethodName("google.cloud.domains.v1.Domains/GetRegistration")
              .setRequestMarshaller(
                  ProtoUtils.marshaller(GetRegistrationRequest.getDefaultInstance()))
              .setResponseMarshaller(ProtoUtils.marshaller(Registration.getDefaultInstance()))
              .setSampledToLocalTracing(true)
              .build();

  private static final MethodDescriptor<UpdateRegistrationRequest, Operation>
      updateRegistrationMethodDescriptor =
          MethodDescriptor.<UpdateRegistrationRequest, Operation>newBuilder()
              .setType(MethodDescriptor.MethodType.UNARY)
              .setFullMethodName("google.cloud.domains.v1.Domains/UpdateRegistration")
              .setRequestMarshaller(
                  ProtoUtils.marshaller(UpdateRegistrationRequest.getDefaultInstance()))
              .setResponseMarshaller(ProtoUtils.marshaller(Operation.getDefaultInstance()))
              .setSampledToLocalTracing(true)
              .build();

  private static final MethodDescriptor<ConfigureManagementSettingsRequest, Operation>
      configureManagementSettingsMethodDescriptor =
          MethodDescriptor.<ConfigureManagementSettingsRequest, Operation>newBuilder()
              .setType(MethodDescriptor.MethodType.UNARY)
              .setFullMethodName("google.cloud.domains.v1.Domains/ConfigureManagementSettings")
              .setRequestMarshaller(
                  ProtoUtils.marshaller(ConfigureManagementSettingsRequest.getDefaultInstance()))
              .setResponseMarshaller(ProtoUtils.marshaller(Operation.getDefaultInstance()))
              .setSampledToLocalTracing(true)
              .build();

  private static final MethodDescriptor<ConfigureDnsSettingsRequest, Operation>
      configureDnsSettingsMethodDescriptor =
          MethodDescriptor.<ConfigureDnsSettingsRequest, Operation>newBuilder()
              .setType(MethodDescriptor.MethodType.UNARY)
              .setFullMethodName("google.cloud.domains.v1.Domains/ConfigureDnsSettings")
              .setRequestMarshaller(
                  ProtoUtils.marshaller(ConfigureDnsSettingsRequest.getDefaultInstance()))
              .setResponseMarshaller(ProtoUtils.marshaller(Operation.getDefaultInstance()))
              .setSampledToLocalTracing(true)
              .build();

  private static final MethodDescriptor<ConfigureContactSettingsRequest, Operation>
      configureContactSettingsMethodDescriptor =
          MethodDescriptor.<ConfigureContactSettingsRequest, Operation>newBuilder()
              .setType(MethodDescriptor.MethodType.UNARY)
              .setFullMethodName("google.cloud.domains.v1.Domains/ConfigureContactSettings")
              .setRequestMarshaller(
                  ProtoUtils.marshaller(ConfigureContactSettingsRequest.getDefaultInstance()))
              .setResponseMarshaller(ProtoUtils.marshaller(Operation.getDefaultInstance()))
              .setSampledToLocalTracing(true)
              .build();

  private static final MethodDescriptor<ExportRegistrationRequest, Operation>
      exportRegistrationMethodDescriptor =
          MethodDescriptor.<ExportRegistrationRequest, Operation>newBuilder()
              .setType(MethodDescriptor.MethodType.UNARY)
              .setFullMethodName("google.cloud.domains.v1.Domains/ExportRegistration")
              .setRequestMarshaller(
                  ProtoUtils.marshaller(ExportRegistrationRequest.getDefaultInstance()))
              .setResponseMarshaller(ProtoUtils.marshaller(Operation.getDefaultInstance()))
              .setSampledToLocalTracing(true)
              .build();

  private static final MethodDescriptor<DeleteRegistrationRequest, Operation>
      deleteRegistrationMethodDescriptor =
          MethodDescriptor.<DeleteRegistrationRequest, Operation>newBuilder()
              .setType(MethodDescriptor.MethodType.UNARY)
              .setFullMethodName("google.cloud.domains.v1.Domains/DeleteRegistration")
              .setRequestMarshaller(
                  ProtoUtils.marshaller(DeleteRegistrationRequest.getDefaultInstance()))
              .setResponseMarshaller(ProtoUtils.marshaller(Operation.getDefaultInstance()))
              .setSampledToLocalTracing(true)
              .build();
private static final MethodDescriptor<RetrieveAuthorizationCodeRequest, AuthorizationCode>
retrieveAuthorizationCodeMethodDescriptor =
MethodDescriptor.<RetrieveAuthorizationCodeRequest, AuthorizationCode>newBuilder()
.setType(MethodDescriptor.MethodType.UNARY)
.setFullMethodName("google.cloud.domains.v1.Domains/RetrieveAuthorizationCode")
.setRequestMarshaller(
ProtoUtils.marshaller(RetrieveAuthorizationCodeRequest.getDefaultInstance()))
.setResponseMarshaller(ProtoUtils.marshaller(AuthorizationCode.getDefaultInstance()))
.setSampledToLocalTracing(true)
.build();
private static final MethodDescriptor<ResetAuthorizationCodeRequest, AuthorizationCode>
resetAuthorizationCodeMethodDescriptor =
MethodDescriptor.<ResetAuthorizationCodeRequest, AuthorizationCode>newBuilder()
.setType(MethodDescriptor.MethodType.UNARY)
.setFullMethodName("google.cloud.domains.v1.Domains/ResetAuthorizationCode")
.setRequestMarshaller(
ProtoUtils.marshaller(ResetAuthorizationCodeRequest.getDefaultInstance()))
.setResponseMarshaller(ProtoUtils.marshaller(AuthorizationCode.getDefaultInstance()))
.setSampledToLocalTracing(true)
.build();
  // Per-RPC callables, wired once in the constructor. Unary callables issue the
  // raw RPC; operation callables additionally poll the returned long-running
  // Operation via operationsStub until it resolves to its final response type.
  private final UnaryCallable<SearchDomainsRequest, SearchDomainsResponse> searchDomainsCallable;
  private final UnaryCallable<RetrieveRegisterParametersRequest, RetrieveRegisterParametersResponse>
      retrieveRegisterParametersCallable;
  private final UnaryCallable<RegisterDomainRequest, Operation> registerDomainCallable;
  private final OperationCallable<RegisterDomainRequest, Registration, OperationMetadata>
      registerDomainOperationCallable;
  private final UnaryCallable<RetrieveTransferParametersRequest, RetrieveTransferParametersResponse>
      retrieveTransferParametersCallable;
  private final UnaryCallable<TransferDomainRequest, Operation> transferDomainCallable;
  private final OperationCallable<TransferDomainRequest, Registration, OperationMetadata>
      transferDomainOperationCallable;
  private final UnaryCallable<ListRegistrationsRequest, ListRegistrationsResponse>
      listRegistrationsCallable;
  private final UnaryCallable<ListRegistrationsRequest, ListRegistrationsPagedResponse>
      listRegistrationsPagedCallable;
  private final UnaryCallable<GetRegistrationRequest, Registration> getRegistrationCallable;
  private final UnaryCallable<UpdateRegistrationRequest, Operation> updateRegistrationCallable;
  private final OperationCallable<UpdateRegistrationRequest, Registration, OperationMetadata>
      updateRegistrationOperationCallable;
  private final UnaryCallable<ConfigureManagementSettingsRequest, Operation>
      configureManagementSettingsCallable;
  private final OperationCallable<
          ConfigureManagementSettingsRequest, Registration, OperationMetadata>
      configureManagementSettingsOperationCallable;
  private final UnaryCallable<ConfigureDnsSettingsRequest, Operation> configureDnsSettingsCallable;
  private final OperationCallable<ConfigureDnsSettingsRequest, Registration, OperationMetadata>
      configureDnsSettingsOperationCallable;
  private final UnaryCallable<ConfigureContactSettingsRequest, Operation>
      configureContactSettingsCallable;
  private final OperationCallable<ConfigureContactSettingsRequest, Registration, OperationMetadata>
      configureContactSettingsOperationCallable;
  private final UnaryCallable<ExportRegistrationRequest, Operation> exportRegistrationCallable;
  private final OperationCallable<ExportRegistrationRequest, Registration, OperationMetadata>
      exportRegistrationOperationCallable;
  private final UnaryCallable<DeleteRegistrationRequest, Operation> deleteRegistrationCallable;
  // Delete resolves to Empty: a successful operation carries no payload.
  private final OperationCallable<DeleteRegistrationRequest, Empty, OperationMetadata>
      deleteRegistrationOperationCallable;
  private final UnaryCallable<RetrieveAuthorizationCodeRequest, AuthorizationCode>
      retrieveAuthorizationCodeCallable;
  private final UnaryCallable<ResetAuthorizationCodeRequest, AuthorizationCode>
      resetAuthorizationCodeCallable;
  // Aggregates everything that must be shut down when this stub closes.
  private final BackgroundResource backgroundResources;
  // Stub used by the operation callables above to poll long-running operations.
  private final GrpcOperationsStub operationsStub;
  // Factory that turns transport settings + call settings into callables.
  private final GrpcStubCallableFactory callableFactory;
  /**
   * Creates a stub from the given settings, building a new {@link ClientContext} from them.
   *
   * @throws IOException if the client context cannot be created
   */
  public static final GrpcDomainsStub create(DomainsStubSettings settings) throws IOException {
    return new GrpcDomainsStub(settings, ClientContext.create(settings));
  }
  /**
   * Creates a stub that uses the given pre-built client context together with
   * default {@link DomainsStubSettings}.
   *
   * @throws IOException if stub construction fails
   */
  public static final GrpcDomainsStub create(ClientContext clientContext) throws IOException {
    return new GrpcDomainsStub(DomainsStubSettings.newBuilder().build(), clientContext);
  }
  /**
   * Creates a stub with default settings, the given client context, and a custom
   * callable factory (primarily useful for tests that need to intercept callable creation).
   *
   * @throws IOException if stub construction fails
   */
  public static final GrpcDomainsStub create(
      ClientContext clientContext, GrpcStubCallableFactory callableFactory) throws IOException {
    return new GrpcDomainsStub(
        DomainsStubSettings.newBuilder().build(), clientContext, callableFactory);
  }
  /**
   * Constructs an instance of GrpcDomainsStub, using the given settings. This is protected so that
   * it is easy to make a subclass, but otherwise, the static factory methods should be preferred.
   */
  protected GrpcDomainsStub(DomainsStubSettings settings, ClientContext clientContext)
      throws IOException {
    // Delegate to the main constructor with the default gRPC callable factory.
    this(settings, clientContext, new GrpcDomainsCallableFactory());
  }
  /**
   * Constructs an instance of GrpcDomainsStub, using the given settings. This is protected so that
   * it is easy to make a subclass, but otherwise, the static factory methods should be preferred.
   */
  protected GrpcDomainsStub(
      DomainsStubSettings settings,
      ClientContext clientContext,
      GrpcStubCallableFactory callableFactory)
      throws IOException {
    this.callableFactory = callableFactory;
    this.operationsStub = GrpcOperationsStub.create(clientContext, callableFactory);
    // Transport settings: each binds a method descriptor to a params extractor that
    // copies routing fields (e.g. "parent", "name") from the request into request
    // params used for routing headers.
    GrpcCallSettings<SearchDomainsRequest, SearchDomainsResponse> searchDomainsTransportSettings =
        GrpcCallSettings.<SearchDomainsRequest, SearchDomainsResponse>newBuilder()
            .setMethodDescriptor(searchDomainsMethodDescriptor)
            .setParamsExtractor(
                request -> {
                  RequestParamsBuilder builder = RequestParamsBuilder.create();
                  builder.add("location", String.valueOf(request.getLocation()));
                  return builder.build();
                })
            .build();
    GrpcCallSettings<RetrieveRegisterParametersRequest, RetrieveRegisterParametersResponse>
        retrieveRegisterParametersTransportSettings =
            GrpcCallSettings
                .<RetrieveRegisterParametersRequest, RetrieveRegisterParametersResponse>newBuilder()
                .setMethodDescriptor(retrieveRegisterParametersMethodDescriptor)
                .setParamsExtractor(
                    request -> {
                      RequestParamsBuilder builder = RequestParamsBuilder.create();
                      builder.add("location", String.valueOf(request.getLocation()));
                      return builder.build();
                    })
                .build();
    GrpcCallSettings<RegisterDomainRequest, Operation> registerDomainTransportSettings =
        GrpcCallSettings.<RegisterDomainRequest, Operation>newBuilder()
            .setMethodDescriptor(registerDomainMethodDescriptor)
            .setParamsExtractor(
                request -> {
                  RequestParamsBuilder builder = RequestParamsBuilder.create();
                  builder.add("parent", String.valueOf(request.getParent()));
                  return builder.build();
                })
            .build();
    GrpcCallSettings<RetrieveTransferParametersRequest, RetrieveTransferParametersResponse>
        retrieveTransferParametersTransportSettings =
            GrpcCallSettings
                .<RetrieveTransferParametersRequest, RetrieveTransferParametersResponse>newBuilder()
                .setMethodDescriptor(retrieveTransferParametersMethodDescriptor)
                .setParamsExtractor(
                    request -> {
                      RequestParamsBuilder builder = RequestParamsBuilder.create();
                      builder.add("location", String.valueOf(request.getLocation()));
                      return builder.build();
                    })
                .build();
    GrpcCallSettings<TransferDomainRequest, Operation> transferDomainTransportSettings =
        GrpcCallSettings.<TransferDomainRequest, Operation>newBuilder()
            .setMethodDescriptor(transferDomainMethodDescriptor)
            .setParamsExtractor(
                request -> {
                  RequestParamsBuilder builder = RequestParamsBuilder.create();
                  builder.add("parent", String.valueOf(request.getParent()));
                  return builder.build();
                })
            .build();
    GrpcCallSettings<ListRegistrationsRequest, ListRegistrationsResponse>
        listRegistrationsTransportSettings =
            GrpcCallSettings.<ListRegistrationsRequest, ListRegistrationsResponse>newBuilder()
                .setMethodDescriptor(listRegistrationsMethodDescriptor)
                .setParamsExtractor(
                    request -> {
                      RequestParamsBuilder builder = RequestParamsBuilder.create();
                      builder.add("parent", String.valueOf(request.getParent()));
                      return builder.build();
                    })
                .build();
    GrpcCallSettings<GetRegistrationRequest, Registration> getRegistrationTransportSettings =
        GrpcCallSettings.<GetRegistrationRequest, Registration>newBuilder()
            .setMethodDescriptor(getRegistrationMethodDescriptor)
            .setParamsExtractor(
                request -> {
                  RequestParamsBuilder builder = RequestParamsBuilder.create();
                  builder.add("name", String.valueOf(request.getName()));
                  return builder.build();
                })
            .build();
    GrpcCallSettings<UpdateRegistrationRequest, Operation> updateRegistrationTransportSettings =
        GrpcCallSettings.<UpdateRegistrationRequest, Operation>newBuilder()
            .setMethodDescriptor(updateRegistrationMethodDescriptor)
            .setParamsExtractor(
                request -> {
                  RequestParamsBuilder builder = RequestParamsBuilder.create();
                  builder.add(
                      "registration.name", String.valueOf(request.getRegistration().getName()));
                  return builder.build();
                })
            .build();
    GrpcCallSettings<ConfigureManagementSettingsRequest, Operation>
        configureManagementSettingsTransportSettings =
            GrpcCallSettings.<ConfigureManagementSettingsRequest, Operation>newBuilder()
                .setMethodDescriptor(configureManagementSettingsMethodDescriptor)
                .setParamsExtractor(
                    request -> {
                      RequestParamsBuilder builder = RequestParamsBuilder.create();
                      builder.add("registration", String.valueOf(request.getRegistration()));
                      return builder.build();
                    })
                .build();
    GrpcCallSettings<ConfigureDnsSettingsRequest, Operation> configureDnsSettingsTransportSettings =
        GrpcCallSettings.<ConfigureDnsSettingsRequest, Operation>newBuilder()
            .setMethodDescriptor(configureDnsSettingsMethodDescriptor)
            .setParamsExtractor(
                request -> {
                  RequestParamsBuilder builder = RequestParamsBuilder.create();
                  builder.add("registration", String.valueOf(request.getRegistration()));
                  return builder.build();
                })
            .build();
    GrpcCallSettings<ConfigureContactSettingsRequest, Operation>
        configureContactSettingsTransportSettings =
            GrpcCallSettings.<ConfigureContactSettingsRequest, Operation>newBuilder()
                .setMethodDescriptor(configureContactSettingsMethodDescriptor)
                .setParamsExtractor(
                    request -> {
                      RequestParamsBuilder builder = RequestParamsBuilder.create();
                      builder.add("registration", String.valueOf(request.getRegistration()));
                      return builder.build();
                    })
                .build();
    GrpcCallSettings<ExportRegistrationRequest, Operation> exportRegistrationTransportSettings =
        GrpcCallSettings.<ExportRegistrationRequest, Operation>newBuilder()
            .setMethodDescriptor(exportRegistrationMethodDescriptor)
            .setParamsExtractor(
                request -> {
                  RequestParamsBuilder builder = RequestParamsBuilder.create();
                  builder.add("name", String.valueOf(request.getName()));
                  return builder.build();
                })
            .build();
    GrpcCallSettings<DeleteRegistrationRequest, Operation> deleteRegistrationTransportSettings =
        GrpcCallSettings.<DeleteRegistrationRequest, Operation>newBuilder()
            .setMethodDescriptor(deleteRegistrationMethodDescriptor)
            .setParamsExtractor(
                request -> {
                  RequestParamsBuilder builder = RequestParamsBuilder.create();
                  builder.add("name", String.valueOf(request.getName()));
                  return builder.build();
                })
            .build();
    GrpcCallSettings<RetrieveAuthorizationCodeRequest, AuthorizationCode>
        retrieveAuthorizationCodeTransportSettings =
            GrpcCallSettings.<RetrieveAuthorizationCodeRequest, AuthorizationCode>newBuilder()
                .setMethodDescriptor(retrieveAuthorizationCodeMethodDescriptor)
                .setParamsExtractor(
                    request -> {
                      RequestParamsBuilder builder = RequestParamsBuilder.create();
                      builder.add("registration", String.valueOf(request.getRegistration()));
                      return builder.build();
                    })
                .build();
    GrpcCallSettings<ResetAuthorizationCodeRequest, AuthorizationCode>
        resetAuthorizationCodeTransportSettings =
            GrpcCallSettings.<ResetAuthorizationCodeRequest, AuthorizationCode>newBuilder()
                .setMethodDescriptor(resetAuthorizationCodeMethodDescriptor)
                .setParamsExtractor(
                    request -> {
                      RequestParamsBuilder builder = RequestParamsBuilder.create();
                      builder.add("registration", String.valueOf(request.getRegistration()));
                      return builder.build();
                    })
                .build();
    // Callable wiring: combine each transport settings object with the matching
    // call settings from DomainsStubSettings. Operation callables additionally
    // receive operationsStub so they can poll the long-running Operation.
    this.searchDomainsCallable =
        callableFactory.createUnaryCallable(
            searchDomainsTransportSettings, settings.searchDomainsSettings(), clientContext);
    this.retrieveRegisterParametersCallable =
        callableFactory.createUnaryCallable(
            retrieveRegisterParametersTransportSettings,
            settings.retrieveRegisterParametersSettings(),
            clientContext);
    this.registerDomainCallable =
        callableFactory.createUnaryCallable(
            registerDomainTransportSettings, settings.registerDomainSettings(), clientContext);
    this.registerDomainOperationCallable =
        callableFactory.createOperationCallable(
            registerDomainTransportSettings,
            settings.registerDomainOperationSettings(),
            clientContext,
            operationsStub);
    this.retrieveTransferParametersCallable =
        callableFactory.createUnaryCallable(
            retrieveTransferParametersTransportSettings,
            settings.retrieveTransferParametersSettings(),
            clientContext);
    this.transferDomainCallable =
        callableFactory.createUnaryCallable(
            transferDomainTransportSettings, settings.transferDomainSettings(), clientContext);
    this.transferDomainOperationCallable =
        callableFactory.createOperationCallable(
            transferDomainTransportSettings,
            settings.transferDomainOperationSettings(),
            clientContext,
            operationsStub);
    this.listRegistrationsCallable =
        callableFactory.createUnaryCallable(
            listRegistrationsTransportSettings,
            settings.listRegistrationsSettings(),
            clientContext);
    this.listRegistrationsPagedCallable =
        callableFactory.createPagedCallable(
            listRegistrationsTransportSettings,
            settings.listRegistrationsSettings(),
            clientContext);
    this.getRegistrationCallable =
        callableFactory.createUnaryCallable(
            getRegistrationTransportSettings, settings.getRegistrationSettings(), clientContext);
    this.updateRegistrationCallable =
        callableFactory.createUnaryCallable(
            updateRegistrationTransportSettings,
            settings.updateRegistrationSettings(),
            clientContext);
    this.updateRegistrationOperationCallable =
        callableFactory.createOperationCallable(
            updateRegistrationTransportSettings,
            settings.updateRegistrationOperationSettings(),
            clientContext,
            operationsStub);
    this.configureManagementSettingsCallable =
        callableFactory.createUnaryCallable(
            configureManagementSettingsTransportSettings,
            settings.configureManagementSettingsSettings(),
            clientContext);
    this.configureManagementSettingsOperationCallable =
        callableFactory.createOperationCallable(
            configureManagementSettingsTransportSettings,
            settings.configureManagementSettingsOperationSettings(),
            clientContext,
            operationsStub);
    this.configureDnsSettingsCallable =
        callableFactory.createUnaryCallable(
            configureDnsSettingsTransportSettings,
            settings.configureDnsSettingsSettings(),
            clientContext);
    this.configureDnsSettingsOperationCallable =
        callableFactory.createOperationCallable(
            configureDnsSettingsTransportSettings,
            settings.configureDnsSettingsOperationSettings(),
            clientContext,
            operationsStub);
    this.configureContactSettingsCallable =
        callableFactory.createUnaryCallable(
            configureContactSettingsTransportSettings,
            settings.configureContactSettingsSettings(),
            clientContext);
    this.configureContactSettingsOperationCallable =
        callableFactory.createOperationCallable(
            configureContactSettingsTransportSettings,
            settings.configureContactSettingsOperationSettings(),
            clientContext,
            operationsStub);
    this.exportRegistrationCallable =
        callableFactory.createUnaryCallable(
            exportRegistrationTransportSettings,
            settings.exportRegistrationSettings(),
            clientContext);
    this.exportRegistrationOperationCallable =
        callableFactory.createOperationCallable(
            exportRegistrationTransportSettings,
            settings.exportRegistrationOperationSettings(),
            clientContext,
            operationsStub);
    this.deleteRegistrationCallable =
        callableFactory.createUnaryCallable(
            deleteRegistrationTransportSettings,
            settings.deleteRegistrationSettings(),
            clientContext);
    this.deleteRegistrationOperationCallable =
        callableFactory.createOperationCallable(
            deleteRegistrationTransportSettings,
            settings.deleteRegistrationOperationSettings(),
            clientContext,
            operationsStub);
    this.retrieveAuthorizationCodeCallable =
        callableFactory.createUnaryCallable(
            retrieveAuthorizationCodeTransportSettings,
            settings.retrieveAuthorizationCodeSettings(),
            clientContext);
    this.resetAuthorizationCodeCallable =
        callableFactory.createUnaryCallable(
            resetAuthorizationCodeTransportSettings,
            settings.resetAuthorizationCodeSettings(),
            clientContext);
    // Collect the context's background resources so close()/shutdown() can
    // release them all through one aggregate.
    this.backgroundResources =
        new BackgroundResourceAggregation(clientContext.getBackgroundResources());
  }
  /** Returns the stub used by the operation callables to poll long-running operations. */
  public GrpcOperationsStub getOperationsStub() {
    return operationsStub;
  }
  // Accessors exposing the callables wired in the constructor. Each simply
  // returns the corresponding pre-built field.
  @Override
  public UnaryCallable<SearchDomainsRequest, SearchDomainsResponse> searchDomainsCallable() {
    return searchDomainsCallable;
  }
  @Override
  public UnaryCallable<RetrieveRegisterParametersRequest, RetrieveRegisterParametersResponse>
      retrieveRegisterParametersCallable() {
    return retrieveRegisterParametersCallable;
  }
  @Override
  public UnaryCallable<RegisterDomainRequest, Operation> registerDomainCallable() {
    return registerDomainCallable;
  }
  @Override
  public OperationCallable<RegisterDomainRequest, Registration, OperationMetadata>
      registerDomainOperationCallable() {
    return registerDomainOperationCallable;
  }
  @Override
  public UnaryCallable<RetrieveTransferParametersRequest, RetrieveTransferParametersResponse>
      retrieveTransferParametersCallable() {
    return retrieveTransferParametersCallable;
  }
  @Override
  public UnaryCallable<TransferDomainRequest, Operation> transferDomainCallable() {
    return transferDomainCallable;
  }
  @Override
  public OperationCallable<TransferDomainRequest, Registration, OperationMetadata>
      transferDomainOperationCallable() {
    return transferDomainOperationCallable;
  }
  @Override
  public UnaryCallable<ListRegistrationsRequest, ListRegistrationsResponse>
      listRegistrationsCallable() {
    return listRegistrationsCallable;
  }
  @Override
  public UnaryCallable<ListRegistrationsRequest, ListRegistrationsPagedResponse>
      listRegistrationsPagedCallable() {
    return listRegistrationsPagedCallable;
  }
  @Override
  public UnaryCallable<GetRegistrationRequest, Registration> getRegistrationCallable() {
    return getRegistrationCallable;
  }
  // Accessors for the registration-mutation callables; each pairs a raw unary
  // callable with its long-running-operation counterpart.
  @Override
  public UnaryCallable<UpdateRegistrationRequest, Operation> updateRegistrationCallable() {
    return updateRegistrationCallable;
  }
  @Override
  public OperationCallable<UpdateRegistrationRequest, Registration, OperationMetadata>
      updateRegistrationOperationCallable() {
    return updateRegistrationOperationCallable;
  }
  @Override
  public UnaryCallable<ConfigureManagementSettingsRequest, Operation>
      configureManagementSettingsCallable() {
    return configureManagementSettingsCallable;
  }
  @Override
  public OperationCallable<ConfigureManagementSettingsRequest, Registration, OperationMetadata>
      configureManagementSettingsOperationCallable() {
    return configureManagementSettingsOperationCallable;
  }
  @Override
  public UnaryCallable<ConfigureDnsSettingsRequest, Operation> configureDnsSettingsCallable() {
    return configureDnsSettingsCallable;
  }
  @Override
  public OperationCallable<ConfigureDnsSettingsRequest, Registration, OperationMetadata>
      configureDnsSettingsOperationCallable() {
    return configureDnsSettingsOperationCallable;
  }
  @Override
  public UnaryCallable<ConfigureContactSettingsRequest, Operation>
      configureContactSettingsCallable() {
    return configureContactSettingsCallable;
  }
  @Override
  public OperationCallable<ConfigureContactSettingsRequest, Registration, OperationMetadata>
      configureContactSettingsOperationCallable() {
    return configureContactSettingsOperationCallable;
  }
  @Override
  public UnaryCallable<ExportRegistrationRequest, Operation> exportRegistrationCallable() {
    return exportRegistrationCallable;
  }
  @Override
  public OperationCallable<ExportRegistrationRequest, Registration, OperationMetadata>
      exportRegistrationOperationCallable() {
    return exportRegistrationOperationCallable;
  }
}
@Override
public UnaryCallable<DeleteRegistrationRequest, Operation> deleteRegistrationCallable() {
return deleteRegistrationCallable;
}
@Override
public OperationCallable<DeleteRegistrationRequest, Empty, OperationMetadata>
deleteRegistrationOperationCallable() {
return deleteRegistrationOperationCallable;
}
@Override
public UnaryCallable<RetrieveAuthorizationCodeRequest, AuthorizationCode>
retrieveAuthorizationCodeCallable() {
return retrieveAuthorizationCodeCallable;
}
@Override
public UnaryCallable<ResetAuthorizationCodeRequest, AuthorizationCode>
resetAuthorizationCodeCallable() {
return resetAuthorizationCodeCallable;
}
@Override
public final void close() {
try {
backgroundResources.close();
} catch (RuntimeException e) {
throw e;
} catch (Exception e) {
throw new IllegalStateException("Failed to close resource", e);
}
}
  // BackgroundResource lifecycle methods: every call delegates directly to the
  // aggregated background resources collected in the constructor.
  @Override
  public void shutdown() {
    backgroundResources.shutdown();
  }
  @Override
  public boolean isShutdown() {
    return backgroundResources.isShutdown();
  }
  @Override
  public boolean isTerminated() {
    return backgroundResources.isTerminated();
  }
  @Override
  public void shutdownNow() {
    backgroundResources.shutdownNow();
  }
  @Override
  public boolean awaitTermination(long duration, TimeUnit unit) throws InterruptedException {
    return backgroundResources.awaitTermination(duration, unit);
  }
}
// ==== concatenation artifact (dataset join row) — not part of either source file ====
// repo: googleapis/google-cloud-java, size: 36982
// file: java-service-management/proto-google-cloud-service-management-v1/src/main/java/com/google/api/servicemanagement/v1/ListServicesResponse.java
/*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/api/servicemanagement/v1/servicemanager.proto
// Protobuf Java Version: 3.25.8
package com.google.api.servicemanagement.v1;
/**
*
*
* <pre>
* Response message for `ListServices` method.
* </pre>
*
* Protobuf type {@code google.api.servicemanagement.v1.ListServicesResponse}
*/
public final class ListServicesResponse extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.api.servicemanagement.v1.ListServicesResponse)
ListServicesResponseOrBuilder {
  private static final long serialVersionUID = 0L;
  // Use ListServicesResponse.newBuilder() to construct.
  private ListServicesResponse(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
    super(builder);
  }
  // No-arg constructor used for the default instance: empty service list and
  // empty page token.
  private ListServicesResponse() {
    services_ = java.util.Collections.emptyList();
    nextPageToken_ = "";
  }
  // Creates a fresh instance; the UnusedPrivateParameter argument is a marker
  // and is never read (hence the @SuppressWarnings).
  @java.lang.Override
  @SuppressWarnings({"unused"})
  protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
    return new ListServicesResponse();
  }
  /** Returns the protobuf descriptor for this message type. */
  public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
    return com.google.api.servicemanagement.v1.ServiceManagerProto
        .internal_static_google_api_servicemanagement_v1_ListServicesResponse_descriptor;
  }
  // Supplies the reflection table mapping descriptor fields to this class's
  // generated accessors.
  @java.lang.Override
  protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internalGetFieldAccessorTable() {
    return com.google.api.servicemanagement.v1.ServiceManagerProto
        .internal_static_google_api_servicemanagement_v1_ListServicesResponse_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            com.google.api.servicemanagement.v1.ListServicesResponse.class,
            com.google.api.servicemanagement.v1.ListServicesResponse.Builder.class);
  }
  public static final int SERVICES_FIELD_NUMBER = 1;
  @SuppressWarnings("serial")
  private java.util.List<com.google.api.servicemanagement.v1.ManagedService> services_;
  /**
   *
   *
   * <pre>
   * The returned services will only have the name field set.
   * </pre>
   *
   * <code>repeated .google.api.servicemanagement.v1.ManagedService services = 1;</code>
   *
   * @return the list of services in this response page
   */
  @java.lang.Override
  public java.util.List<com.google.api.servicemanagement.v1.ManagedService> getServicesList() {
    return services_;
  }
  /**
   *
   *
   * <pre>
   * The returned services will only have the name field set.
   * </pre>
   *
   * <code>repeated .google.api.servicemanagement.v1.ManagedService services = 1;</code>
   *
   * @return the same list, viewed through the OrBuilder interface
   */
  @java.lang.Override
  public java.util.List<? extends com.google.api.servicemanagement.v1.ManagedServiceOrBuilder>
      getServicesOrBuilderList() {
    return services_;
  }
  /**
   *
   *
   * <pre>
   * The returned services will only have the name field set.
   * </pre>
   *
   * <code>repeated .google.api.servicemanagement.v1.ManagedService services = 1;</code>
   *
   * @return the number of services in this response page
   */
  @java.lang.Override
  public int getServicesCount() {
    return services_.size();
  }
  /**
   *
   *
   * <pre>
   * The returned services will only have the name field set.
   * </pre>
   *
   * <code>repeated .google.api.servicemanagement.v1.ManagedService services = 1;</code>
   *
   * @param index position in the services list
   * @return the service at {@code index}
   */
  @java.lang.Override
  public com.google.api.servicemanagement.v1.ManagedService getServices(int index) {
    return services_.get(index);
  }
  /**
   *
   *
   * <pre>
   * The returned services will only have the name field set.
   * </pre>
   *
   * <code>repeated .google.api.servicemanagement.v1.ManagedService services = 1;</code>
   *
   * @param index position in the services list
   * @return the service at {@code index}, viewed through the OrBuilder interface
   */
  @java.lang.Override
  public com.google.api.servicemanagement.v1.ManagedServiceOrBuilder getServicesOrBuilder(
      int index) {
    return services_.get(index);
  }
  public static final int NEXT_PAGE_TOKEN_FIELD_NUMBER = 2;
  // Holds either a String or a ByteString; whichever form is requested first is
  // decoded/encoded lazily and cached back into this field.
  @SuppressWarnings("serial")
  private volatile java.lang.Object nextPageToken_ = "";
  /**
   *
   *
   * <pre>
   * Token that can be passed to `ListServices` to resume a paginated query.
   * </pre>
   *
   * <code>string next_page_token = 2;</code>
   *
   * @return The nextPageToken.
   */
  @java.lang.Override
  public java.lang.String getNextPageToken() {
    java.lang.Object ref = nextPageToken_;
    if (ref instanceof java.lang.String) {
      return (java.lang.String) ref;
    } else {
      // Field still holds the wire-format ByteString: decode once and cache the String.
      com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
      java.lang.String s = bs.toStringUtf8();
      nextPageToken_ = s;
      return s;
    }
  }
  /**
   *
   *
   * <pre>
   * Token that can be passed to `ListServices` to resume a paginated query.
   * </pre>
   *
   * <code>string next_page_token = 2;</code>
   *
   * @return The bytes for nextPageToken.
   */
  @java.lang.Override
  public com.google.protobuf.ByteString getNextPageTokenBytes() {
    java.lang.Object ref = nextPageToken_;
    if (ref instanceof java.lang.String) {
      // Field currently holds a String: encode once and cache the ByteString.
      com.google.protobuf.ByteString b =
          com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
      nextPageToken_ = b;
      return b;
    } else {
      return (com.google.protobuf.ByteString) ref;
    }
  }
  // Memoized initialization state: -1 = not yet computed, 0 = false, 1 = true.
  private byte memoizedIsInitialized = -1;
  @java.lang.Override
  public final boolean isInitialized() {
    byte isInitialized = memoizedIsInitialized;
    if (isInitialized == 1) return true;
    if (isInitialized == 0) return false;
    // This message has no required fields, so it is always initialized.
    memoizedIsInitialized = 1;
    return true;
  }
  // Serializes this message: repeated services as field 1, next_page_token as
  // field 2 (only when non-empty), then any unknown fields.
  @java.lang.Override
  public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
    for (int i = 0; i < services_.size(); i++) {
      output.writeMessage(1, services_.get(i));
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 2, nextPageToken_);
    }
    getUnknownFields().writeTo(output);
  }
  // Computes (and memoizes in memoizedSize) the wire size, mirroring the field
  // order and skip-if-empty behavior of writeTo.
  @java.lang.Override
  public int getSerializedSize() {
    int size = memoizedSize;
    if (size != -1) return size;
    size = 0;
    for (int i = 0; i < services_.size(); i++) {
      size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, services_.get(i));
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, nextPageToken_);
    }
    size += getUnknownFields().getSerializedSize();
    memoizedSize = size;
    return size;
  }
  // Field-by-field equality: services list, next_page_token, and unknown fields
  // must all match. Non-ListServicesResponse objects defer to super.equals.
  @java.lang.Override
  public boolean equals(final java.lang.Object obj) {
    if (obj == this) {
      return true;
    }
    if (!(obj instanceof com.google.api.servicemanagement.v1.ListServicesResponse)) {
      return super.equals(obj);
    }
    com.google.api.servicemanagement.v1.ListServicesResponse other =
        (com.google.api.servicemanagement.v1.ListServicesResponse) obj;
    if (!getServicesList().equals(other.getServicesList())) return false;
    if (!getNextPageToken().equals(other.getNextPageToken())) return false;
    if (!getUnknownFields().equals(other.getUnknownFields())) return false;
    return true;
  }
  // Memoized hash over the descriptor, each set field (keyed by field number),
  // and unknown fields; consistent with equals above.
  @java.lang.Override
  public int hashCode() {
    if (memoizedHashCode != 0) {
      return memoizedHashCode;
    }
    int hash = 41;
    hash = (19 * hash) + getDescriptor().hashCode();
    if (getServicesCount() > 0) {
      hash = (37 * hash) + SERVICES_FIELD_NUMBER;
      hash = (53 * hash) + getServicesList().hashCode();
    }
    hash = (37 * hash) + NEXT_PAGE_TOKEN_FIELD_NUMBER;
    hash = (53 * hash) + getNextPageToken().hashCode();
    hash = (29 * hash) + getUnknownFields().hashCode();
    memoizedHashCode = hash;
    return hash;
  }
  // Static parse entry points: one overload per supported input source
  // (ByteBuffer, ByteString, byte[], InputStream, CodedInputStream), each with
  // and without an extension registry. All delegate to PARSER or to the
  // GeneratedMessageV3 parse helpers.
  public static com.google.api.servicemanagement.v1.ListServicesResponse parseFrom(
      java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.api.servicemanagement.v1.ListServicesResponse parseFrom(
      java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.api.servicemanagement.v1.ListServicesResponse parseFrom(
      com.google.protobuf.ByteString data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.api.servicemanagement.v1.ListServicesResponse parseFrom(
      com.google.protobuf.ByteString data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.api.servicemanagement.v1.ListServicesResponse parseFrom(byte[] data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.api.servicemanagement.v1.ListServicesResponse parseFrom(
      byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.api.servicemanagement.v1.ListServicesResponse parseFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }
  public static com.google.api.servicemanagement.v1.ListServicesResponse parseFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }
  // Delimited variants read a length-prefixed message from the stream.
  public static com.google.api.servicemanagement.v1.ListServicesResponse parseDelimitedFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
  }
  public static com.google.api.servicemanagement.v1.ListServicesResponse parseDelimitedFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
        PARSER, input, extensionRegistry);
  }
  public static com.google.api.servicemanagement.v1.ListServicesResponse parseFrom(
      com.google.protobuf.CodedInputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }
  public static com.google.api.servicemanagement.v1.ListServicesResponse parseFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }
  @java.lang.Override
  public Builder newBuilderForType() {
    return newBuilder();
  }
  // Returns a builder initialized to the default (all fields unset) message.
  public static Builder newBuilder() {
    return DEFAULT_INSTANCE.toBuilder();
  }
  // Returns a builder pre-populated with all fields copied from {@code prototype}.
  public static Builder newBuilder(
      com.google.api.servicemanagement.v1.ListServicesResponse prototype) {
    return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
  }
  @java.lang.Override
  public Builder toBuilder() {
    // Skip the mergeFrom pass when this is the all-default singleton.
    return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
  }
  @java.lang.Override
  protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
    Builder builder = new Builder(parent);
    return builder;
  }
  /**
   *
   *
   * <pre>
   * Response message for `ListServices` method.
   * </pre>
   *
   * Protobuf type {@code google.api.servicemanagement.v1.ListServicesResponse}
   */
  public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
      implements
      // @@protoc_insertion_point(builder_implements:google.api.servicemanagement.v1.ListServicesResponse)
      com.google.api.servicemanagement.v1.ListServicesResponseOrBuilder {
    public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
      return com.google.api.servicemanagement.v1.ServiceManagerProto
          .internal_static_google_api_servicemanagement_v1_ListServicesResponse_descriptor;
    }
    @java.lang.Override
    protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return com.google.api.servicemanagement.v1.ServiceManagerProto
          .internal_static_google_api_servicemanagement_v1_ListServicesResponse_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              com.google.api.servicemanagement.v1.ListServicesResponse.class,
              com.google.api.servicemanagement.v1.ListServicesResponse.Builder.class);
    }
    // Construct using com.google.api.servicemanagement.v1.ListServicesResponse.newBuilder()
    private Builder() {}
    private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
      super(parent);
    }
    // Resets every field to its default and drops both bitField0_ bits
    // (0x1 = builder owns a mutable services_ list, 0x2 = nextPageToken set).
    @java.lang.Override
    public Builder clear() {
      super.clear();
      bitField0_ = 0;
      if (servicesBuilder_ == null) {
        services_ = java.util.Collections.emptyList();
      } else {
        services_ = null;
        servicesBuilder_.clear();
      }
      bitField0_ = (bitField0_ & ~0x00000001);
      nextPageToken_ = "";
      return this;
    }
    @java.lang.Override
    public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
      return com.google.api.servicemanagement.v1.ServiceManagerProto
          .internal_static_google_api_servicemanagement_v1_ListServicesResponse_descriptor;
    }
    @java.lang.Override
    public com.google.api.servicemanagement.v1.ListServicesResponse getDefaultInstanceForType() {
      return com.google.api.servicemanagement.v1.ListServicesResponse.getDefaultInstance();
    }
    @java.lang.Override
    public com.google.api.servicemanagement.v1.ListServicesResponse build() {
      com.google.api.servicemanagement.v1.ListServicesResponse result = buildPartial();
      if (!result.isInitialized()) {
        throw newUninitializedMessageException(result);
      }
      return result;
    }
    @java.lang.Override
    public com.google.api.servicemanagement.v1.ListServicesResponse buildPartial() {
      com.google.api.servicemanagement.v1.ListServicesResponse result =
          new com.google.api.servicemanagement.v1.ListServicesResponse(this);
      buildPartialRepeatedFields(result);
      if (bitField0_ != 0) {
        buildPartial0(result);
      }
      onBuilt();
      return result;
    }
    // Transfers ownership of the repeated field to the built message: the
    // list is frozen (made unmodifiable) and the mutable-list bit cleared so
    // later builder mutations copy-on-write instead of aliasing the message.
    private void buildPartialRepeatedFields(
        com.google.api.servicemanagement.v1.ListServicesResponse result) {
      if (servicesBuilder_ == null) {
        if (((bitField0_ & 0x00000001) != 0)) {
          services_ = java.util.Collections.unmodifiableList(services_);
          bitField0_ = (bitField0_ & ~0x00000001);
        }
        result.services_ = services_;
      } else {
        result.services_ = servicesBuilder_.build();
      }
    }
    // Copies scalar fields whose "has been set" bit is on.
    private void buildPartial0(com.google.api.servicemanagement.v1.ListServicesResponse result) {
      int from_bitField0_ = bitField0_;
      if (((from_bitField0_ & 0x00000002) != 0)) {
        result.nextPageToken_ = nextPageToken_;
      }
    }
    @java.lang.Override
    public Builder clone() {
      return super.clone();
    }
    @java.lang.Override
    public Builder setField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.setField(field, value);
    }
    @java.lang.Override
    public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
      return super.clearField(field);
    }
    @java.lang.Override
    public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
      return super.clearOneof(oneof);
    }
    @java.lang.Override
    public Builder setRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
      return super.setRepeatedField(field, index, value);
    }
    @java.lang.Override
    public Builder addRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.addRepeatedField(field, value);
    }
    // Dynamic dispatch: route to the typed overload when possible.
    @java.lang.Override
    public Builder mergeFrom(com.google.protobuf.Message other) {
      if (other instanceof com.google.api.servicemanagement.v1.ListServicesResponse) {
        return mergeFrom((com.google.api.servicemanagement.v1.ListServicesResponse) other);
      } else {
        super.mergeFrom(other);
        return this;
      }
    }
    // Merges field-by-field: repeated services are appended (sharing other's
    // immutable list when ours is empty); nextPageToken is overwritten only
    // when non-empty in {@code other}.
    public Builder mergeFrom(com.google.api.servicemanagement.v1.ListServicesResponse other) {
      if (other == com.google.api.servicemanagement.v1.ListServicesResponse.getDefaultInstance())
        return this;
      if (servicesBuilder_ == null) {
        // Plain-list path.
        if (!other.services_.isEmpty()) {
          if (services_.isEmpty()) {
            services_ = other.services_;
            bitField0_ = (bitField0_ & ~0x00000001);
          } else {
            ensureServicesIsMutable();
            services_.addAll(other.services_);
          }
          onChanged();
        }
      } else {
        // Field-builder path: if ours is empty, adopt other's list directly
        // and recreate the builder lazily only when nested builders are forced.
        if (!other.services_.isEmpty()) {
          if (servicesBuilder_.isEmpty()) {
            servicesBuilder_.dispose();
            servicesBuilder_ = null;
            services_ = other.services_;
            bitField0_ = (bitField0_ & ~0x00000001);
            servicesBuilder_ =
                com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders
                    ? getServicesFieldBuilder()
                    : null;
          } else {
            servicesBuilder_.addAllMessages(other.services_);
          }
        }
      }
      if (!other.getNextPageToken().isEmpty()) {
        nextPageToken_ = other.nextPageToken_;
        bitField0_ |= 0x00000002;
        onChanged();
      }
      this.mergeUnknownFields(other.getUnknownFields());
      onChanged();
      return this;
    }
    @java.lang.Override
    public final boolean isInitialized() {
      return true;
    }
    // Wire-format parser loop: dispatches on field tag (field 1 = services
    // message, field 2 = next_page_token string); unknown fields are preserved.
    @java.lang.Override
    public Builder mergeFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      if (extensionRegistry == null) {
        throw new java.lang.NullPointerException();
      }
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              done = true;
              break;
            case 10:
              {
                com.google.api.servicemanagement.v1.ManagedService m =
                    input.readMessage(
                        com.google.api.servicemanagement.v1.ManagedService.parser(),
                        extensionRegistry);
                if (servicesBuilder_ == null) {
                  ensureServicesIsMutable();
                  services_.add(m);
                } else {
                  servicesBuilder_.addMessage(m);
                }
                break;
              } // case 10
            case 18:
              {
                nextPageToken_ = input.readStringRequireUtf8();
                bitField0_ |= 0x00000002;
                break;
              } // case 18
            default:
              {
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
          } // switch (tag)
        } // while (!done)
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.unwrapIOException();
      } finally {
        onChanged();
      } // finally
      return this;
    }
    // Bit 0x1: services_ is a builder-owned mutable list; bit 0x2: nextPageToken set.
    private int bitField0_;
    private java.util.List<com.google.api.servicemanagement.v1.ManagedService> services_ =
        java.util.Collections.emptyList();
    // Copy-on-write: clone the (possibly shared/immutable) list before mutating.
    private void ensureServicesIsMutable() {
      if (!((bitField0_ & 0x00000001) != 0)) {
        services_ =
            new java.util.ArrayList<com.google.api.servicemanagement.v1.ManagedService>(services_);
        bitField0_ |= 0x00000001;
      }
    }
    // Lazily created; once non-null it owns the repeated field instead of services_.
    private com.google.protobuf.RepeatedFieldBuilderV3<
            com.google.api.servicemanagement.v1.ManagedService,
            com.google.api.servicemanagement.v1.ManagedService.Builder,
            com.google.api.servicemanagement.v1.ManagedServiceOrBuilder>
        servicesBuilder_;
    /**
     *
     *
     * <pre>
     * The returned services will only have the name field set.
     * </pre>
     *
     * <code>repeated .google.api.servicemanagement.v1.ManagedService services = 1;</code>
     */
    public java.util.List<com.google.api.servicemanagement.v1.ManagedService> getServicesList() {
      if (servicesBuilder_ == null) {
        return java.util.Collections.unmodifiableList(services_);
      } else {
        return servicesBuilder_.getMessageList();
      }
    }
    /**
     *
     *
     * <pre>
     * The returned services will only have the name field set.
     * </pre>
     *
     * <code>repeated .google.api.servicemanagement.v1.ManagedService services = 1;</code>
     */
    public int getServicesCount() {
      if (servicesBuilder_ == null) {
        return services_.size();
      } else {
        return servicesBuilder_.getCount();
      }
    }
    /**
     *
     *
     * <pre>
     * The returned services will only have the name field set.
     * </pre>
     *
     * <code>repeated .google.api.servicemanagement.v1.ManagedService services = 1;</code>
     */
    public com.google.api.servicemanagement.v1.ManagedService getServices(int index) {
      if (servicesBuilder_ == null) {
        return services_.get(index);
      } else {
        return servicesBuilder_.getMessage(index);
      }
    }
    /**
     *
     *
     * <pre>
     * The returned services will only have the name field set.
     * </pre>
     *
     * <code>repeated .google.api.servicemanagement.v1.ManagedService services = 1;</code>
     */
    public Builder setServices(
        int index, com.google.api.servicemanagement.v1.ManagedService value) {
      if (servicesBuilder_ == null) {
        if (value == null) {
          throw new NullPointerException();
        }
        ensureServicesIsMutable();
        services_.set(index, value);
        onChanged();
      } else {
        servicesBuilder_.setMessage(index, value);
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * The returned services will only have the name field set.
     * </pre>
     *
     * <code>repeated .google.api.servicemanagement.v1.ManagedService services = 1;</code>
     */
    public Builder setServices(
        int index, com.google.api.servicemanagement.v1.ManagedService.Builder builderForValue) {
      if (servicesBuilder_ == null) {
        ensureServicesIsMutable();
        services_.set(index, builderForValue.build());
        onChanged();
      } else {
        servicesBuilder_.setMessage(index, builderForValue.build());
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * The returned services will only have the name field set.
     * </pre>
     *
     * <code>repeated .google.api.servicemanagement.v1.ManagedService services = 1;</code>
     */
    public Builder addServices(com.google.api.servicemanagement.v1.ManagedService value) {
      if (servicesBuilder_ == null) {
        if (value == null) {
          throw new NullPointerException();
        }
        ensureServicesIsMutable();
        services_.add(value);
        onChanged();
      } else {
        servicesBuilder_.addMessage(value);
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * The returned services will only have the name field set.
     * </pre>
     *
     * <code>repeated .google.api.servicemanagement.v1.ManagedService services = 1;</code>
     */
    public Builder addServices(
        int index, com.google.api.servicemanagement.v1.ManagedService value) {
      if (servicesBuilder_ == null) {
        if (value == null) {
          throw new NullPointerException();
        }
        ensureServicesIsMutable();
        services_.add(index, value);
        onChanged();
      } else {
        servicesBuilder_.addMessage(index, value);
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * The returned services will only have the name field set.
     * </pre>
     *
     * <code>repeated .google.api.servicemanagement.v1.ManagedService services = 1;</code>
     */
    public Builder addServices(
        com.google.api.servicemanagement.v1.ManagedService.Builder builderForValue) {
      if (servicesBuilder_ == null) {
        ensureServicesIsMutable();
        services_.add(builderForValue.build());
        onChanged();
      } else {
        servicesBuilder_.addMessage(builderForValue.build());
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * The returned services will only have the name field set.
     * </pre>
     *
     * <code>repeated .google.api.servicemanagement.v1.ManagedService services = 1;</code>
     */
    public Builder addServices(
        int index, com.google.api.servicemanagement.v1.ManagedService.Builder builderForValue) {
      if (servicesBuilder_ == null) {
        ensureServicesIsMutable();
        services_.add(index, builderForValue.build());
        onChanged();
      } else {
        servicesBuilder_.addMessage(index, builderForValue.build());
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * The returned services will only have the name field set.
     * </pre>
     *
     * <code>repeated .google.api.servicemanagement.v1.ManagedService services = 1;</code>
     */
    public Builder addAllServices(
        java.lang.Iterable<? extends com.google.api.servicemanagement.v1.ManagedService> values) {
      if (servicesBuilder_ == null) {
        ensureServicesIsMutable();
        com.google.protobuf.AbstractMessageLite.Builder.addAll(values, services_);
        onChanged();
      } else {
        servicesBuilder_.addAllMessages(values);
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * The returned services will only have the name field set.
     * </pre>
     *
     * <code>repeated .google.api.servicemanagement.v1.ManagedService services = 1;</code>
     */
    public Builder clearServices() {
      if (servicesBuilder_ == null) {
        services_ = java.util.Collections.emptyList();
        bitField0_ = (bitField0_ & ~0x00000001);
        onChanged();
      } else {
        servicesBuilder_.clear();
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * The returned services will only have the name field set.
     * </pre>
     *
     * <code>repeated .google.api.servicemanagement.v1.ManagedService services = 1;</code>
     */
    public Builder removeServices(int index) {
      if (servicesBuilder_ == null) {
        ensureServicesIsMutable();
        services_.remove(index);
        onChanged();
      } else {
        servicesBuilder_.remove(index);
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * The returned services will only have the name field set.
     * </pre>
     *
     * <code>repeated .google.api.servicemanagement.v1.ManagedService services = 1;</code>
     */
    public com.google.api.servicemanagement.v1.ManagedService.Builder getServicesBuilder(
        int index) {
      return getServicesFieldBuilder().getBuilder(index);
    }
    /**
     *
     *
     * <pre>
     * The returned services will only have the name field set.
     * </pre>
     *
     * <code>repeated .google.api.servicemanagement.v1.ManagedService services = 1;</code>
     */
    public com.google.api.servicemanagement.v1.ManagedServiceOrBuilder getServicesOrBuilder(
        int index) {
      if (servicesBuilder_ == null) {
        return services_.get(index);
      } else {
        return servicesBuilder_.getMessageOrBuilder(index);
      }
    }
    /**
     *
     *
     * <pre>
     * The returned services will only have the name field set.
     * </pre>
     *
     * <code>repeated .google.api.servicemanagement.v1.ManagedService services = 1;</code>
     */
    public java.util.List<? extends com.google.api.servicemanagement.v1.ManagedServiceOrBuilder>
        getServicesOrBuilderList() {
      if (servicesBuilder_ != null) {
        return servicesBuilder_.getMessageOrBuilderList();
      } else {
        return java.util.Collections.unmodifiableList(services_);
      }
    }
    /**
     *
     *
     * <pre>
     * The returned services will only have the name field set.
     * </pre>
     *
     * <code>repeated .google.api.servicemanagement.v1.ManagedService services = 1;</code>
     */
    public com.google.api.servicemanagement.v1.ManagedService.Builder addServicesBuilder() {
      return getServicesFieldBuilder()
          .addBuilder(com.google.api.servicemanagement.v1.ManagedService.getDefaultInstance());
    }
    /**
     *
     *
     * <pre>
     * The returned services will only have the name field set.
     * </pre>
     *
     * <code>repeated .google.api.servicemanagement.v1.ManagedService services = 1;</code>
     */
    public com.google.api.servicemanagement.v1.ManagedService.Builder addServicesBuilder(
        int index) {
      return getServicesFieldBuilder()
          .addBuilder(
              index, com.google.api.servicemanagement.v1.ManagedService.getDefaultInstance());
    }
    /**
     *
     *
     * <pre>
     * The returned services will only have the name field set.
     * </pre>
     *
     * <code>repeated .google.api.servicemanagement.v1.ManagedService services = 1;</code>
     */
    public java.util.List<com.google.api.servicemanagement.v1.ManagedService.Builder>
        getServicesBuilderList() {
      return getServicesFieldBuilder().getBuilderList();
    }
    // Lazy initialization: on first call the current services_ list is handed
    // to the RepeatedFieldBuilderV3 and services_ is nulled out, so the
    // builder becomes the single owner of the repeated field from then on.
    private com.google.protobuf.RepeatedFieldBuilderV3<
            com.google.api.servicemanagement.v1.ManagedService,
            com.google.api.servicemanagement.v1.ManagedService.Builder,
            com.google.api.servicemanagement.v1.ManagedServiceOrBuilder>
        getServicesFieldBuilder() {
      if (servicesBuilder_ == null) {
        servicesBuilder_ =
            new com.google.protobuf.RepeatedFieldBuilderV3<
                com.google.api.servicemanagement.v1.ManagedService,
                com.google.api.servicemanagement.v1.ManagedService.Builder,
                com.google.api.servicemanagement.v1.ManagedServiceOrBuilder>(
                services_, ((bitField0_ & 0x00000001) != 0), getParentForChildren(), isClean());
        services_ = null;
      }
      return servicesBuilder_;
    }
    // Holds either a String or a UTF-8 ByteString; converted lazily by the getters.
    private java.lang.Object nextPageToken_ = "";
    /**
     *
     *
     * <pre>
     * Token that can be passed to `ListServices` to resume a paginated query.
     * </pre>
     *
     * <code>string next_page_token = 2;</code>
     *
     * @return The nextPageToken.
     */
    public java.lang.String getNextPageToken() {
      java.lang.Object ref = nextPageToken_;
      if (!(ref instanceof java.lang.String)) {
        com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        nextPageToken_ = s;
        return s;
      } else {
        return (java.lang.String) ref;
      }
    }
    /**
     *
     *
     * <pre>
     * Token that can be passed to `ListServices` to resume a paginated query.
     * </pre>
     *
     * <code>string next_page_token = 2;</code>
     *
     * @return The bytes for nextPageToken.
     */
    public com.google.protobuf.ByteString getNextPageTokenBytes() {
      java.lang.Object ref = nextPageToken_;
      if (ref instanceof String) {
        com.google.protobuf.ByteString b =
            com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
        nextPageToken_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }
    /**
     *
     *
     * <pre>
     * Token that can be passed to `ListServices` to resume a paginated query.
     * </pre>
     *
     * <code>string next_page_token = 2;</code>
     *
     * @param value The nextPageToken to set.
     * @return This builder for chaining.
     */
    public Builder setNextPageToken(java.lang.String value) {
      if (value == null) {
        throw new NullPointerException();
      }
      nextPageToken_ = value;
      bitField0_ |= 0x00000002;
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Token that can be passed to `ListServices` to resume a paginated query.
     * </pre>
     *
     * <code>string next_page_token = 2;</code>
     *
     * @return This builder for chaining.
     */
    public Builder clearNextPageToken() {
      nextPageToken_ = getDefaultInstance().getNextPageToken();
      bitField0_ = (bitField0_ & ~0x00000002);
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Token that can be passed to `ListServices` to resume a paginated query.
     * </pre>
     *
     * <code>string next_page_token = 2;</code>
     *
     * @param value The bytes for nextPageToken to set.
     * @return This builder for chaining.
     */
    public Builder setNextPageTokenBytes(com.google.protobuf.ByteString value) {
      if (value == null) {
        throw new NullPointerException();
      }
      checkByteStringIsUtf8(value);
      nextPageToken_ = value;
      bitField0_ |= 0x00000002;
      onChanged();
      return this;
    }
    @java.lang.Override
    public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.setUnknownFields(unknownFields);
    }
    @java.lang.Override
    public final Builder mergeUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.mergeUnknownFields(unknownFields);
    }
    // @@protoc_insertion_point(builder_scope:google.api.servicemanagement.v1.ListServicesResponse)
  }
  // @@protoc_insertion_point(class_scope:google.api.servicemanagement.v1.ListServicesResponse)
  // Shared immutable instance with every field at its default value.
  private static final com.google.api.servicemanagement.v1.ListServicesResponse DEFAULT_INSTANCE;
  static {
    DEFAULT_INSTANCE = new com.google.api.servicemanagement.v1.ListServicesResponse();
  }
  public static com.google.api.servicemanagement.v1.ListServicesResponse getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }
  // Parser used by all parseFrom overloads. On failure it attaches the
  // partially built message to the thrown InvalidProtocolBufferException
  // (see setUnfinishedMessage below).
  private static final com.google.protobuf.Parser<ListServicesResponse> PARSER =
      new com.google.protobuf.AbstractParser<ListServicesResponse>() {
        @java.lang.Override
        public ListServicesResponse parsePartialFrom(
            com.google.protobuf.CodedInputStream input,
            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
            throws com.google.protobuf.InvalidProtocolBufferException {
          Builder builder = newBuilder();
          try {
            builder.mergeFrom(input, extensionRegistry);
          } catch (com.google.protobuf.InvalidProtocolBufferException e) {
            throw e.setUnfinishedMessage(builder.buildPartial());
          } catch (com.google.protobuf.UninitializedMessageException e) {
            throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
          } catch (java.io.IOException e) {
            throw new com.google.protobuf.InvalidProtocolBufferException(e)
                .setUnfinishedMessage(builder.buildPartial());
          }
          return builder.buildPartial();
        }
      };
  public static com.google.protobuf.Parser<ListServicesResponse> parser() {
    return PARSER;
  }
  @java.lang.Override
  public com.google.protobuf.Parser<ListServicesResponse> getParserForType() {
    return PARSER;
  }
  @java.lang.Override
  public com.google.api.servicemanagement.v1.ListServicesResponse getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }
}
|
googleapis/google-cloud-java | 36,959 | java-maps-mapsplatformdatasets/proto-google-maps-mapsplatformdatasets-v1/src/main/java/com/google/maps/mapsplatformdatasets/v1/ListDatasetsResponse.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/maps/mapsplatformdatasets/v1/maps_platform_datasets.proto
// Protobuf Java Version: 3.25.8
package com.google.maps.mapsplatformdatasets.v1;
/**
*
*
* <pre>
* Response object of ListDatasets.
* </pre>
*
* Protobuf type {@code google.maps.mapsplatformdatasets.v1.ListDatasetsResponse}
*/
public final class ListDatasetsResponse extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.maps.mapsplatformdatasets.v1.ListDatasetsResponse)
ListDatasetsResponseOrBuilder {
  private static final long serialVersionUID = 0L;
  // Use ListDatasetsResponse.newBuilder() to construct.
  private ListDatasetsResponse(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
    super(builder);
  }
  // Default-value constructor: no datasets, empty page token.
  private ListDatasetsResponse() {
    datasets_ = java.util.Collections.emptyList();
    nextPageToken_ = "";
  }
  @java.lang.Override
  @SuppressWarnings({"unused"})
  protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
    return new ListDatasetsResponse();
  }
  // Descriptor / reflection plumbing generated from
  // google/maps/mapsplatformdatasets/v1/maps_platform_datasets.proto.
  public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
    return com.google.maps.mapsplatformdatasets.v1.MapsPlatformDatasetsProto
        .internal_static_google_maps_mapsplatformdatasets_v1_ListDatasetsResponse_descriptor;
  }
  @java.lang.Override
  protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internalGetFieldAccessorTable() {
    return com.google.maps.mapsplatformdatasets.v1.MapsPlatformDatasetsProto
        .internal_static_google_maps_mapsplatformdatasets_v1_ListDatasetsResponse_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            com.google.maps.mapsplatformdatasets.v1.ListDatasetsResponse.class,
            com.google.maps.mapsplatformdatasets.v1.ListDatasetsResponse.Builder.class);
  }
  public static final int DATASETS_FIELD_NUMBER = 1;
  // Frozen (message-owned) list; exposed directly by the accessors below.
  @SuppressWarnings("serial")
  private java.util.List<com.google.maps.mapsplatformdatasets.v1.Dataset> datasets_;
  /**
   *
   *
   * <pre>
   * All the datasets for the project.
   * </pre>
   *
   * <code>repeated .google.maps.mapsplatformdatasets.v1.Dataset datasets = 1;</code>
   */
  @java.lang.Override
  public java.util.List<com.google.maps.mapsplatformdatasets.v1.Dataset> getDatasetsList() {
    return datasets_;
  }
  /**
   *
   *
   * <pre>
   * All the datasets for the project.
   * </pre>
   *
   * <code>repeated .google.maps.mapsplatformdatasets.v1.Dataset datasets = 1;</code>
   */
  @java.lang.Override
  public java.util.List<? extends com.google.maps.mapsplatformdatasets.v1.DatasetOrBuilder>
      getDatasetsOrBuilderList() {
    return datasets_;
  }
  /**
   *
   *
   * <pre>
   * All the datasets for the project.
   * </pre>
   *
   * <code>repeated .google.maps.mapsplatformdatasets.v1.Dataset datasets = 1;</code>
   */
  @java.lang.Override
  public int getDatasetsCount() {
    return datasets_.size();
  }
  /**
   *
   *
   * <pre>
   * All the datasets for the project.
   * </pre>
   *
   * <code>repeated .google.maps.mapsplatformdatasets.v1.Dataset datasets = 1;</code>
   */
  @java.lang.Override
  public com.google.maps.mapsplatformdatasets.v1.Dataset getDatasets(int index) {
    return datasets_.get(index);
  }
  /**
   *
   *
   * <pre>
   * All the datasets for the project.
   * </pre>
   *
   * <code>repeated .google.maps.mapsplatformdatasets.v1.Dataset datasets = 1;</code>
   */
  @java.lang.Override
  public com.google.maps.mapsplatformdatasets.v1.DatasetOrBuilder getDatasetsOrBuilder(int index) {
    return datasets_.get(index);
  }
  public static final int NEXT_PAGE_TOKEN_FIELD_NUMBER = 2;
  // Holds either a String or a UTF-8 ByteString; the getters convert and
  // cache the other representation lazily (hence volatile).
  @SuppressWarnings("serial")
  private volatile java.lang.Object nextPageToken_ = "";
  /**
   *
   *
   * <pre>
   * A token that can be sent as `page_token` to retrieve the next page.
   *
   * If this field is omitted, there are no subsequent pages.
   * </pre>
   *
   * <code>string next_page_token = 2;</code>
   *
   * @return The nextPageToken.
   */
  @java.lang.Override
  public java.lang.String getNextPageToken() {
    java.lang.Object ref = nextPageToken_;
    if (ref instanceof java.lang.String) {
      return (java.lang.String) ref;
    } else {
      com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
      java.lang.String s = bs.toStringUtf8();
      nextPageToken_ = s;
      return s;
    }
  }
  /**
   *
   *
   * <pre>
   * A token that can be sent as `page_token` to retrieve the next page.
   *
   * If this field is omitted, there are no subsequent pages.
   * </pre>
   *
   * <code>string next_page_token = 2;</code>
   *
   * @return The bytes for nextPageToken.
   */
  @java.lang.Override
  public com.google.protobuf.ByteString getNextPageTokenBytes() {
    java.lang.Object ref = nextPageToken_;
    if (ref instanceof java.lang.String) {
      com.google.protobuf.ByteString b =
          com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
      nextPageToken_ = b;
      return b;
    } else {
      return (com.google.protobuf.ByteString) ref;
    }
  }
  // Memoized result: -1 = not computed, 1 = initialized, 0 = not initialized.
  private byte memoizedIsInitialized = -1;
  @java.lang.Override
  public final boolean isInitialized() {
    byte isInitialized = memoizedIsInitialized;
    if (isInitialized == 1) return true;
    if (isInitialized == 0) return false;
    // No required fields in this message, so it is always initialized.
    memoizedIsInitialized = 1;
    return true;
  }
  // Serializes field 1 (datasets), field 2 (next_page_token, only when
  // non-empty), then any preserved unknown fields.
  @java.lang.Override
  public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
    for (int i = 0; i < datasets_.size(); i++) {
      output.writeMessage(1, datasets_.get(i));
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 2, nextPageToken_);
    }
    getUnknownFields().writeTo(output);
  }
  // Computes the wire size once and memoizes it in memoizedSize.
  @java.lang.Override
  public int getSerializedSize() {
    int size = memoizedSize;
    if (size != -1) return size;
    size = 0;
    for (int i = 0; i < datasets_.size(); i++) {
      size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, datasets_.get(i));
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, nextPageToken_);
    }
    size += getUnknownFields().getSerializedSize();
    memoizedSize = size;
    return size;
  }
  // Field-by-field equality, including unknown fields.
  @java.lang.Override
  public boolean equals(final java.lang.Object obj) {
    if (obj == this) {
      return true;
    }
    if (!(obj instanceof com.google.maps.mapsplatformdatasets.v1.ListDatasetsResponse)) {
      return super.equals(obj);
    }
    com.google.maps.mapsplatformdatasets.v1.ListDatasetsResponse other =
        (com.google.maps.mapsplatformdatasets.v1.ListDatasetsResponse) obj;
    if (!getDatasetsList().equals(other.getDatasetsList())) return false;
    if (!getNextPageToken().equals(other.getNextPageToken())) return false;
    if (!getUnknownFields().equals(other.getUnknownFields())) return false;
    return true;
  }
  // Hash is memoized (0 means "not yet computed") and mixes the descriptor,
  // each set field tagged by its field number, and unknown fields.
  @java.lang.Override
  public int hashCode() {
    if (memoizedHashCode != 0) {
      return memoizedHashCode;
    }
    int hash = 41;
    hash = (19 * hash) + getDescriptor().hashCode();
    if (getDatasetsCount() > 0) {
      hash = (37 * hash) + DATASETS_FIELD_NUMBER;
      hash = (53 * hash) + getDatasetsList().hashCode();
    }
    hash = (37 * hash) + NEXT_PAGE_TOKEN_FIELD_NUMBER;
    hash = (53 * hash) + getNextPageToken().hashCode();
    hash = (29 * hash) + getUnknownFields().hashCode();
    memoizedHashCode = hash;
    return hash;
  }
  // --- Static parsing entry points (generated) ---
  // Every overload delegates to the shared PARSER singleton; the
  // ExtensionRegistryLite variants resolve extension fields while parsing,
  // and the *Delimited* variants read a length-prefixed message.
  public static com.google.maps.mapsplatformdatasets.v1.ListDatasetsResponse parseFrom(
      java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.maps.mapsplatformdatasets.v1.ListDatasetsResponse parseFrom(
      java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.maps.mapsplatformdatasets.v1.ListDatasetsResponse parseFrom(
      com.google.protobuf.ByteString data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.maps.mapsplatformdatasets.v1.ListDatasetsResponse parseFrom(
      com.google.protobuf.ByteString data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.maps.mapsplatformdatasets.v1.ListDatasetsResponse parseFrom(byte[] data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.maps.mapsplatformdatasets.v1.ListDatasetsResponse parseFrom(
      byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.maps.mapsplatformdatasets.v1.ListDatasetsResponse parseFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }
  public static com.google.maps.mapsplatformdatasets.v1.ListDatasetsResponse parseFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }
  public static com.google.maps.mapsplatformdatasets.v1.ListDatasetsResponse parseDelimitedFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
  }
  public static com.google.maps.mapsplatformdatasets.v1.ListDatasetsResponse parseDelimitedFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
        PARSER, input, extensionRegistry);
  }
  public static com.google.maps.mapsplatformdatasets.v1.ListDatasetsResponse parseFrom(
      com.google.protobuf.CodedInputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }
  public static com.google.maps.mapsplatformdatasets.v1.ListDatasetsResponse parseFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }
@java.lang.Override
public Builder newBuilderForType() {
return newBuilder();
}
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(
com.google.maps.mapsplatformdatasets.v1.ListDatasetsResponse prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
*
*
* <pre>
* Response object of ListDatasets.
* </pre>
*
* Protobuf type {@code google.maps.mapsplatformdatasets.v1.ListDatasetsResponse}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.maps.mapsplatformdatasets.v1.ListDatasetsResponse)
com.google.maps.mapsplatformdatasets.v1.ListDatasetsResponseOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.maps.mapsplatformdatasets.v1.MapsPlatformDatasetsProto
.internal_static_google_maps_mapsplatformdatasets_v1_ListDatasetsResponse_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.maps.mapsplatformdatasets.v1.MapsPlatformDatasetsProto
.internal_static_google_maps_mapsplatformdatasets_v1_ListDatasetsResponse_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.maps.mapsplatformdatasets.v1.ListDatasetsResponse.class,
com.google.maps.mapsplatformdatasets.v1.ListDatasetsResponse.Builder.class);
}
// Construct using com.google.maps.mapsplatformdatasets.v1.ListDatasetsResponse.newBuilder()
private Builder() {}
private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
}
@java.lang.Override
public Builder clear() {
super.clear();
bitField0_ = 0;
if (datasetsBuilder_ == null) {
datasets_ = java.util.Collections.emptyList();
} else {
datasets_ = null;
datasetsBuilder_.clear();
}
bitField0_ = (bitField0_ & ~0x00000001);
nextPageToken_ = "";
return this;
}
@java.lang.Override
public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
return com.google.maps.mapsplatformdatasets.v1.MapsPlatformDatasetsProto
.internal_static_google_maps_mapsplatformdatasets_v1_ListDatasetsResponse_descriptor;
}
@java.lang.Override
public com.google.maps.mapsplatformdatasets.v1.ListDatasetsResponse
getDefaultInstanceForType() {
return com.google.maps.mapsplatformdatasets.v1.ListDatasetsResponse.getDefaultInstance();
}
@java.lang.Override
public com.google.maps.mapsplatformdatasets.v1.ListDatasetsResponse build() {
com.google.maps.mapsplatformdatasets.v1.ListDatasetsResponse result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public com.google.maps.mapsplatformdatasets.v1.ListDatasetsResponse buildPartial() {
com.google.maps.mapsplatformdatasets.v1.ListDatasetsResponse result =
new com.google.maps.mapsplatformdatasets.v1.ListDatasetsResponse(this);
buildPartialRepeatedFields(result);
if (bitField0_ != 0) {
buildPartial0(result);
}
onBuilt();
return result;
}
private void buildPartialRepeatedFields(
com.google.maps.mapsplatformdatasets.v1.ListDatasetsResponse result) {
if (datasetsBuilder_ == null) {
if (((bitField0_ & 0x00000001) != 0)) {
datasets_ = java.util.Collections.unmodifiableList(datasets_);
bitField0_ = (bitField0_ & ~0x00000001);
}
result.datasets_ = datasets_;
} else {
result.datasets_ = datasetsBuilder_.build();
}
}
private void buildPartial0(
com.google.maps.mapsplatformdatasets.v1.ListDatasetsResponse result) {
int from_bitField0_ = bitField0_;
if (((from_bitField0_ & 0x00000002) != 0)) {
result.nextPageToken_ = nextPageToken_;
}
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof com.google.maps.mapsplatformdatasets.v1.ListDatasetsResponse) {
return mergeFrom((com.google.maps.mapsplatformdatasets.v1.ListDatasetsResponse) other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(com.google.maps.mapsplatformdatasets.v1.ListDatasetsResponse other) {
if (other
== com.google.maps.mapsplatformdatasets.v1.ListDatasetsResponse.getDefaultInstance())
return this;
if (datasetsBuilder_ == null) {
if (!other.datasets_.isEmpty()) {
if (datasets_.isEmpty()) {
datasets_ = other.datasets_;
bitField0_ = (bitField0_ & ~0x00000001);
} else {
ensureDatasetsIsMutable();
datasets_.addAll(other.datasets_);
}
onChanged();
}
} else {
if (!other.datasets_.isEmpty()) {
if (datasetsBuilder_.isEmpty()) {
datasetsBuilder_.dispose();
datasetsBuilder_ = null;
datasets_ = other.datasets_;
bitField0_ = (bitField0_ & ~0x00000001);
datasetsBuilder_ =
com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders
? getDatasetsFieldBuilder()
: null;
} else {
datasetsBuilder_.addAllMessages(other.datasets_);
}
}
}
if (!other.getNextPageToken().isEmpty()) {
nextPageToken_ = other.nextPageToken_;
bitField0_ |= 0x00000002;
onChanged();
}
this.mergeUnknownFields(other.getUnknownFields());
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
return true;
}
@java.lang.Override
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
case 10:
{
com.google.maps.mapsplatformdatasets.v1.Dataset m =
input.readMessage(
com.google.maps.mapsplatformdatasets.v1.Dataset.parser(),
extensionRegistry);
if (datasetsBuilder_ == null) {
ensureDatasetsIsMutable();
datasets_.add(m);
} else {
datasetsBuilder_.addMessage(m);
}
break;
} // case 10
case 18:
{
nextPageToken_ = input.readStringRequireUtf8();
bitField0_ |= 0x00000002;
break;
} // case 18
default:
{
if (!super.parseUnknownField(input, extensionRegistry, tag)) {
done = true; // was an endgroup tag
}
break;
} // default:
} // switch (tag)
} // while (!done)
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.unwrapIOException();
} finally {
onChanged();
} // finally
return this;
}
private int bitField0_;
private java.util.List<com.google.maps.mapsplatformdatasets.v1.Dataset> datasets_ =
java.util.Collections.emptyList();
private void ensureDatasetsIsMutable() {
if (!((bitField0_ & 0x00000001) != 0)) {
datasets_ =
new java.util.ArrayList<com.google.maps.mapsplatformdatasets.v1.Dataset>(datasets_);
bitField0_ |= 0x00000001;
}
}
private com.google.protobuf.RepeatedFieldBuilderV3<
com.google.maps.mapsplatformdatasets.v1.Dataset,
com.google.maps.mapsplatformdatasets.v1.Dataset.Builder,
com.google.maps.mapsplatformdatasets.v1.DatasetOrBuilder>
datasetsBuilder_;
/**
*
*
* <pre>
* All the datasets for the project.
* </pre>
*
* <code>repeated .google.maps.mapsplatformdatasets.v1.Dataset datasets = 1;</code>
*/
public java.util.List<com.google.maps.mapsplatformdatasets.v1.Dataset> getDatasetsList() {
if (datasetsBuilder_ == null) {
return java.util.Collections.unmodifiableList(datasets_);
} else {
return datasetsBuilder_.getMessageList();
}
}
/**
*
*
* <pre>
* All the datasets for the project.
* </pre>
*
* <code>repeated .google.maps.mapsplatformdatasets.v1.Dataset datasets = 1;</code>
*/
public int getDatasetsCount() {
if (datasetsBuilder_ == null) {
return datasets_.size();
} else {
return datasetsBuilder_.getCount();
}
}
/**
*
*
* <pre>
* All the datasets for the project.
* </pre>
*
* <code>repeated .google.maps.mapsplatformdatasets.v1.Dataset datasets = 1;</code>
*/
public com.google.maps.mapsplatformdatasets.v1.Dataset getDatasets(int index) {
if (datasetsBuilder_ == null) {
return datasets_.get(index);
} else {
return datasetsBuilder_.getMessage(index);
}
}
/**
*
*
* <pre>
* All the datasets for the project.
* </pre>
*
* <code>repeated .google.maps.mapsplatformdatasets.v1.Dataset datasets = 1;</code>
*/
public Builder setDatasets(int index, com.google.maps.mapsplatformdatasets.v1.Dataset value) {
if (datasetsBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureDatasetsIsMutable();
datasets_.set(index, value);
onChanged();
} else {
datasetsBuilder_.setMessage(index, value);
}
return this;
}
/**
*
*
* <pre>
* All the datasets for the project.
* </pre>
*
* <code>repeated .google.maps.mapsplatformdatasets.v1.Dataset datasets = 1;</code>
*/
public Builder setDatasets(
int index, com.google.maps.mapsplatformdatasets.v1.Dataset.Builder builderForValue) {
if (datasetsBuilder_ == null) {
ensureDatasetsIsMutable();
datasets_.set(index, builderForValue.build());
onChanged();
} else {
datasetsBuilder_.setMessage(index, builderForValue.build());
}
return this;
}
/**
*
*
* <pre>
* All the datasets for the project.
* </pre>
*
* <code>repeated .google.maps.mapsplatformdatasets.v1.Dataset datasets = 1;</code>
*/
public Builder addDatasets(com.google.maps.mapsplatformdatasets.v1.Dataset value) {
if (datasetsBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureDatasetsIsMutable();
datasets_.add(value);
onChanged();
} else {
datasetsBuilder_.addMessage(value);
}
return this;
}
/**
*
*
* <pre>
* All the datasets for the project.
* </pre>
*
* <code>repeated .google.maps.mapsplatformdatasets.v1.Dataset datasets = 1;</code>
*/
public Builder addDatasets(int index, com.google.maps.mapsplatformdatasets.v1.Dataset value) {
if (datasetsBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureDatasetsIsMutable();
datasets_.add(index, value);
onChanged();
} else {
datasetsBuilder_.addMessage(index, value);
}
return this;
}
/**
*
*
* <pre>
* All the datasets for the project.
* </pre>
*
* <code>repeated .google.maps.mapsplatformdatasets.v1.Dataset datasets = 1;</code>
*/
public Builder addDatasets(
com.google.maps.mapsplatformdatasets.v1.Dataset.Builder builderForValue) {
if (datasetsBuilder_ == null) {
ensureDatasetsIsMutable();
datasets_.add(builderForValue.build());
onChanged();
} else {
datasetsBuilder_.addMessage(builderForValue.build());
}
return this;
}
/**
*
*
* <pre>
* All the datasets for the project.
* </pre>
*
* <code>repeated .google.maps.mapsplatformdatasets.v1.Dataset datasets = 1;</code>
*/
public Builder addDatasets(
int index, com.google.maps.mapsplatformdatasets.v1.Dataset.Builder builderForValue) {
if (datasetsBuilder_ == null) {
ensureDatasetsIsMutable();
datasets_.add(index, builderForValue.build());
onChanged();
} else {
datasetsBuilder_.addMessage(index, builderForValue.build());
}
return this;
}
/**
*
*
* <pre>
* All the datasets for the project.
* </pre>
*
* <code>repeated .google.maps.mapsplatformdatasets.v1.Dataset datasets = 1;</code>
*/
public Builder addAllDatasets(
java.lang.Iterable<? extends com.google.maps.mapsplatformdatasets.v1.Dataset> values) {
if (datasetsBuilder_ == null) {
ensureDatasetsIsMutable();
com.google.protobuf.AbstractMessageLite.Builder.addAll(values, datasets_);
onChanged();
} else {
datasetsBuilder_.addAllMessages(values);
}
return this;
}
/**
*
*
* <pre>
* All the datasets for the project.
* </pre>
*
* <code>repeated .google.maps.mapsplatformdatasets.v1.Dataset datasets = 1;</code>
*/
public Builder clearDatasets() {
if (datasetsBuilder_ == null) {
datasets_ = java.util.Collections.emptyList();
bitField0_ = (bitField0_ & ~0x00000001);
onChanged();
} else {
datasetsBuilder_.clear();
}
return this;
}
/**
*
*
* <pre>
* All the datasets for the project.
* </pre>
*
* <code>repeated .google.maps.mapsplatformdatasets.v1.Dataset datasets = 1;</code>
*/
public Builder removeDatasets(int index) {
if (datasetsBuilder_ == null) {
ensureDatasetsIsMutable();
datasets_.remove(index);
onChanged();
} else {
datasetsBuilder_.remove(index);
}
return this;
}
/**
*
*
* <pre>
* All the datasets for the project.
* </pre>
*
* <code>repeated .google.maps.mapsplatformdatasets.v1.Dataset datasets = 1;</code>
*/
public com.google.maps.mapsplatformdatasets.v1.Dataset.Builder getDatasetsBuilder(int index) {
return getDatasetsFieldBuilder().getBuilder(index);
}
/**
*
*
* <pre>
* All the datasets for the project.
* </pre>
*
* <code>repeated .google.maps.mapsplatformdatasets.v1.Dataset datasets = 1;</code>
*/
public com.google.maps.mapsplatformdatasets.v1.DatasetOrBuilder getDatasetsOrBuilder(
int index) {
if (datasetsBuilder_ == null) {
return datasets_.get(index);
} else {
return datasetsBuilder_.getMessageOrBuilder(index);
}
}
/**
*
*
* <pre>
* All the datasets for the project.
* </pre>
*
* <code>repeated .google.maps.mapsplatformdatasets.v1.Dataset datasets = 1;</code>
*/
public java.util.List<? extends com.google.maps.mapsplatformdatasets.v1.DatasetOrBuilder>
getDatasetsOrBuilderList() {
if (datasetsBuilder_ != null) {
return datasetsBuilder_.getMessageOrBuilderList();
} else {
return java.util.Collections.unmodifiableList(datasets_);
}
}
/**
*
*
* <pre>
* All the datasets for the project.
* </pre>
*
* <code>repeated .google.maps.mapsplatformdatasets.v1.Dataset datasets = 1;</code>
*/
public com.google.maps.mapsplatformdatasets.v1.Dataset.Builder addDatasetsBuilder() {
return getDatasetsFieldBuilder()
.addBuilder(com.google.maps.mapsplatformdatasets.v1.Dataset.getDefaultInstance());
}
/**
*
*
* <pre>
* All the datasets for the project.
* </pre>
*
* <code>repeated .google.maps.mapsplatformdatasets.v1.Dataset datasets = 1;</code>
*/
public com.google.maps.mapsplatformdatasets.v1.Dataset.Builder addDatasetsBuilder(int index) {
return getDatasetsFieldBuilder()
.addBuilder(index, com.google.maps.mapsplatformdatasets.v1.Dataset.getDefaultInstance());
}
/**
*
*
* <pre>
* All the datasets for the project.
* </pre>
*
* <code>repeated .google.maps.mapsplatformdatasets.v1.Dataset datasets = 1;</code>
*/
public java.util.List<com.google.maps.mapsplatformdatasets.v1.Dataset.Builder>
getDatasetsBuilderList() {
return getDatasetsFieldBuilder().getBuilderList();
}
private com.google.protobuf.RepeatedFieldBuilderV3<
com.google.maps.mapsplatformdatasets.v1.Dataset,
com.google.maps.mapsplatformdatasets.v1.Dataset.Builder,
com.google.maps.mapsplatformdatasets.v1.DatasetOrBuilder>
getDatasetsFieldBuilder() {
if (datasetsBuilder_ == null) {
datasetsBuilder_ =
new com.google.protobuf.RepeatedFieldBuilderV3<
com.google.maps.mapsplatformdatasets.v1.Dataset,
com.google.maps.mapsplatformdatasets.v1.Dataset.Builder,
com.google.maps.mapsplatformdatasets.v1.DatasetOrBuilder>(
datasets_, ((bitField0_ & 0x00000001) != 0), getParentForChildren(), isClean());
datasets_ = null;
}
return datasetsBuilder_;
}
private java.lang.Object nextPageToken_ = "";
/**
*
*
* <pre>
* A token that can be sent as `page_token` to retrieve the next page.
*
* If this field is omitted, there are no subsequent pages.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @return The nextPageToken.
*/
public java.lang.String getNextPageToken() {
java.lang.Object ref = nextPageToken_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
nextPageToken_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
*
*
* <pre>
* A token that can be sent as `page_token` to retrieve the next page.
*
* If this field is omitted, there are no subsequent pages.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @return The bytes for nextPageToken.
*/
public com.google.protobuf.ByteString getNextPageTokenBytes() {
java.lang.Object ref = nextPageToken_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
nextPageToken_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
*
*
* <pre>
* A token that can be sent as `page_token` to retrieve the next page.
*
* If this field is omitted, there are no subsequent pages.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @param value The nextPageToken to set.
* @return This builder for chaining.
*/
public Builder setNextPageToken(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
nextPageToken_ = value;
bitField0_ |= 0x00000002;
onChanged();
return this;
}
/**
*
*
* <pre>
* A token that can be sent as `page_token` to retrieve the next page.
*
* If this field is omitted, there are no subsequent pages.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @return This builder for chaining.
*/
public Builder clearNextPageToken() {
nextPageToken_ = getDefaultInstance().getNextPageToken();
bitField0_ = (bitField0_ & ~0x00000002);
onChanged();
return this;
}
/**
*
*
* <pre>
* A token that can be sent as `page_token` to retrieve the next page.
*
* If this field is omitted, there are no subsequent pages.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @param value The bytes for nextPageToken to set.
* @return This builder for chaining.
*/
public Builder setNextPageTokenBytes(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
checkByteStringIsUtf8(value);
nextPageToken_ = value;
bitField0_ |= 0x00000002;
onChanged();
return this;
}
@java.lang.Override
public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:google.maps.mapsplatformdatasets.v1.ListDatasetsResponse)
}
// @@protoc_insertion_point(class_scope:google.maps.mapsplatformdatasets.v1.ListDatasetsResponse)
private static final com.google.maps.mapsplatformdatasets.v1.ListDatasetsResponse
DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new com.google.maps.mapsplatformdatasets.v1.ListDatasetsResponse();
}
public static com.google.maps.mapsplatformdatasets.v1.ListDatasetsResponse getDefaultInstance() {
return DEFAULT_INSTANCE;
}
private static final com.google.protobuf.Parser<ListDatasetsResponse> PARSER =
new com.google.protobuf.AbstractParser<ListDatasetsResponse>() {
@java.lang.Override
public ListDatasetsResponse parsePartialFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
Builder builder = newBuilder();
try {
builder.mergeFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(builder.buildPartial());
} catch (com.google.protobuf.UninitializedMessageException e) {
throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
} catch (java.io.IOException e) {
throw new com.google.protobuf.InvalidProtocolBufferException(e)
.setUnfinishedMessage(builder.buildPartial());
}
return builder.buildPartial();
}
};
public static com.google.protobuf.Parser<ListDatasetsResponse> parser() {
return PARSER;
}
@java.lang.Override
public com.google.protobuf.Parser<ListDatasetsResponse> getParserForType() {
return PARSER;
}
@java.lang.Override
public com.google.maps.mapsplatformdatasets.v1.ListDatasetsResponse getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
|
googleapis/google-cloud-java | 36,972 | java-container/proto-google-cloud-container-v1beta1/src/main/java/com/google/container/v1beta1/CheckAutopilotCompatibilityResponse.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/container/v1beta1/cluster_service.proto
// Protobuf Java Version: 3.25.8
package com.google.container.v1beta1;
/**
*
*
* <pre>
* CheckAutopilotCompatibilityResponse has a list of compatibility issues.
* </pre>
*
* Protobuf type {@code google.container.v1beta1.CheckAutopilotCompatibilityResponse}
*/
public final class CheckAutopilotCompatibilityResponse
extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.container.v1beta1.CheckAutopilotCompatibilityResponse)
CheckAutopilotCompatibilityResponseOrBuilder {
private static final long serialVersionUID = 0L;
// Use CheckAutopilotCompatibilityResponse.newBuilder() to construct.
private CheckAutopilotCompatibilityResponse(
com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private CheckAutopilotCompatibilityResponse() {
issues_ = java.util.Collections.emptyList();
summary_ = "";
}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
return new CheckAutopilotCompatibilityResponse();
}
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.container.v1beta1.ClusterServiceProto
.internal_static_google_container_v1beta1_CheckAutopilotCompatibilityResponse_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.container.v1beta1.ClusterServiceProto
.internal_static_google_container_v1beta1_CheckAutopilotCompatibilityResponse_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.container.v1beta1.CheckAutopilotCompatibilityResponse.class,
com.google.container.v1beta1.CheckAutopilotCompatibilityResponse.Builder.class);
}
public static final int ISSUES_FIELD_NUMBER = 1;
@SuppressWarnings("serial")
private java.util.List<com.google.container.v1beta1.AutopilotCompatibilityIssue> issues_;
/**
*
*
* <pre>
* The list of issues for the given operation.
* </pre>
*
* <code>repeated .google.container.v1beta1.AutopilotCompatibilityIssue issues = 1;</code>
*/
@java.lang.Override
public java.util.List<com.google.container.v1beta1.AutopilotCompatibilityIssue> getIssuesList() {
return issues_;
}
/**
*
*
* <pre>
* The list of issues for the given operation.
* </pre>
*
* <code>repeated .google.container.v1beta1.AutopilotCompatibilityIssue issues = 1;</code>
*/
@java.lang.Override
public java.util.List<? extends com.google.container.v1beta1.AutopilotCompatibilityIssueOrBuilder>
getIssuesOrBuilderList() {
return issues_;
}
/**
*
*
* <pre>
* The list of issues for the given operation.
* </pre>
*
* <code>repeated .google.container.v1beta1.AutopilotCompatibilityIssue issues = 1;</code>
*/
@java.lang.Override
public int getIssuesCount() {
return issues_.size();
}
/**
*
*
* <pre>
* The list of issues for the given operation.
* </pre>
*
* <code>repeated .google.container.v1beta1.AutopilotCompatibilityIssue issues = 1;</code>
*/
@java.lang.Override
public com.google.container.v1beta1.AutopilotCompatibilityIssue getIssues(int index) {
return issues_.get(index);
}
/**
*
*
* <pre>
* The list of issues for the given operation.
* </pre>
*
* <code>repeated .google.container.v1beta1.AutopilotCompatibilityIssue issues = 1;</code>
*/
@java.lang.Override
public com.google.container.v1beta1.AutopilotCompatibilityIssueOrBuilder getIssuesOrBuilder(
int index) {
return issues_.get(index);
}
public static final int SUMMARY_FIELD_NUMBER = 2;
@SuppressWarnings("serial")
private volatile java.lang.Object summary_ = "";
/**
*
*
* <pre>
* The summary of the autopilot compatibility response.
* </pre>
*
* <code>string summary = 2;</code>
*
* @return The summary.
*/
@java.lang.Override
public java.lang.String getSummary() {
java.lang.Object ref = summary_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
summary_ = s;
return s;
}
}
/**
*
*
* <pre>
* The summary of the autopilot compatibility response.
* </pre>
*
* <code>string summary = 2;</code>
*
* @return The bytes for summary.
*/
@java.lang.Override
public com.google.protobuf.ByteString getSummaryBytes() {
java.lang.Object ref = summary_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
summary_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
@java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
for (int i = 0; i < issues_.size(); i++) {
output.writeMessage(1, issues_.get(i));
}
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(summary_)) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 2, summary_);
}
getUnknownFields().writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
for (int i = 0; i < issues_.size(); i++) {
size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, issues_.get(i));
}
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(summary_)) {
size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, summary_);
}
size += getUnknownFields().getSerializedSize();
memoizedSize = size;
return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof com.google.container.v1beta1.CheckAutopilotCompatibilityResponse)) {
return super.equals(obj);
}
com.google.container.v1beta1.CheckAutopilotCompatibilityResponse other =
(com.google.container.v1beta1.CheckAutopilotCompatibilityResponse) obj;
if (!getIssuesList().equals(other.getIssuesList())) return false;
if (!getSummary().equals(other.getSummary())) return false;
if (!getUnknownFields().equals(other.getUnknownFields())) return false;
return true;
}
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
if (getIssuesCount() > 0) {
hash = (37 * hash) + ISSUES_FIELD_NUMBER;
hash = (53 * hash) + getIssuesList().hashCode();
}
hash = (37 * hash) + SUMMARY_FIELD_NUMBER;
hash = (53 * hash) + getSummary().hashCode();
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
public static com.google.container.v1beta1.CheckAutopilotCompatibilityResponse parseFrom(
java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.container.v1beta1.CheckAutopilotCompatibilityResponse parseFrom(
java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.container.v1beta1.CheckAutopilotCompatibilityResponse parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.container.v1beta1.CheckAutopilotCompatibilityResponse parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.container.v1beta1.CheckAutopilotCompatibilityResponse parseFrom(
byte[] data) throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.container.v1beta1.CheckAutopilotCompatibilityResponse parseFrom(
byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.container.v1beta1.CheckAutopilotCompatibilityResponse parseFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.container.v1beta1.CheckAutopilotCompatibilityResponse parseFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.container.v1beta1.CheckAutopilotCompatibilityResponse parseDelimitedFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
}
public static com.google.container.v1beta1.CheckAutopilotCompatibilityResponse parseDelimitedFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.container.v1beta1.CheckAutopilotCompatibilityResponse parseFrom(
com.google.protobuf.CodedInputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.container.v1beta1.CheckAutopilotCompatibilityResponse parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
  @java.lang.Override
  public Builder newBuilderForType() {
    return newBuilder();
  }
  /** Returns a fresh builder seeded from the default (empty) instance. */
  public static Builder newBuilder() {
    return DEFAULT_INSTANCE.toBuilder();
  }
  /** Returns a builder pre-populated with a copy of {@code prototype}'s fields. */
  public static Builder newBuilder(
      com.google.container.v1beta1.CheckAutopilotCompatibilityResponse prototype) {
    return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
  }
  @java.lang.Override
  public Builder toBuilder() {
    // The default instance yields an empty builder; otherwise copy this message's state in.
    return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
  }
  @java.lang.Override
  protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
    Builder builder = new Builder(parent);
    return builder;
  }
  /**
   *
   *
   * <pre>
   * CheckAutopilotCompatibilityResponse has a list of compatibility issues.
   * </pre>
   *
   * Protobuf type {@code google.container.v1beta1.CheckAutopilotCompatibilityResponse}
   */
  public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
      implements
      // @@protoc_insertion_point(builder_implements:google.container.v1beta1.CheckAutopilotCompatibilityResponse)
      com.google.container.v1beta1.CheckAutopilotCompatibilityResponseOrBuilder {
    public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
      return com.google.container.v1beta1.ClusterServiceProto
          .internal_static_google_container_v1beta1_CheckAutopilotCompatibilityResponse_descriptor;
    }
    @java.lang.Override
    protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return com.google.container.v1beta1.ClusterServiceProto
          .internal_static_google_container_v1beta1_CheckAutopilotCompatibilityResponse_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              com.google.container.v1beta1.CheckAutopilotCompatibilityResponse.class,
              com.google.container.v1beta1.CheckAutopilotCompatibilityResponse.Builder.class);
    }
    // Construct using com.google.container.v1beta1.CheckAutopilotCompatibilityResponse.newBuilder()
    private Builder() {}
    private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
      super(parent);
    }
    /** Resets both fields (issues, summary) and the has-bits to their defaults. */
    @java.lang.Override
    public Builder clear() {
      super.clear();
      bitField0_ = 0;
      if (issuesBuilder_ == null) {
        issues_ = java.util.Collections.emptyList();
      } else {
        issues_ = null;
        issuesBuilder_.clear();
      }
      bitField0_ = (bitField0_ & ~0x00000001);
      summary_ = "";
      return this;
    }
    @java.lang.Override
    public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
      return com.google.container.v1beta1.ClusterServiceProto
          .internal_static_google_container_v1beta1_CheckAutopilotCompatibilityResponse_descriptor;
    }
    @java.lang.Override
    public com.google.container.v1beta1.CheckAutopilotCompatibilityResponse
        getDefaultInstanceForType() {
      return com.google.container.v1beta1.CheckAutopilotCompatibilityResponse.getDefaultInstance();
    }
    /** Builds the message, throwing if it is uninitialized (isInitialized() is always true here). */
    @java.lang.Override
    public com.google.container.v1beta1.CheckAutopilotCompatibilityResponse build() {
      com.google.container.v1beta1.CheckAutopilotCompatibilityResponse result = buildPartial();
      if (!result.isInitialized()) {
        throw newUninitializedMessageException(result);
      }
      return result;
    }
    /** Builds without the initialization check; used by the parser for partial results. */
    @java.lang.Override
    public com.google.container.v1beta1.CheckAutopilotCompatibilityResponse buildPartial() {
      com.google.container.v1beta1.CheckAutopilotCompatibilityResponse result =
          new com.google.container.v1beta1.CheckAutopilotCompatibilityResponse(this);
      buildPartialRepeatedFields(result);
      if (bitField0_ != 0) {
        buildPartial0(result);
      }
      onBuilt();
      return result;
    }
    // Moves the repeated `issues` field into the result, freezing the inline list if mutable.
    private void buildPartialRepeatedFields(
        com.google.container.v1beta1.CheckAutopilotCompatibilityResponse result) {
      if (issuesBuilder_ == null) {
        if (((bitField0_ & 0x00000001) != 0)) {
          issues_ = java.util.Collections.unmodifiableList(issues_);
          bitField0_ = (bitField0_ & ~0x00000001);
        }
        result.issues_ = issues_;
      } else {
        result.issues_ = issuesBuilder_.build();
      }
    }
    // Copies singular fields whose has-bit is set (bit 0x00000002 = summary).
    private void buildPartial0(
        com.google.container.v1beta1.CheckAutopilotCompatibilityResponse result) {
      int from_bitField0_ = bitField0_;
      if (((from_bitField0_ & 0x00000002) != 0)) {
        result.summary_ = summary_;
      }
    }
    @java.lang.Override
    public Builder clone() {
      return super.clone();
    }
    @java.lang.Override
    public Builder setField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.setField(field, value);
    }
    @java.lang.Override
    public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
      return super.clearField(field);
    }
    @java.lang.Override
    public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
      return super.clearOneof(oneof);
    }
    @java.lang.Override
    public Builder setRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
      return super.setRepeatedField(field, index, value);
    }
    @java.lang.Override
    public Builder addRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.addRepeatedField(field, value);
    }
    @java.lang.Override
    public Builder mergeFrom(com.google.protobuf.Message other) {
      if (other instanceof com.google.container.v1beta1.CheckAutopilotCompatibilityResponse) {
        return mergeFrom((com.google.container.v1beta1.CheckAutopilotCompatibilityResponse) other);
      } else {
        super.mergeFrom(other);
        return this;
      }
    }
    /** Field-wise merge: appends {@code other}'s issues; overwrites summary only if non-empty. */
    public Builder mergeFrom(
        com.google.container.v1beta1.CheckAutopilotCompatibilityResponse other) {
      if (other
          == com.google.container.v1beta1.CheckAutopilotCompatibilityResponse.getDefaultInstance())
        return this;
      if (issuesBuilder_ == null) {
        if (!other.issues_.isEmpty()) {
          if (issues_.isEmpty()) {
            // Share other's immutable list until a later mutation forces a private copy.
            issues_ = other.issues_;
            bitField0_ = (bitField0_ & ~0x00000001);
          } else {
            ensureIssuesIsMutable();
            issues_.addAll(other.issues_);
          }
          onChanged();
        }
      } else {
        if (!other.issues_.isEmpty()) {
          if (issuesBuilder_.isEmpty()) {
            issuesBuilder_.dispose();
            issuesBuilder_ = null;
            issues_ = other.issues_;
            bitField0_ = (bitField0_ & ~0x00000001);
            issuesBuilder_ =
                com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders
                    ? getIssuesFieldBuilder()
                    : null;
          } else {
            issuesBuilder_.addAllMessages(other.issues_);
          }
        }
      }
      if (!other.getSummary().isEmpty()) {
        summary_ = other.summary_;
        bitField0_ |= 0x00000002;
        onChanged();
      }
      this.mergeUnknownFields(other.getUnknownFields());
      onChanged();
      return this;
    }
    @java.lang.Override
    public final boolean isInitialized() {
      return true;
    }
    /** Wire-format merge: reads tags until EOF (tag 0) or an end-group tag; unknown fields kept. */
    @java.lang.Override
    public Builder mergeFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      if (extensionRegistry == null) {
        throw new java.lang.NullPointerException();
      }
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              done = true;
              break;
            case 10:
              {
                // Field 1 (issues), length-delimited message.
                com.google.container.v1beta1.AutopilotCompatibilityIssue m =
                    input.readMessage(
                        com.google.container.v1beta1.AutopilotCompatibilityIssue.parser(),
                        extensionRegistry);
                if (issuesBuilder_ == null) {
                  ensureIssuesIsMutable();
                  issues_.add(m);
                } else {
                  issuesBuilder_.addMessage(m);
                }
                break;
              } // case 10
            case 18:
              {
                // Field 2 (summary), UTF-8 validated string.
                summary_ = input.readStringRequireUtf8();
                bitField0_ |= 0x00000002;
                break;
              } // case 18
            default:
              {
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
          } // switch (tag)
        } // while (!done)
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.unwrapIOException();
      } finally {
        onChanged();
      } // finally
      return this;
    }
    // Bit 0x00000001: issues_ is a private mutable copy; bit 0x00000002: summary is set.
    private int bitField0_;
    private java.util.List<com.google.container.v1beta1.AutopilotCompatibilityIssue> issues_ =
        java.util.Collections.emptyList();
    // Copy-on-write guard: replaces a shared/immutable issues_ list with a private ArrayList.
    private void ensureIssuesIsMutable() {
      if (!((bitField0_ & 0x00000001) != 0)) {
        issues_ =
            new java.util.ArrayList<com.google.container.v1beta1.AutopilotCompatibilityIssue>(
                issues_);
        bitField0_ |= 0x00000001;
      }
    }
    // Lazily created (see getIssuesFieldBuilder); while null, issues_ holds the data inline.
    private com.google.protobuf.RepeatedFieldBuilderV3<
            com.google.container.v1beta1.AutopilotCompatibilityIssue,
            com.google.container.v1beta1.AutopilotCompatibilityIssue.Builder,
            com.google.container.v1beta1.AutopilotCompatibilityIssueOrBuilder>
        issuesBuilder_;
    /**
     *
     *
     * <pre>
     * The list of issues for the given operation.
     * </pre>
     *
     * <code>repeated .google.container.v1beta1.AutopilotCompatibilityIssue issues = 1;</code>
     */
    public java.util.List<com.google.container.v1beta1.AutopilotCompatibilityIssue>
        getIssuesList() {
      if (issuesBuilder_ == null) {
        return java.util.Collections.unmodifiableList(issues_);
      } else {
        return issuesBuilder_.getMessageList();
      }
    }
    /**
     *
     *
     * <pre>
     * The list of issues for the given operation.
     * </pre>
     *
     * <code>repeated .google.container.v1beta1.AutopilotCompatibilityIssue issues = 1;</code>
     */
    public int getIssuesCount() {
      if (issuesBuilder_ == null) {
        return issues_.size();
      } else {
        return issuesBuilder_.getCount();
      }
    }
    /**
     *
     *
     * <pre>
     * The list of issues for the given operation.
     * </pre>
     *
     * <code>repeated .google.container.v1beta1.AutopilotCompatibilityIssue issues = 1;</code>
     */
    public com.google.container.v1beta1.AutopilotCompatibilityIssue getIssues(int index) {
      if (issuesBuilder_ == null) {
        return issues_.get(index);
      } else {
        return issuesBuilder_.getMessage(index);
      }
    }
    /**
     *
     *
     * <pre>
     * The list of issues for the given operation.
     * </pre>
     *
     * <code>repeated .google.container.v1beta1.AutopilotCompatibilityIssue issues = 1;</code>
     */
    public Builder setIssues(
        int index, com.google.container.v1beta1.AutopilotCompatibilityIssue value) {
      if (issuesBuilder_ == null) {
        if (value == null) {
          throw new NullPointerException();
        }
        ensureIssuesIsMutable();
        issues_.set(index, value);
        onChanged();
      } else {
        issuesBuilder_.setMessage(index, value);
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * The list of issues for the given operation.
     * </pre>
     *
     * <code>repeated .google.container.v1beta1.AutopilotCompatibilityIssue issues = 1;</code>
     */
    public Builder setIssues(
        int index,
        com.google.container.v1beta1.AutopilotCompatibilityIssue.Builder builderForValue) {
      if (issuesBuilder_ == null) {
        ensureIssuesIsMutable();
        issues_.set(index, builderForValue.build());
        onChanged();
      } else {
        issuesBuilder_.setMessage(index, builderForValue.build());
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * The list of issues for the given operation.
     * </pre>
     *
     * <code>repeated .google.container.v1beta1.AutopilotCompatibilityIssue issues = 1;</code>
     */
    public Builder addIssues(com.google.container.v1beta1.AutopilotCompatibilityIssue value) {
      if (issuesBuilder_ == null) {
        if (value == null) {
          throw new NullPointerException();
        }
        ensureIssuesIsMutable();
        issues_.add(value);
        onChanged();
      } else {
        issuesBuilder_.addMessage(value);
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * The list of issues for the given operation.
     * </pre>
     *
     * <code>repeated .google.container.v1beta1.AutopilotCompatibilityIssue issues = 1;</code>
     */
    public Builder addIssues(
        int index, com.google.container.v1beta1.AutopilotCompatibilityIssue value) {
      if (issuesBuilder_ == null) {
        if (value == null) {
          throw new NullPointerException();
        }
        ensureIssuesIsMutable();
        issues_.add(index, value);
        onChanged();
      } else {
        issuesBuilder_.addMessage(index, value);
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * The list of issues for the given operation.
     * </pre>
     *
     * <code>repeated .google.container.v1beta1.AutopilotCompatibilityIssue issues = 1;</code>
     */
    public Builder addIssues(
        com.google.container.v1beta1.AutopilotCompatibilityIssue.Builder builderForValue) {
      if (issuesBuilder_ == null) {
        ensureIssuesIsMutable();
        issues_.add(builderForValue.build());
        onChanged();
      } else {
        issuesBuilder_.addMessage(builderForValue.build());
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * The list of issues for the given operation.
     * </pre>
     *
     * <code>repeated .google.container.v1beta1.AutopilotCompatibilityIssue issues = 1;</code>
     */
    public Builder addIssues(
        int index,
        com.google.container.v1beta1.AutopilotCompatibilityIssue.Builder builderForValue) {
      if (issuesBuilder_ == null) {
        ensureIssuesIsMutable();
        issues_.add(index, builderForValue.build());
        onChanged();
      } else {
        issuesBuilder_.addMessage(index, builderForValue.build());
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * The list of issues for the given operation.
     * </pre>
     *
     * <code>repeated .google.container.v1beta1.AutopilotCompatibilityIssue issues = 1;</code>
     */
    public Builder addAllIssues(
        java.lang.Iterable<? extends com.google.container.v1beta1.AutopilotCompatibilityIssue>
            values) {
      if (issuesBuilder_ == null) {
        ensureIssuesIsMutable();
        com.google.protobuf.AbstractMessageLite.Builder.addAll(values, issues_);
        onChanged();
      } else {
        issuesBuilder_.addAllMessages(values);
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * The list of issues for the given operation.
     * </pre>
     *
     * <code>repeated .google.container.v1beta1.AutopilotCompatibilityIssue issues = 1;</code>
     */
    public Builder clearIssues() {
      if (issuesBuilder_ == null) {
        issues_ = java.util.Collections.emptyList();
        bitField0_ = (bitField0_ & ~0x00000001);
        onChanged();
      } else {
        issuesBuilder_.clear();
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * The list of issues for the given operation.
     * </pre>
     *
     * <code>repeated .google.container.v1beta1.AutopilotCompatibilityIssue issues = 1;</code>
     */
    public Builder removeIssues(int index) {
      if (issuesBuilder_ == null) {
        ensureIssuesIsMutable();
        issues_.remove(index);
        onChanged();
      } else {
        issuesBuilder_.remove(index);
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * The list of issues for the given operation.
     * </pre>
     *
     * <code>repeated .google.container.v1beta1.AutopilotCompatibilityIssue issues = 1;</code>
     */
    public com.google.container.v1beta1.AutopilotCompatibilityIssue.Builder getIssuesBuilder(
        int index) {
      return getIssuesFieldBuilder().getBuilder(index);
    }
    /**
     *
     *
     * <pre>
     * The list of issues for the given operation.
     * </pre>
     *
     * <code>repeated .google.container.v1beta1.AutopilotCompatibilityIssue issues = 1;</code>
     */
    public com.google.container.v1beta1.AutopilotCompatibilityIssueOrBuilder getIssuesOrBuilder(
        int index) {
      if (issuesBuilder_ == null) {
        return issues_.get(index);
      } else {
        return issuesBuilder_.getMessageOrBuilder(index);
      }
    }
    /**
     *
     *
     * <pre>
     * The list of issues for the given operation.
     * </pre>
     *
     * <code>repeated .google.container.v1beta1.AutopilotCompatibilityIssue issues = 1;</code>
     */
    public java.util.List<
            ? extends com.google.container.v1beta1.AutopilotCompatibilityIssueOrBuilder>
        getIssuesOrBuilderList() {
      if (issuesBuilder_ != null) {
        return issuesBuilder_.getMessageOrBuilderList();
      } else {
        return java.util.Collections.unmodifiableList(issues_);
      }
    }
    /**
     *
     *
     * <pre>
     * The list of issues for the given operation.
     * </pre>
     *
     * <code>repeated .google.container.v1beta1.AutopilotCompatibilityIssue issues = 1;</code>
     */
    public com.google.container.v1beta1.AutopilotCompatibilityIssue.Builder addIssuesBuilder() {
      return getIssuesFieldBuilder()
          .addBuilder(
              com.google.container.v1beta1.AutopilotCompatibilityIssue.getDefaultInstance());
    }
    /**
     *
     *
     * <pre>
     * The list of issues for the given operation.
     * </pre>
     *
     * <code>repeated .google.container.v1beta1.AutopilotCompatibilityIssue issues = 1;</code>
     */
    public com.google.container.v1beta1.AutopilotCompatibilityIssue.Builder addIssuesBuilder(
        int index) {
      return getIssuesFieldBuilder()
          .addBuilder(
              index, com.google.container.v1beta1.AutopilotCompatibilityIssue.getDefaultInstance());
    }
    /**
     *
     *
     * <pre>
     * The list of issues for the given operation.
     * </pre>
     *
     * <code>repeated .google.container.v1beta1.AutopilotCompatibilityIssue issues = 1;</code>
     */
    public java.util.List<com.google.container.v1beta1.AutopilotCompatibilityIssue.Builder>
        getIssuesBuilderList() {
      return getIssuesFieldBuilder().getBuilderList();
    }
    // Lazily swaps the inline issues_ list for a RepeatedFieldBuilderV3 that tracks
    // per-element sub-builders; after this, issues_ is nulled and the builder owns the data.
    private com.google.protobuf.RepeatedFieldBuilderV3<
            com.google.container.v1beta1.AutopilotCompatibilityIssue,
            com.google.container.v1beta1.AutopilotCompatibilityIssue.Builder,
            com.google.container.v1beta1.AutopilotCompatibilityIssueOrBuilder>
        getIssuesFieldBuilder() {
      if (issuesBuilder_ == null) {
        issuesBuilder_ =
            new com.google.protobuf.RepeatedFieldBuilderV3<
                com.google.container.v1beta1.AutopilotCompatibilityIssue,
                com.google.container.v1beta1.AutopilotCompatibilityIssue.Builder,
                com.google.container.v1beta1.AutopilotCompatibilityIssueOrBuilder>(
                issues_, ((bitField0_ & 0x00000001) != 0), getParentForChildren(), isClean());
        issues_ = null;
      }
      return issuesBuilder_;
    }
    // Holds either a String or a lazily-decoded ByteString (see getSummary/getSummaryBytes).
    private java.lang.Object summary_ = "";
    /**
     *
     *
     * <pre>
     * The summary of the autopilot compatibility response.
     * </pre>
     *
     * <code>string summary = 2;</code>
     *
     * @return The summary.
     */
    public java.lang.String getSummary() {
      java.lang.Object ref = summary_;
      if (!(ref instanceof java.lang.String)) {
        com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        summary_ = s;
        return s;
      } else {
        return (java.lang.String) ref;
      }
    }
    /**
     *
     *
     * <pre>
     * The summary of the autopilot compatibility response.
     * </pre>
     *
     * <code>string summary = 2;</code>
     *
     * @return The bytes for summary.
     */
    public com.google.protobuf.ByteString getSummaryBytes() {
      java.lang.Object ref = summary_;
      if (ref instanceof String) {
        com.google.protobuf.ByteString b =
            com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
        summary_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }
    /**
     *
     *
     * <pre>
     * The summary of the autopilot compatibility response.
     * </pre>
     *
     * <code>string summary = 2;</code>
     *
     * @param value The summary to set.
     * @return This builder for chaining.
     */
    public Builder setSummary(java.lang.String value) {
      if (value == null) {
        throw new NullPointerException();
      }
      summary_ = value;
      bitField0_ |= 0x00000002;
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * The summary of the autopilot compatibility response.
     * </pre>
     *
     * <code>string summary = 2;</code>
     *
     * @return This builder for chaining.
     */
    public Builder clearSummary() {
      summary_ = getDefaultInstance().getSummary();
      bitField0_ = (bitField0_ & ~0x00000002);
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * The summary of the autopilot compatibility response.
     * </pre>
     *
     * <code>string summary = 2;</code>
     *
     * @param value The bytes for summary to set.
     * @return This builder for chaining.
     */
    public Builder setSummaryBytes(com.google.protobuf.ByteString value) {
      if (value == null) {
        throw new NullPointerException();
      }
      checkByteStringIsUtf8(value);
      summary_ = value;
      bitField0_ |= 0x00000002;
      onChanged();
      return this;
    }
    @java.lang.Override
    public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.setUnknownFields(unknownFields);
    }
    @java.lang.Override
    public final Builder mergeUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.mergeUnknownFields(unknownFields);
    }
    // @@protoc_insertion_point(builder_scope:google.container.v1beta1.CheckAutopilotCompatibilityResponse)
  }
// @@protoc_insertion_point(class_scope:google.container.v1beta1.CheckAutopilotCompatibilityResponse)
  // Shared immutable default (all-fields-unset) instance; also used as the toBuilder() sentinel.
  private static final com.google.container.v1beta1.CheckAutopilotCompatibilityResponse
      DEFAULT_INSTANCE;
  static {
    DEFAULT_INSTANCE = new com.google.container.v1beta1.CheckAutopilotCompatibilityResponse();
  }
  public static com.google.container.v1beta1.CheckAutopilotCompatibilityResponse
      getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }
  // Stateless parser delegating to Builder.mergeFrom; on failure the partially-built
  // message is attached to the thrown InvalidProtocolBufferException.
  private static final com.google.protobuf.Parser<CheckAutopilotCompatibilityResponse> PARSER =
      new com.google.protobuf.AbstractParser<CheckAutopilotCompatibilityResponse>() {
        @java.lang.Override
        public CheckAutopilotCompatibilityResponse parsePartialFrom(
            com.google.protobuf.CodedInputStream input,
            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
            throws com.google.protobuf.InvalidProtocolBufferException {
          Builder builder = newBuilder();
          try {
            builder.mergeFrom(input, extensionRegistry);
          } catch (com.google.protobuf.InvalidProtocolBufferException e) {
            throw e.setUnfinishedMessage(builder.buildPartial());
          } catch (com.google.protobuf.UninitializedMessageException e) {
            throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
          } catch (java.io.IOException e) {
            throw new com.google.protobuf.InvalidProtocolBufferException(e)
                .setUnfinishedMessage(builder.buildPartial());
          }
          return builder.buildPartial();
        }
      };
  public static com.google.protobuf.Parser<CheckAutopilotCompatibilityResponse> parser() {
    return PARSER;
  }
  @java.lang.Override
  public com.google.protobuf.Parser<CheckAutopilotCompatibilityResponse> getParserForType() {
    return PARSER;
  }
  @java.lang.Override
  public com.google.container.v1beta1.CheckAutopilotCompatibilityResponse
      getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }
}
// ==== java-batch/google-cloud-batch/src/main/java/com/google/cloud/batch/v1/stub/BatchServiceStubSettings.java ====
/*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.cloud.batch.v1.stub;
import static com.google.cloud.batch.v1.BatchServiceClient.ListJobsPagedResponse;
import static com.google.cloud.batch.v1.BatchServiceClient.ListLocationsPagedResponse;
import static com.google.cloud.batch.v1.BatchServiceClient.ListTasksPagedResponse;
import com.google.api.core.ApiFunction;
import com.google.api.core.ApiFuture;
import com.google.api.core.BetaApi;
import com.google.api.core.ObsoleteApi;
import com.google.api.gax.core.GaxProperties;
import com.google.api.gax.core.GoogleCredentialsProvider;
import com.google.api.gax.core.InstantiatingExecutorProvider;
import com.google.api.gax.grpc.GaxGrpcProperties;
import com.google.api.gax.grpc.GrpcTransportChannel;
import com.google.api.gax.grpc.InstantiatingGrpcChannelProvider;
import com.google.api.gax.grpc.ProtoOperationTransformers;
import com.google.api.gax.httpjson.GaxHttpJsonProperties;
import com.google.api.gax.httpjson.HttpJsonTransportChannel;
import com.google.api.gax.httpjson.InstantiatingHttpJsonChannelProvider;
import com.google.api.gax.longrunning.OperationSnapshot;
import com.google.api.gax.longrunning.OperationTimedPollAlgorithm;
import com.google.api.gax.retrying.RetrySettings;
import com.google.api.gax.rpc.ApiCallContext;
import com.google.api.gax.rpc.ApiClientHeaderProvider;
import com.google.api.gax.rpc.ClientContext;
import com.google.api.gax.rpc.OperationCallSettings;
import com.google.api.gax.rpc.PageContext;
import com.google.api.gax.rpc.PagedCallSettings;
import com.google.api.gax.rpc.PagedListDescriptor;
import com.google.api.gax.rpc.PagedListResponseFactory;
import com.google.api.gax.rpc.StatusCode;
import com.google.api.gax.rpc.StubSettings;
import com.google.api.gax.rpc.TransportChannelProvider;
import com.google.api.gax.rpc.UnaryCallSettings;
import com.google.api.gax.rpc.UnaryCallable;
import com.google.cloud.batch.v1.CancelJobRequest;
import com.google.cloud.batch.v1.CancelJobResponse;
import com.google.cloud.batch.v1.CreateJobRequest;
import com.google.cloud.batch.v1.DeleteJobRequest;
import com.google.cloud.batch.v1.GetJobRequest;
import com.google.cloud.batch.v1.GetTaskRequest;
import com.google.cloud.batch.v1.Job;
import com.google.cloud.batch.v1.ListJobsRequest;
import com.google.cloud.batch.v1.ListJobsResponse;
import com.google.cloud.batch.v1.ListTasksRequest;
import com.google.cloud.batch.v1.ListTasksResponse;
import com.google.cloud.batch.v1.OperationMetadata;
import com.google.cloud.batch.v1.Task;
import com.google.cloud.location.GetLocationRequest;
import com.google.cloud.location.ListLocationsRequest;
import com.google.cloud.location.ListLocationsResponse;
import com.google.cloud.location.Location;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.Lists;
import com.google.longrunning.Operation;
import com.google.protobuf.Empty;
import java.io.IOException;
import java.time.Duration;
import java.util.List;
import javax.annotation.Generated;
// AUTO-GENERATED DOCUMENTATION AND CLASS.
/**
* Settings class to configure an instance of {@link BatchServiceStub}.
*
* <p>The default instance has everything set to sensible defaults:
*
* <ul>
* <li>The default service address (batch.googleapis.com) and default port (443) are used.
* <li>Credentials are acquired automatically through Application Default Credentials.
* <li>Retries are configured for idempotent methods but not for non-idempotent methods.
* </ul>
*
* <p>The builder of this class is recursive, so contained classes are themselves builders. When
* build() is called, the tree of builders is called to create the complete settings object.
*
* <p>For example, to set the
* [RetrySettings](https://cloud.google.com/java/docs/reference/gax/latest/com.google.api.gax.retrying.RetrySettings)
* of createJob:
*
* <pre>{@code
* // This snippet has been automatically generated and should be regarded as a code template only.
* // It will require modifications to work:
* // - It may require correct/in-range values for request initialization.
* // - It may require specifying regional endpoints when creating the service client as shown in
* // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library
* BatchServiceStubSettings.Builder batchServiceSettingsBuilder =
* BatchServiceStubSettings.newBuilder();
* batchServiceSettingsBuilder
* .createJobSettings()
* .setRetrySettings(
* batchServiceSettingsBuilder
* .createJobSettings()
* .getRetrySettings()
* .toBuilder()
* .setInitialRetryDelayDuration(Duration.ofSeconds(1))
* .setInitialRpcTimeoutDuration(Duration.ofSeconds(5))
* .setMaxAttempts(5)
* .setMaxRetryDelayDuration(Duration.ofSeconds(30))
* .setMaxRpcTimeoutDuration(Duration.ofSeconds(60))
* .setRetryDelayMultiplier(1.3)
* .setRpcTimeoutMultiplier(1.5)
* .setTotalTimeoutDuration(Duration.ofSeconds(300))
* .build());
* BatchServiceStubSettings batchServiceSettings = batchServiceSettingsBuilder.build();
* }</pre>
*
* Please refer to the [Client Side Retry
* Guide](https://github.com/googleapis/google-cloud-java/blob/main/docs/client_retries.md) for
* additional support in setting retries.
*
* <p>To configure the RetrySettings of a Long Running Operation method, create an
* OperationTimedPollAlgorithm object and update the RPC's polling algorithm. For example, to
* configure the RetrySettings for deleteJob:
*
* <pre>{@code
* // This snippet has been automatically generated and should be regarded as a code template only.
* // It will require modifications to work:
* // - It may require correct/in-range values for request initialization.
* // - It may require specifying regional endpoints when creating the service client as shown in
* // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library
* BatchServiceStubSettings.Builder batchServiceSettingsBuilder =
* BatchServiceStubSettings.newBuilder();
* TimedRetryAlgorithm timedRetryAlgorithm =
* OperationalTimedPollAlgorithm.create(
* RetrySettings.newBuilder()
* .setInitialRetryDelayDuration(Duration.ofMillis(500))
* .setRetryDelayMultiplier(1.5)
* .setMaxRetryDelayDuration(Duration.ofMillis(5000))
* .setTotalTimeoutDuration(Duration.ofHours(24))
* .build());
* batchServiceSettingsBuilder
* .createClusterOperationSettings()
* .setPollingAlgorithm(timedRetryAlgorithm)
* .build();
* }</pre>
*/
@Generated("by gapic-generator-java")
public class BatchServiceStubSettings extends StubSettings<BatchServiceStubSettings> {
  /** The default scopes of the service. */
  private static final ImmutableList<String> DEFAULT_SERVICE_SCOPES =
      ImmutableList.<String>builder().add("https://www.googleapis.com/auth/cloud-platform").build();
  // Per-RPC call settings (retry/timeout configuration), frozen when the Builder is built.
  private final UnaryCallSettings<CreateJobRequest, Job> createJobSettings;
  private final UnaryCallSettings<GetJobRequest, Job> getJobSettings;
  private final UnaryCallSettings<DeleteJobRequest, Operation> deleteJobSettings;
  // *OperationSettings wrap the raw Operation-returning call with long-running-operation
  // polling configuration and typed response/metadata handling.
  private final OperationCallSettings<DeleteJobRequest, Empty, OperationMetadata>
      deleteJobOperationSettings;
  private final UnaryCallSettings<CancelJobRequest, Operation> cancelJobSettings;
  private final OperationCallSettings<CancelJobRequest, CancelJobResponse, OperationMetadata>
      cancelJobOperationSettings;
  private final PagedCallSettings<ListJobsRequest, ListJobsResponse, ListJobsPagedResponse>
      listJobsSettings;
  private final UnaryCallSettings<GetTaskRequest, Task> getTaskSettings;
  private final PagedCallSettings<ListTasksRequest, ListTasksResponse, ListTasksPagedResponse>
      listTasksSettings;
  private final PagedCallSettings<
          ListLocationsRequest, ListLocationsResponse, ListLocationsPagedResponse>
      listLocationsSettings;
  private final UnaryCallSettings<GetLocationRequest, Location> getLocationSettings;
  // Page-stream descriptor for ListJobs: tells the generic paging machinery how to
  // inject/extract page tokens and sizes and how to pull the Job resources out of a page.
  private static final PagedListDescriptor<ListJobsRequest, ListJobsResponse, Job>
      LIST_JOBS_PAGE_STR_DESC =
          new PagedListDescriptor<ListJobsRequest, ListJobsResponse, Job>() {
            @Override
            public String emptyToken() {
              return "";
            }
            @Override
            public ListJobsRequest injectToken(ListJobsRequest payload, String token) {
              return ListJobsRequest.newBuilder(payload).setPageToken(token).build();
            }
            @Override
            public ListJobsRequest injectPageSize(ListJobsRequest payload, int pageSize) {
              return ListJobsRequest.newBuilder(payload).setPageSize(pageSize).build();
            }
            @Override
            public Integer extractPageSize(ListJobsRequest payload) {
              return payload.getPageSize();
            }
            @Override
            public String extractNextToken(ListJobsResponse payload) {
              return payload.getNextPageToken();
            }
            @Override
            public Iterable<Job> extractResources(ListJobsResponse payload) {
              return payload.getJobsList();
            }
          };
  // Same adapter shape as above, for ListTasks (Task resources).
  private static final PagedListDescriptor<ListTasksRequest, ListTasksResponse, Task>
      LIST_TASKS_PAGE_STR_DESC =
          new PagedListDescriptor<ListTasksRequest, ListTasksResponse, Task>() {
            @Override
            public String emptyToken() {
              return "";
            }
            @Override
            public ListTasksRequest injectToken(ListTasksRequest payload, String token) {
              return ListTasksRequest.newBuilder(payload).setPageToken(token).build();
            }
            @Override
            public ListTasksRequest injectPageSize(ListTasksRequest payload, int pageSize) {
              return ListTasksRequest.newBuilder(payload).setPageSize(pageSize).build();
            }
            @Override
            public Integer extractPageSize(ListTasksRequest payload) {
              return payload.getPageSize();
            }
            @Override
            public String extractNextToken(ListTasksResponse payload) {
              return payload.getNextPageToken();
            }
            @Override
            public Iterable<Task> extractResources(ListTasksResponse payload) {
              return payload.getTasksList();
            }
          };
  // Same adapter shape as above, for the mixed-in ListLocations RPC (Location resources).
  private static final PagedListDescriptor<ListLocationsRequest, ListLocationsResponse, Location>
      LIST_LOCATIONS_PAGE_STR_DESC =
          new PagedListDescriptor<ListLocationsRequest, ListLocationsResponse, Location>() {
            @Override
            public String emptyToken() {
              return "";
            }
            @Override
            public ListLocationsRequest injectToken(ListLocationsRequest payload, String token) {
              return ListLocationsRequest.newBuilder(payload).setPageToken(token).build();
            }
            @Override
            public ListLocationsRequest injectPageSize(ListLocationsRequest payload, int pageSize) {
              return ListLocationsRequest.newBuilder(payload).setPageSize(pageSize).build();
            }
            @Override
            public Integer extractPageSize(ListLocationsRequest payload) {
              return payload.getPageSize();
            }
            @Override
            public String extractNextToken(ListLocationsResponse payload) {
              return payload.getNextPageToken();
            }
            @Override
            public Iterable<Location> extractResources(ListLocationsResponse payload) {
              return payload.getLocationsList();
            }
          };
  // Factory turning a raw ListJobs response future into a ListJobsPagedResponse, using
  // LIST_JOBS_PAGE_STR_DESC to fetch subsequent pages on demand.
  private static final PagedListResponseFactory<
          ListJobsRequest, ListJobsResponse, ListJobsPagedResponse>
      LIST_JOBS_PAGE_STR_FACT =
          new PagedListResponseFactory<ListJobsRequest, ListJobsResponse, ListJobsPagedResponse>() {
            @Override
            public ApiFuture<ListJobsPagedResponse> getFuturePagedResponse(
                UnaryCallable<ListJobsRequest, ListJobsResponse> callable,
                ListJobsRequest request,
                ApiCallContext context,
                ApiFuture<ListJobsResponse> futureResponse) {
              PageContext<ListJobsRequest, ListJobsResponse, Job> pageContext =
                  PageContext.create(callable, LIST_JOBS_PAGE_STR_DESC, request, context);
              return ListJobsPagedResponse.createAsync(pageContext, futureResponse);
            }
          };
  // Paged-response factory for ListTasks, backed by LIST_TASKS_PAGE_STR_DESC.
  private static final PagedListResponseFactory<
          ListTasksRequest, ListTasksResponse, ListTasksPagedResponse>
      LIST_TASKS_PAGE_STR_FACT =
          new PagedListResponseFactory<
              ListTasksRequest, ListTasksResponse, ListTasksPagedResponse>() {
            @Override
            public ApiFuture<ListTasksPagedResponse> getFuturePagedResponse(
                UnaryCallable<ListTasksRequest, ListTasksResponse> callable,
                ListTasksRequest request,
                ApiCallContext context,
                ApiFuture<ListTasksResponse> futureResponse) {
              PageContext<ListTasksRequest, ListTasksResponse, Task> pageContext =
                  PageContext.create(callable, LIST_TASKS_PAGE_STR_DESC, request, context);
              return ListTasksPagedResponse.createAsync(pageContext, futureResponse);
            }
          };
  // Paged-response factory for ListLocations, backed by LIST_LOCATIONS_PAGE_STR_DESC.
  private static final PagedListResponseFactory<
          ListLocationsRequest, ListLocationsResponse, ListLocationsPagedResponse>
      LIST_LOCATIONS_PAGE_STR_FACT =
          new PagedListResponseFactory<
              ListLocationsRequest, ListLocationsResponse, ListLocationsPagedResponse>() {
            @Override
            public ApiFuture<ListLocationsPagedResponse> getFuturePagedResponse(
                UnaryCallable<ListLocationsRequest, ListLocationsResponse> callable,
                ListLocationsRequest request,
                ApiCallContext context,
                ApiFuture<ListLocationsResponse> futureResponse) {
              PageContext<ListLocationsRequest, ListLocationsResponse, Location> pageContext =
                  PageContext.create(callable, LIST_LOCATIONS_PAGE_STR_DESC, request, context);
              return ListLocationsPagedResponse.createAsync(pageContext, futureResponse);
            }
          };
  // ---- Per-RPC settings accessors: immutable snapshots built from the Builder. ----
  /** Returns the object with the settings used for calls to createJob. */
  public UnaryCallSettings<CreateJobRequest, Job> createJobSettings() {
    return createJobSettings;
  }
  /** Returns the object with the settings used for calls to getJob. */
  public UnaryCallSettings<GetJobRequest, Job> getJobSettings() {
    return getJobSettings;
  }
  /** Returns the object with the settings used for calls to deleteJob. */
  public UnaryCallSettings<DeleteJobRequest, Operation> deleteJobSettings() {
    return deleteJobSettings;
  }
  /** Returns the object with the settings used for calls to deleteJob. */
  public OperationCallSettings<DeleteJobRequest, Empty, OperationMetadata>
      deleteJobOperationSettings() {
    return deleteJobOperationSettings;
  }
  /** Returns the object with the settings used for calls to cancelJob. */
  public UnaryCallSettings<CancelJobRequest, Operation> cancelJobSettings() {
    return cancelJobSettings;
  }
  /** Returns the object with the settings used for calls to cancelJob. */
  public OperationCallSettings<CancelJobRequest, CancelJobResponse, OperationMetadata>
      cancelJobOperationSettings() {
    return cancelJobOperationSettings;
  }
  /** Returns the object with the settings used for calls to listJobs. */
  public PagedCallSettings<ListJobsRequest, ListJobsResponse, ListJobsPagedResponse>
      listJobsSettings() {
    return listJobsSettings;
  }
  /** Returns the object with the settings used for calls to getTask. */
  public UnaryCallSettings<GetTaskRequest, Task> getTaskSettings() {
    return getTaskSettings;
  }
  /** Returns the object with the settings used for calls to listTasks. */
  public PagedCallSettings<ListTasksRequest, ListTasksResponse, ListTasksPagedResponse>
      listTasksSettings() {
    return listTasksSettings;
  }
  /** Returns the object with the settings used for calls to listLocations. */
  public PagedCallSettings<ListLocationsRequest, ListLocationsResponse, ListLocationsPagedResponse>
      listLocationsSettings() {
    return listLocationsSettings;
  }
  /** Returns the object with the settings used for calls to getLocation. */
  public UnaryCallSettings<GetLocationRequest, Location> getLocationSettings() {
    return getLocationSettings;
  }
  /**
   * Creates a transport-specific stub for this settings object. Dispatches on the
   * configured transport channel's name: gRPC and HTTP/JSON (REST) are supported.
   *
   * @throws UnsupportedOperationException if the configured transport is neither gRPC
   *     nor HTTP/JSON
   */
  public BatchServiceStub createStub() throws IOException {
    if (getTransportChannelProvider()
        .getTransportName()
        .equals(GrpcTransportChannel.getGrpcTransportName())) {
      return GrpcBatchServiceStub.create(this);
    }
    if (getTransportChannelProvider()
        .getTransportName()
        .equals(HttpJsonTransportChannel.getHttpJsonTransportName())) {
      return HttpJsonBatchServiceStub.create(this);
    }
    throw new UnsupportedOperationException(
        String.format(
            "Transport not supported: %s", getTransportChannelProvider().getTransportName()));
  }
  /** Returns the default service name. */
  @Override
  public String getServiceName() {
    return "batch";
  }
  /** Returns a builder for the default ExecutorProvider for this service. */
  public static InstantiatingExecutorProvider.Builder defaultExecutorProviderBuilder() {
    return InstantiatingExecutorProvider.newBuilder();
  }
  /** Returns the default service endpoint. */
  @ObsoleteApi("Use getEndpoint() instead")
  public static String getDefaultEndpoint() {
    return "batch.googleapis.com:443";
  }
  /** Returns the default mTLS service endpoint. */
  public static String getDefaultMtlsEndpoint() {
    return "batch.mtls.googleapis.com:443";
  }
  /** Returns the default service scopes. */
  public static List<String> getDefaultServiceScopes() {
    return DEFAULT_SERVICE_SCOPES;
  }
  /** Returns a builder for the default credentials for this service. */
  public static GoogleCredentialsProvider.Builder defaultCredentialsProviderBuilder() {
    return GoogleCredentialsProvider.newBuilder()
        .setScopesToApply(DEFAULT_SERVICE_SCOPES)
        .setUseJwtAccessWithScope(true);
  }
  /** Returns a builder for the default gRPC ChannelProvider for this service. */
  public static InstantiatingGrpcChannelProvider.Builder defaultGrpcTransportProviderBuilder() {
    return InstantiatingGrpcChannelProvider.newBuilder()
        .setMaxInboundMessageSize(Integer.MAX_VALUE);
  }
  /** Returns a builder for the default REST ChannelProvider for this service. */
  @BetaApi
  public static InstantiatingHttpJsonChannelProvider.Builder
      defaultHttpJsonTransportProviderBuilder() {
    return InstantiatingHttpJsonChannelProvider.newBuilder();
  }
  /** Returns the default TransportChannelProvider (gRPC) for this service. */
  public static TransportChannelProvider defaultTransportChannelProvider() {
    return defaultGrpcTransportProviderBuilder().build();
  }
  /** Returns a builder for the gRPC API client header provider (gapic + transport tokens). */
  public static ApiClientHeaderProvider.Builder defaultGrpcApiClientHeaderProviderBuilder() {
    return ApiClientHeaderProvider.newBuilder()
        .setGeneratedLibToken(
            "gapic", GaxProperties.getLibraryVersion(BatchServiceStubSettings.class))
        .setTransportToken(
            GaxGrpcProperties.getGrpcTokenName(), GaxGrpcProperties.getGrpcVersion());
  }
  /** Returns a builder for the HTTP/JSON API client header provider (gapic + transport tokens). */
  public static ApiClientHeaderProvider.Builder defaultHttpJsonApiClientHeaderProviderBuilder() {
    return ApiClientHeaderProvider.newBuilder()
        .setGeneratedLibToken(
            "gapic", GaxProperties.getLibraryVersion(BatchServiceStubSettings.class))
        .setTransportToken(
            GaxHttpJsonProperties.getHttpJsonTokenName(),
            GaxHttpJsonProperties.getHttpJsonVersion());
  }
  /** Returns the default API client header provider builder (delegates to the gRPC one). */
  public static ApiClientHeaderProvider.Builder defaultApiClientHeaderProviderBuilder() {
    return BatchServiceStubSettings.defaultGrpcApiClientHeaderProviderBuilder();
  }
  /** Returns a new gRPC builder for this class. */
  public static Builder newBuilder() {
    return Builder.createDefault();
  }
  /** Returns a new REST builder for this class. */
  public static Builder newHttpJsonBuilder() {
    return Builder.createHttpJsonDefault();
  }
  /** Returns a new builder for this class. */
  public static Builder newBuilder(ClientContext clientContext) {
    return new Builder(clientContext);
  }
  /** Returns a builder containing all the values of this settings class. */
  public Builder toBuilder() {
    return new Builder(this);
  }
  /**
   * Builds the immutable settings object by freezing each per-RPC settings builder
   * held by the given {@code settingsBuilder}.
   */
  protected BatchServiceStubSettings(Builder settingsBuilder) throws IOException {
    super(settingsBuilder);
    createJobSettings = settingsBuilder.createJobSettings().build();
    getJobSettings = settingsBuilder.getJobSettings().build();
    deleteJobSettings = settingsBuilder.deleteJobSettings().build();
    deleteJobOperationSettings = settingsBuilder.deleteJobOperationSettings().build();
    cancelJobSettings = settingsBuilder.cancelJobSettings().build();
    cancelJobOperationSettings = settingsBuilder.cancelJobOperationSettings().build();
    listJobsSettings = settingsBuilder.listJobsSettings().build();
    getTaskSettings = settingsBuilder.getTaskSettings().build();
    listTasksSettings = settingsBuilder.listTasksSettings().build();
    listLocationsSettings = settingsBuilder.listLocationsSettings().build();
    getLocationSettings = settingsBuilder.getLocationSettings().build();
  }
/** Builder for BatchServiceStubSettings. */
public static class Builder extends StubSettings.Builder<BatchServiceStubSettings, Builder> {
private final ImmutableList<UnaryCallSettings.Builder<?, ?>> unaryMethodSettingsBuilders;
private final UnaryCallSettings.Builder<CreateJobRequest, Job> createJobSettings;
private final UnaryCallSettings.Builder<GetJobRequest, Job> getJobSettings;
private final UnaryCallSettings.Builder<DeleteJobRequest, Operation> deleteJobSettings;
private final OperationCallSettings.Builder<DeleteJobRequest, Empty, OperationMetadata>
deleteJobOperationSettings;
private final UnaryCallSettings.Builder<CancelJobRequest, Operation> cancelJobSettings;
private final OperationCallSettings.Builder<
CancelJobRequest, CancelJobResponse, OperationMetadata>
cancelJobOperationSettings;
private final PagedCallSettings.Builder<
ListJobsRequest, ListJobsResponse, ListJobsPagedResponse>
listJobsSettings;
private final UnaryCallSettings.Builder<GetTaskRequest, Task> getTaskSettings;
private final PagedCallSettings.Builder<
ListTasksRequest, ListTasksResponse, ListTasksPagedResponse>
listTasksSettings;
private final PagedCallSettings.Builder<
ListLocationsRequest, ListLocationsResponse, ListLocationsPagedResponse>
listLocationsSettings;
private final UnaryCallSettings.Builder<GetLocationRequest, Location> getLocationSettings;
private static final ImmutableMap<String, ImmutableSet<StatusCode.Code>>
RETRYABLE_CODE_DEFINITIONS;
static {
ImmutableMap.Builder<String, ImmutableSet<StatusCode.Code>> definitions =
ImmutableMap.builder();
definitions.put(
"no_retry_1_codes", ImmutableSet.copyOf(Lists.<StatusCode.Code>newArrayList()));
definitions.put(
"retry_policy_0_codes",
ImmutableSet.copyOf(Lists.<StatusCode.Code>newArrayList(StatusCode.Code.UNAVAILABLE)));
definitions.put("no_retry_codes", ImmutableSet.copyOf(Lists.<StatusCode.Code>newArrayList()));
RETRYABLE_CODE_DEFINITIONS = definitions.build();
}
private static final ImmutableMap<String, RetrySettings> RETRY_PARAM_DEFINITIONS;
static {
ImmutableMap.Builder<String, RetrySettings> definitions = ImmutableMap.builder();
RetrySettings settings = null;
settings =
RetrySettings.newBuilder()
.setInitialRpcTimeoutDuration(Duration.ofMillis(60000L))
.setRpcTimeoutMultiplier(1.0)
.setMaxRpcTimeoutDuration(Duration.ofMillis(60000L))
.setTotalTimeoutDuration(Duration.ofMillis(60000L))
.build();
definitions.put("no_retry_1_params", settings);
settings =
RetrySettings.newBuilder()
.setInitialRetryDelayDuration(Duration.ofMillis(1000L))
.setRetryDelayMultiplier(1.3)
.setMaxRetryDelayDuration(Duration.ofMillis(10000L))
.setInitialRpcTimeoutDuration(Duration.ofMillis(60000L))
.setRpcTimeoutMultiplier(1.0)
.setMaxRpcTimeoutDuration(Duration.ofMillis(60000L))
.setTotalTimeoutDuration(Duration.ofMillis(60000L))
.build();
definitions.put("retry_policy_0_params", settings);
settings = RetrySettings.newBuilder().setRpcTimeoutMultiplier(1.0).build();
definitions.put("no_retry_params", settings);
RETRY_PARAM_DEFINITIONS = definitions.build();
}
protected Builder() {
this(((ClientContext) null));
}
protected Builder(ClientContext clientContext) {
super(clientContext);
createJobSettings = UnaryCallSettings.newUnaryCallSettingsBuilder();
getJobSettings = UnaryCallSettings.newUnaryCallSettingsBuilder();
deleteJobSettings = UnaryCallSettings.newUnaryCallSettingsBuilder();
deleteJobOperationSettings = OperationCallSettings.newBuilder();
cancelJobSettings = UnaryCallSettings.newUnaryCallSettingsBuilder();
cancelJobOperationSettings = OperationCallSettings.newBuilder();
listJobsSettings = PagedCallSettings.newBuilder(LIST_JOBS_PAGE_STR_FACT);
getTaskSettings = UnaryCallSettings.newUnaryCallSettingsBuilder();
listTasksSettings = PagedCallSettings.newBuilder(LIST_TASKS_PAGE_STR_FACT);
listLocationsSettings = PagedCallSettings.newBuilder(LIST_LOCATIONS_PAGE_STR_FACT);
getLocationSettings = UnaryCallSettings.newUnaryCallSettingsBuilder();
unaryMethodSettingsBuilders =
ImmutableList.<UnaryCallSettings.Builder<?, ?>>of(
createJobSettings,
getJobSettings,
deleteJobSettings,
cancelJobSettings,
listJobsSettings,
getTaskSettings,
listTasksSettings,
listLocationsSettings,
getLocationSettings);
initDefaults(this);
}
protected Builder(BatchServiceStubSettings settings) {
super(settings);
createJobSettings = settings.createJobSettings.toBuilder();
getJobSettings = settings.getJobSettings.toBuilder();
deleteJobSettings = settings.deleteJobSettings.toBuilder();
deleteJobOperationSettings = settings.deleteJobOperationSettings.toBuilder();
cancelJobSettings = settings.cancelJobSettings.toBuilder();
cancelJobOperationSettings = settings.cancelJobOperationSettings.toBuilder();
listJobsSettings = settings.listJobsSettings.toBuilder();
getTaskSettings = settings.getTaskSettings.toBuilder();
listTasksSettings = settings.listTasksSettings.toBuilder();
listLocationsSettings = settings.listLocationsSettings.toBuilder();
getLocationSettings = settings.getLocationSettings.toBuilder();
unaryMethodSettingsBuilders =
ImmutableList.<UnaryCallSettings.Builder<?, ?>>of(
createJobSettings,
getJobSettings,
deleteJobSettings,
cancelJobSettings,
listJobsSettings,
getTaskSettings,
listTasksSettings,
listLocationsSettings,
getLocationSettings);
}
private static Builder createDefault() {
Builder builder = new Builder(((ClientContext) null));
builder.setTransportChannelProvider(defaultTransportChannelProvider());
builder.setCredentialsProvider(defaultCredentialsProviderBuilder().build());
builder.setInternalHeaderProvider(defaultApiClientHeaderProviderBuilder().build());
builder.setMtlsEndpoint(getDefaultMtlsEndpoint());
builder.setSwitchToMtlsEndpointAllowed(true);
return initDefaults(builder);
}
private static Builder createHttpJsonDefault() {
Builder builder = new Builder(((ClientContext) null));
builder.setTransportChannelProvider(defaultHttpJsonTransportProviderBuilder().build());
builder.setCredentialsProvider(defaultCredentialsProviderBuilder().build());
builder.setInternalHeaderProvider(defaultHttpJsonApiClientHeaderProviderBuilder().build());
builder.setMtlsEndpoint(getDefaultMtlsEndpoint());
builder.setSwitchToMtlsEndpointAllowed(true);
return initDefaults(builder);
}
private static Builder initDefaults(Builder builder) {
builder
.createJobSettings()
.setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("no_retry_1_codes"))
.setRetrySettings(RETRY_PARAM_DEFINITIONS.get("no_retry_1_params"));
builder
.getJobSettings()
.setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("retry_policy_0_codes"))
.setRetrySettings(RETRY_PARAM_DEFINITIONS.get("retry_policy_0_params"));
builder
.deleteJobSettings()
.setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("no_retry_1_codes"))
.setRetrySettings(RETRY_PARAM_DEFINITIONS.get("no_retry_1_params"));
builder
.cancelJobSettings()
.setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("no_retry_1_codes"))
.setRetrySettings(RETRY_PARAM_DEFINITIONS.get("no_retry_1_params"));
builder
.listJobsSettings()
.setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("retry_policy_0_codes"))
.setRetrySettings(RETRY_PARAM_DEFINITIONS.get("retry_policy_0_params"));
builder
.getTaskSettings()
.setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("retry_policy_0_codes"))
.setRetrySettings(RETRY_PARAM_DEFINITIONS.get("retry_policy_0_params"));
builder
.listTasksSettings()
.setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("retry_policy_0_codes"))
.setRetrySettings(RETRY_PARAM_DEFINITIONS.get("retry_policy_0_params"));
builder
.listLocationsSettings()
.setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("no_retry_codes"))
.setRetrySettings(RETRY_PARAM_DEFINITIONS.get("no_retry_params"));
builder
.getLocationSettings()
.setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("no_retry_codes"))
.setRetrySettings(RETRY_PARAM_DEFINITIONS.get("no_retry_params"));
builder
.deleteJobOperationSettings()
.setInitialCallSettings(
UnaryCallSettings.<DeleteJobRequest, OperationSnapshot>newUnaryCallSettingsBuilder()
.setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("no_retry_1_codes"))
.setRetrySettings(RETRY_PARAM_DEFINITIONS.get("no_retry_1_params"))
.build())
.setResponseTransformer(
ProtoOperationTransformers.ResponseTransformer.create(Empty.class))
.setMetadataTransformer(
ProtoOperationTransformers.MetadataTransformer.create(OperationMetadata.class))
.setPollingAlgorithm(
OperationTimedPollAlgorithm.create(
RetrySettings.newBuilder()
.setInitialRetryDelayDuration(Duration.ofMillis(5000L))
.setRetryDelayMultiplier(1.5)
.setMaxRetryDelayDuration(Duration.ofMillis(45000L))
.setInitialRpcTimeoutDuration(Duration.ZERO)
.setRpcTimeoutMultiplier(1.0)
.setMaxRpcTimeoutDuration(Duration.ZERO)
.setTotalTimeoutDuration(Duration.ofMillis(300000L))
.build()));
builder
.cancelJobOperationSettings()
.setInitialCallSettings(
UnaryCallSettings.<CancelJobRequest, OperationSnapshot>newUnaryCallSettingsBuilder()
.setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("no_retry_1_codes"))
.setRetrySettings(RETRY_PARAM_DEFINITIONS.get("no_retry_1_params"))
.build())
.setResponseTransformer(
ProtoOperationTransformers.ResponseTransformer.create(CancelJobResponse.class))
.setMetadataTransformer(
ProtoOperationTransformers.MetadataTransformer.create(OperationMetadata.class))
.setPollingAlgorithm(
OperationTimedPollAlgorithm.create(
RetrySettings.newBuilder()
.setInitialRetryDelayDuration(Duration.ofMillis(5000L))
.setRetryDelayMultiplier(1.5)
.setMaxRetryDelayDuration(Duration.ofMillis(45000L))
.setInitialRpcTimeoutDuration(Duration.ZERO)
.setRpcTimeoutMultiplier(1.0)
.setMaxRpcTimeoutDuration(Duration.ZERO)
.setTotalTimeoutDuration(Duration.ofMillis(300000L))
.build()));
return builder;
}
/**
* Applies the given settings updater function to all of the unary API methods in this service.
*
* <p>Note: This method does not support applying settings to streaming methods.
*/
public Builder applyToAllUnaryMethods(
ApiFunction<UnaryCallSettings.Builder<?, ?>, Void> settingsUpdater) {
super.applyToAllUnaryMethods(unaryMethodSettingsBuilders, settingsUpdater);
return this;
}
public ImmutableList<UnaryCallSettings.Builder<?, ?>> unaryMethodSettingsBuilders() {
return unaryMethodSettingsBuilders;
}
/** Returns the builder for the settings used for calls to createJob. */
public UnaryCallSettings.Builder<CreateJobRequest, Job> createJobSettings() {
return createJobSettings;
}
/** Returns the builder for the settings used for calls to getJob. */
public UnaryCallSettings.Builder<GetJobRequest, Job> getJobSettings() {
return getJobSettings;
}
/** Returns the builder for the settings used for calls to deleteJob. */
public UnaryCallSettings.Builder<DeleteJobRequest, Operation> deleteJobSettings() {
return deleteJobSettings;
}
/** Returns the builder for the settings used for calls to deleteJob. */
public OperationCallSettings.Builder<DeleteJobRequest, Empty, OperationMetadata>
deleteJobOperationSettings() {
return deleteJobOperationSettings;
}
/** Returns the builder for the settings used for calls to cancelJob. */
public UnaryCallSettings.Builder<CancelJobRequest, Operation> cancelJobSettings() {
return cancelJobSettings;
}
/** Returns the builder for the settings used for calls to cancelJob. */
public OperationCallSettings.Builder<CancelJobRequest, CancelJobResponse, OperationMetadata>
cancelJobOperationSettings() {
return cancelJobOperationSettings;
}
/** Returns the builder for the settings used for calls to listJobs. */
public PagedCallSettings.Builder<ListJobsRequest, ListJobsResponse, ListJobsPagedResponse>
listJobsSettings() {
return listJobsSettings;
}
/** Returns the builder for the settings used for calls to getTask. */
public UnaryCallSettings.Builder<GetTaskRequest, Task> getTaskSettings() {
return getTaskSettings;
}
/** Returns the builder for the settings used for calls to listTasks. */
public PagedCallSettings.Builder<ListTasksRequest, ListTasksResponse, ListTasksPagedResponse>
listTasksSettings() {
return listTasksSettings;
}
/** Returns the builder for the settings used for calls to listLocations. */
public PagedCallSettings.Builder<
ListLocationsRequest, ListLocationsResponse, ListLocationsPagedResponse>
listLocationsSettings() {
return listLocationsSettings;
}
/** Returns the builder for the settings used for calls to getLocation. */
public UnaryCallSettings.Builder<GetLocationRequest, Location> getLocationSettings() {
return getLocationSettings;
}
@Override
public BatchServiceStubSettings build() throws IOException {
return new BatchServiceStubSettings(this);
}
}
}
|
apache/geode | 37,093 | geode-core/src/main/java/org/apache/geode/internal/cache/TXStateProxyImpl.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more contributor license
* agreements. See the NOTICE file distributed with this work for additional information regarding
* copyright ownership. The ASF licenses this file to You under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance with the License. You may obtain a
* copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and limitations under
* the License.
*/
package org.apache.geode.internal.cache;
import java.util.Collection;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.concurrent.locks.ReentrantLock;
import org.apache.logging.log4j.Logger;
import org.apache.geode.GemFireException;
import org.apache.geode.annotations.internal.MakeNotStatic;
import org.apache.geode.cache.CommitConflictException;
import org.apache.geode.cache.EntryNotFoundException;
import org.apache.geode.cache.Region.Entry;
import org.apache.geode.cache.TransactionDataNotColocatedException;
import org.apache.geode.cache.TransactionDataRebalancedException;
import org.apache.geode.cache.TransactionException;
import org.apache.geode.cache.TransactionId;
import org.apache.geode.cache.UnsupportedOperationInTransactionException;
import org.apache.geode.cache.client.internal.ServerRegionDataAccess;
import org.apache.geode.distributed.DistributedMember;
import org.apache.geode.distributed.internal.membership.InternalDistributedMember;
import org.apache.geode.internal.cache.tier.sockets.ClientProxyMembershipID;
import org.apache.geode.internal.cache.tier.sockets.VersionedObjectList;
import org.apache.geode.internal.cache.tx.ClientTXStateStub;
import org.apache.geode.internal.cache.tx.TransactionalOperation.ServerRegionOperation;
import org.apache.geode.internal.lang.SystemPropertyHelper;
import org.apache.geode.internal.statistics.StatisticsClock;
import org.apache.geode.logging.internal.log4j.api.LogService;
public class TXStateProxyImpl implements TXStateProxy {
  private static final Logger logger = LogService.getLogger();
  @MakeNotStatic
  protected static final AtomicBoolean txDistributedClientWarningIssued = new AtomicBoolean();
  // true when this transaction is driven by JTA rather than the cache API
  private boolean isJTA;
  private TXId txId;
  protected final TXManagerImpl txMgr;
  // member hosting the real TXState when the transaction is remote (null when local)
  protected DistributedMember target;
  private boolean commitRequestedByOwner;
  private boolean isJCATransaction;
  // serializes access to this proxy's transactional state
  private final ReentrantLock lock = new ReentrantLock();
  /** number of operations in this transaction */
  private int operationCount = 0;
  /**
   * tracks bucketIds of transactional operations so as to distinguish between
   * TransactionDataNotColocated and TransactionDataRebalanced exceptions.
   */
  private final Map<Integer, Boolean> buckets = new HashMap<>();
  // whether the first op of the tx hit a partitioned region; used to choose the
  // exception type in getTransactionException
  private boolean firstOperationOnPartitionedRegion = false;
  // the actual transaction state; created lazily by getRealDeal(...)
  protected volatile TXStateInterface realDeal;
  protected boolean inProgress = true;
  // non-null when this member performs the transaction on behalf of a client
  protected InternalDistributedMember onBehalfOfClientMember = null;
  private final InternalCache cache;
  private long lastOperationTimeFromClient;
  private final StatisticsClock statisticsClock;
  private boolean removedCausedByFailover = false;
  /** Creates a proxy for a transaction performed on behalf of the given client member. */
  public TXStateProxyImpl(InternalCache cache, TXManagerImpl managerImpl, TXId id,
      InternalDistributedMember clientMember, StatisticsClock statisticsClock) {
    this.cache = cache;
    txMgr = managerImpl;
    txId = id;
    isJTA = false;
    onBehalfOfClientMember = clientMember;
    this.statisticsClock = statisticsClock;
  }
  /** Creates a proxy for a locally initiated (possibly JTA) transaction. */
  public TXStateProxyImpl(InternalCache cache, TXManagerImpl managerImpl, TXId id, boolean isjta,
      StatisticsClock statisticsClock) {
    this.cache = cache;
    txMgr = managerImpl;
    txId = id;
    isJTA = isjta;
    this.statisticsClock = statisticsClock;
  }
  @Override
  public ReentrantLock getLock() {
    return lock;
  }
  protected StatisticsClock getStatisticsClock() {
    return statisticsClock;
  }
  boolean isJTA() {
    return isJTA;
  }
  @Override
  public TXId getTxId() {
    return txId;
  }
  @Override
  public TXManagerImpl getTxMgr() {
    return txMgr;
  }
  /**
   * This returns either the TXState for the current transaction or a proxy for the state if it is
   * held in another member. If no state currently exists, one is created
   *
   * @param key the key of the entry that is currently being modified
   * @param r the region that is currently being modified
   * @return the state or a proxy for the state
   */
  public TXStateInterface getRealDeal(KeyInfo key, InternalRegion r) {
    if (realDeal == null) {
      if (r == null) { // TODO: stop gap to get tests working
        realDeal = new TXState(this, false, statisticsClock);
      } else {
        // Code to keep going forward
        if (r.hasServerProxy()) {
          // Client-side transaction: delegate operations to the server.
          realDeal =
              new ClientTXStateStub(r.getCache(), r.getDistributionManager(), this, target, r);
          if (r.getScope().isDistributed()) {
            // Warn (once per JVM) that a distributed region in a client tx only
            // affects servers and this client.
            if (txDistributedClientWarningIssued.compareAndSet(false, true)) {
              logger.warn(
                  "Distributed region {} is being used in a client-initiated transaction. The transaction will only affect servers and this client. To keep from seeing this message use 'local' scope in client regions used in transactions.",
                  r.getFullPath());
            }
          }
        } else {
          target = null;
          // wait for the region to be initialized fixes bug 44652
          r.waitOnInitialization(r.getInitializationLatchBeforeGetInitialImage());
          target = r.getOwnerForKey(key);
          // Host the state locally when we own the key; otherwise stub out to the peer.
          if (target == null || target.equals(txMgr.getDM().getId())) {
            realDeal = new TXState(this, false, statisticsClock);
          } else {
            realDeal = new PeerTXStateStub(this, target, onBehalfOfClientMember);
          }
        }
      }
      if (logger.isDebugEnabled()) {
        logger.debug("Built a new TXState: {} me:{}", realDeal, txMgr.getDM().getId());
      }
    }
    // Remember if the transaction's very first operation targets a partitioned
    // region; getTransactionException uses this to pick the right exception type.
    if (isRealDealLocal() && !((TXState) realDeal).hasPerformedAnyOperation()) {
      if (r != null && (r instanceof PartitionedRegion || r.isUsedForPartitionedRegionBucket())) {
        firstOperationOnPartitionedRegion = true;
      }
    }
    return realDeal;
  }
  /**
   * Returns (lazily creating) the transaction state for the given target member: a
   * local TXState when {@code t} is this member, otherwise a peer stub.
   */
  public TXStateInterface getRealDeal(DistributedMember t) {
    assert t != null;
    if (realDeal == null) {
      target = t;
      if (target.equals(getCache().getDistributedSystem().getDistributedMember())) {
        realDeal = new TXState(this, false, statisticsClock);
      } else {
        /*
         * txtodo: // what to do!! We don't know if this is client or server!!!
         */
        realDeal = new PeerTXStateStub(this, target, onBehalfOfClientMember);
      }
      if (logger.isDebugEnabled()) {
        logger.debug("Built a new TXState: {} me:{}", realDeal, txMgr.getDM().getId());
      }
    }
    return realDeal;
  }
  // Re-binds the transaction id when a tx is replayed (e.g. after failover).
  protected void setTXIDForReplay(TXId id) {
    txId = id;
  }
  @Override
  public boolean isOnBehalfOfClient() {
    return onBehalfOfClientMember != null;
  }
  @Override
  public void setIsJTA(boolean isJTA) {
    this.isJTA = isJTA;
  }
  @Override
  public void checkJTA(String errmsg) throws IllegalStateException {
    if (isJTA()) {
      throw new IllegalStateException(errmsg);
    }
  }
  boolean isRemovedCausedByFailover() {
    return removedCausedByFailover;
  }
  void setRemovedCausedByFailover(boolean removedCausedByFailover) {
    this.removedCausedByFailover = removedCausedByFailover;
  }
  // precommit is a distributed-transaction-only operation; this proxy does not support it.
  @Override
  public void precommit()
      throws CommitConflictException, UnsupportedOperationInTransactionException {
    throw new UnsupportedOperationInTransactionException(
        String.format("precommit() operation %s meant for Dist Tx is not supported",
            "precommit"));
  }
  @Override
  public void commit() throws CommitConflictException {
    // Normally the tx is finished (inProgress=false) whether commit succeeds or
    // conflicts. The one exception: an UnsupportedOperationInTransactionException
    // must leave the tx alive so the caller can retry/abort (fix for #42490).
    boolean preserveTx = false;
    try {
      getRealDeal(null, null).commit();
    } catch (UnsupportedOperationInTransactionException e) {
      // fix for #42490
      preserveTx = true;
      throw e;
    } finally {
      inProgress = preserveTx;
    }
  }
  /**
   * Translates a low-level failure into the appropriate transaction exception:
   * "not colocated" when the key's bucket is not among those already touched by this
   * tx (or the tx mixed replicate and partitioned regions starting with the wrong
   * one), "rebalanced" when a primary bucket moved, otherwise the original exception.
   */
  TransactionException getTransactionException(KeyInfo keyInfo, GemFireException e) {
    if (isRealDealLocal() && !buckets.isEmpty() && !buckets.containsKey(keyInfo.getBucketId())) {
      return new TransactionDataNotColocatedException(
          String.format("Key %s is not colocated with transaction",
              keyInfo.getKey()),
          e.getCause());
    }
    // Walk the cause chain looking for a PrimaryBucketException.
    Throwable ex = e;
    while (ex != null) {
      if (ex instanceof PrimaryBucketException) {
        if (isRealDealLocal() && !firstOperationOnPartitionedRegion) {
          return new TransactionDataNotColocatedException(
              String.format(
                  "Key %s is not colocated with transaction. First operation in a transaction "
                      + "should be on a partitioned region when there are operations on both "
                      + "partitioned regions and replicate regions.",
                  keyInfo.getKey()));
        }
        return new TransactionDataRebalancedException(
            "Transactional data moved, due to rebalancing.");
      }
      ex = ex.getCause();
    }
    return (TransactionException) e;
  }
  @Override
  public boolean containsValueForKey(KeyInfo keyInfo, LocalRegion region) {
    try {
      operationCount++;
      boolean retVal = getRealDeal(keyInfo, region).containsValueForKey(keyInfo, region);
      // Record the bucket only after the op succeeds, for colocation diagnostics.
      trackBucketForTx(keyInfo);
      return retVal;
    } catch (TransactionDataRebalancedException transactionDataRebalancedException) {
      throw handleTransactionDataRebalancedException(keyInfo, transactionDataRebalancedException);
    } catch (PrimaryBucketException primaryBucketException) {
      throw getTransactionException(keyInfo, primaryBucketException);
    }
  }
  // For a locally hosted tx, reclassify a rebalanced exception (it may really be a
  // colocation problem); for remote state pass it through unchanged.
  private TransactionException handleTransactionDataRebalancedException(KeyInfo keyInfo,
      TransactionDataRebalancedException transactionDataRebalancedException) {
    if (isRealDealLocal()) {
      return getTransactionException(keyInfo, transactionDataRebalancedException);
    }
    return transactionDataRebalancedException;
  }
void trackBucketForTx(KeyInfo keyInfo) {
if (keyInfo.getBucketId() >= 0) {
if (logger.isDebugEnabled()) {
logger.debug("adding bucket:{} for tx:{}", keyInfo.getBucketId(), getTransactionId());
}
}
if (keyInfo.getBucketId() >= 0) {
buckets.put(keyInfo.getBucketId(), Boolean.TRUE);
}
}
  @Override
  public void destroyExistingEntry(EntryEventImpl event, boolean cacheWrite,
      Object expectedOldValue) throws EntryNotFoundException {
    try {
      operationCount++;
      getRealDeal(event.getKeyInfo(), event.getRegion()).destroyExistingEntry(event, cacheWrite,
          expectedOldValue);
      // Record the bucket only after the op succeeds, for colocation diagnostics.
      trackBucketForTx(event.getKeyInfo());
    } catch (TransactionDataRebalancedException transactionDataRebalancedException) {
      throw handleTransactionDataRebalancedException(event.getKeyInfo(),
          transactionDataRebalancedException);
    } catch (PrimaryBucketException primaryBucketException) {
      throw getTransactionException(event.getKeyInfo(), primaryBucketException);
    }
  }
  @Override
  public long getBeginTime() {
    return getRealDeal(null, null).getBeginTime();
  }
  @Override
  public InternalCache getCache() {
    return cache;
  }
  @Override
  public int getChanges() {
    assertBootstrapped();
    return getRealDeal(null, null).getChanges();
  }
  @Override
  public Object getDeserializedValue(KeyInfo keyInfo, LocalRegion localRegion, boolean updateStats,
      boolean disableCopyOnRead, boolean preferCD, EntryEventImpl clientEvent,
      boolean returnTombstones, boolean retainResult, boolean createIfAbsent) {
    try {
      // NOTE(review): the clientEvent and returnTombstones arguments are not forwarded
      // (null/false are passed instead) — appears intentional; confirm against callers.
      Object val = getRealDeal(keyInfo, localRegion).getDeserializedValue(keyInfo, localRegion,
          updateStats, disableCopyOnRead, preferCD, null, false, retainResult, createIfAbsent);
      trackBucketForTx(keyInfo);
      if (val != null) {
        // fixes bug 51057: TXStateStub on client always returns null, so do not increment
        // the operation count it will be incremented in findObject()
        operationCount++;
      }
      return val;
    } catch (TransactionDataRebalancedException transactionDataRebalancedException) {
      throw handleTransactionDataRebalancedException(keyInfo, transactionDataRebalancedException);
    } catch (PrimaryBucketException primaryBucketException) {
      throw getTransactionException(keyInfo, primaryBucketException);
    }
  }
@Override
public Entry getEntry(KeyInfo keyInfo, LocalRegion region, boolean allowTombstones) {
try {
operationCount++;
Entry retVal = getRealDeal(keyInfo, region).getEntry(keyInfo, region, allowTombstones);
trackBucketForTx(keyInfo);
return retVal;
} catch (TransactionDataRebalancedException transactionDataRebalancedException) {
throw handleTransactionDataRebalancedException(keyInfo, transactionDataRebalancedException);
} catch (PrimaryBucketException primaryBucketException) {
throw getTransactionException(keyInfo, primaryBucketException);
}
}
@Override
public TXEvent getEvent() {
assertBootstrapped();
return getRealDeal(null, null).getEvent();
}
@Override
public List getEvents() {
assertBootstrapped();
return getRealDeal(null, null).getEvents();
}
@Override
public Collection<InternalRegion> getRegions() {
assertBootstrapped();
return getRealDeal(null, null).getRegions();
}
@Override
public TransactionId getTransactionId() {
return txId;
}
  /** Invalidates an existing entry via the real deal, tracking the bucket touched. */
  @Override
  public void invalidateExistingEntry(EntryEventImpl event, boolean invokeCallbacks,
      boolean forceNewEntry) {
    try {
      operationCount++;
      getRealDeal(event.getKeyInfo(), event.getRegion()).invalidateExistingEntry(event,
          invokeCallbacks, forceNewEntry);
      trackBucketForTx(event.getKeyInfo());
    } catch (TransactionDataRebalancedException transactionDataRebalancedException) {
      throw handleTransactionDataRebalancedException(event.getKeyInfo(),
          transactionDataRebalancedException);
    } catch (PrimaryBucketException primaryBucketException) {
      throw getTransactionException(event.getKeyInfo(), primaryBucketException);
    }
  }

  /** Whether this transaction has not yet been committed or rolled back. */
  @Override
  public boolean isInProgress() {
    return inProgress;
  }

  @Override
  public void setInProgress(boolean progress) {
    inProgress = progress;
  }

  /** Requires a bootstrapped transaction state. */
  @Override
  public boolean needsLargeModCount() {
    assertBootstrapped();
    return getRealDeal(null, null).needsLargeModCount();
  }

  /** Requires a bootstrapped transaction state. */
  @Override
  public int nextModSerialNum() {
    assertBootstrapped();
    return getRealDeal(null, null).nextModSerialNum();
  }

  /** Requires a bootstrapped transaction state. */
  @Override
  public TXRegionState readRegion(InternalRegion r) {
    assertBootstrapped();
    return getRealDeal(null, r).readRegion(r);
  }

  /** Requires a bootstrapped transaction state. */
  @Override
  public void rmRegion(LocalRegion r) {
    assertBootstrapped();
    getRealDeal(null, r).rmRegion(r);
  }

  /** Rolls back the real transaction; the in-progress flag is always cleared. */
  @Override
  public void rollback() {
    try {
      getRealDeal(null, null).rollback();
    } finally {
      inProgress = false;
    }
  }
  /** Puts an entry through the real deal, tracking the bucket touched. */
  @Override
  public boolean txPutEntry(EntryEventImpl event, boolean ifNew, boolean requireOldValue,
      boolean checkResources, Object expectedOldValue) {
    try {
      operationCount++;
      boolean retVal = getRealDeal(event.getKeyInfo(), event.getRegion())
          .txPutEntry(event, ifNew, requireOldValue, checkResources, expectedOldValue);
      trackBucketForTx(event.getKeyInfo());
      return retVal;
    } catch (TransactionDataRebalancedException transactionDataRebalancedException) {
      throw handleTransactionDataRebalancedException(event.getKeyInfo(),
          transactionDataRebalancedException);
    } catch (PrimaryBucketException primaryBucketException) {
      throw getTransactionException(event.getKeyInfo(), primaryBucketException);
    }
  }

  /** Reads (and optionally creates) the TX entry state for the given key. */
  @Override
  public TXEntryState txReadEntry(KeyInfo keyInfo, LocalRegion localRegion, boolean rememberRead,
      boolean createTxEntryIfAbsent) {
    try {
      operationCount++;
      TXEntryState retVal = getRealDeal(keyInfo, localRegion).txReadEntry(keyInfo, localRegion,
          rememberRead, createTxEntryIfAbsent);
      trackBucketForTx(keyInfo);
      return retVal;
    } catch (TransactionDataRebalancedException transactionDataRebalancedException) {
      throw handleTransactionDataRebalancedException(keyInfo, transactionDataRebalancedException);
    } catch (PrimaryBucketException primaryBucketException) {
      throw getTransactionException(keyInfo, primaryBucketException);
    }
  }

  /** Requires a bootstrapped transaction state. */
  @Override
  public TXRegionState txReadRegion(InternalRegion internalRegion) {
    assertBootstrapped();
    return getRealDeal(null, internalRegion).txReadRegion(internalRegion);
  }

  @Override
  public TXRegionState txWriteRegion(InternalRegion internalRegion, KeyInfo entryKey) {
    return getRealDeal(entryKey, internalRegion).txWriteRegion(internalRegion, entryKey);
  }

  /** Requires a bootstrapped transaction state. */
  @Override
  public TXRegionState writeRegion(InternalRegion r) {
    assertBootstrapped();
    return getRealDeal(null, r).writeRegion(r);
  }

  // Guard used by operations that are only legal once the real transaction state exists.
  private void assertBootstrapped() {
    assert realDeal != null;
  }
  /** JTA afterCompletion hook: delegates, and always clears the in-progress flag. */
  @Override
  public void afterCompletion(int status) {
    assertBootstrapped();
    try {
      getRealDeal(null, null).afterCompletion(status);
    } finally {
      inProgress = false;
    }
  }

  /** JTA beforeCompletion hook; requires a bootstrapped transaction state. */
  @Override
  public void beforeCompletion() {
    assertBootstrapped();
    getRealDeal(null, null).beforeCompletion();
  }

  /** containsKey through the real deal, tracking the bucket touched. */
  @Override
  public boolean containsKey(KeyInfo keyInfo, LocalRegion localRegion) {
    try {
      operationCount++;
      boolean retVal = getRealDeal(keyInfo, localRegion).containsKey(keyInfo, localRegion);
      trackBucketForTx(keyInfo);
      return retVal;
    } catch (TransactionDataRebalancedException transactionDataRebalancedException) {
      throw handleTransactionDataRebalancedException(keyInfo, transactionDataRebalancedException);
    } catch (PrimaryBucketException primaryBucketException) {
      throw getTransactionException(keyInfo, primaryBucketException);
    }
  }

  /**
   * Computes the region size. When size is the first operation of the transaction, the size
   * is computed outside the transaction (the tx is paused); otherwise the proxy's lock may
   * be temporarily released to avoid hanging while computing PR size (bug #42945).
   */
  @Override
  @edu.umd.cs.findbugs.annotations.SuppressWarnings(value = "UL_UNRELEASED_LOCK",
      justification = "This method unlocks and then conditionally undoes the unlock in the finally-block. Review again at later time.")
  public int entryCount(LocalRegion localRegion) {
    // if size is the first operation in the transaction, then reset the txState
    boolean resetTXState = realDeal == null;
    TXStateProxy txp = null;
    boolean txUnlocked = false;
    if (resetTXState) {
      txp = getTxMgr().pauseTransaction();
    } else {
      if (getLock().isHeldByCurrentThread()) {
        txUnlocked = true; // bug #42945 - hang trying to compute size for PR
        getLock().unlock();
      }
    }
    try {
      if (resetTXState) {
        return localRegion.getSharedDataView().entryCount(localRegion);
      }
      return getRealDeal(null, localRegion).entryCount(localRegion);
    } finally {
      if (resetTXState) {
        getTxMgr().unpauseTransaction(txp);
      } else if (txUnlocked) {
        getLock().lock();
      }
    }
  }
  /**
   * Finds an object through the real deal, tracking the bucket touched.
   * NOTE(review): returnTombstones is passed to the delegate as hard-coded false —
   * confirm callers do not rely on tombstone visibility through this path.
   */
  @Override
  public Object findObject(KeyInfo key, LocalRegion r, boolean isCreate, boolean generateCallbacks,
      Object value, boolean disableCopyOnRead, boolean preferCD,
      ClientProxyMembershipID requestingClient, EntryEventImpl clientEvent,
      boolean returnTombstones) {
    try {
      operationCount++;
      Object retVal = getRealDeal(key, r).findObject(key, r, isCreate, generateCallbacks, value,
          disableCopyOnRead, preferCD, requestingClient, clientEvent, false);
      trackBucketForTx(key);
      return retVal;
    } catch (TransactionDataRebalancedException transactionDataRebalancedException) {
      throw handleTransactionDataRebalancedException(key, transactionDataRebalancedException);
    } catch (PrimaryBucketException primaryBucketException) {
      throw getTransactionException(key, primaryBucketException);
    }
  }

  /** Returns null when no real transaction state has been bootstrapped yet. */
  @Override
  public Set getAdditionalKeysForIterator(LocalRegion currRgn) {
    if (realDeal == null) {
      return null;
    }
    return getRealDeal(null, currRgn).getAdditionalKeysForIterator(currRgn);
  }

  // System-property switch restoring the legacy set-operation transaction behavior.
  protected final boolean restoreSetOperationTransactionBehavior =
      SystemPropertyHelper.restoreSetOperationTransactionBehavior();

  /**
   * Iterator support: when the transaction must not be bootstrapped on this node (peer
   * accessor, or legacy-behavior flag), the entry is read outside the transaction.
   */
  @Override
  public Object getEntryForIterator(KeyInfo key, LocalRegion currRgn, boolean rememberReads,
      boolean allowTombstones) {
    boolean resetTxState = isTransactionInternalSuspendNeeded(currRgn);
    TXStateProxy txp = null;
    if (resetTxState) {
      txp = getTxMgr().pauseTransaction();
    }
    try {
      if (resetTxState) {
        return currRgn.getSharedDataView().getEntry(key, currRgn, allowTombstones);
      }
      return getRealDeal(key, currRgn).getEntryForIterator(key, currRgn, rememberReads,
          allowTombstones);
    } finally {
      if (resetTxState) {
        getTxMgr().unpauseTransaction(txp);
      }
    }
  }

  private boolean isTransactionInternalSuspendNeeded(LocalRegion region) {
    // for peer accessor, do not bootstrap transaction in the node as subsequent operations
    // will fail as transaction should be on data node only
    return realDeal == null && (isPeerAccessor(region) || restoreSetOperationTransactionBehavior);
  }

  /** True when this member has no server proxy and cannot store the region's data. */
  private boolean isPeerAccessor(LocalRegion region) {
    if (region.hasServerProxy()) {
      return false;
    }
    return !region.canStoreDataLocally();
  }
  /** Iterator support; may read outside the transaction (see isTransactionInternalSuspendNeeded). */
  @Override
  public Object getKeyForIterator(KeyInfo keyInfo, LocalRegion currRgn, boolean rememberReads,
      boolean allowTombstones) {
    boolean resetTxState = isTransactionInternalSuspendNeeded(currRgn);
    TXStateProxy txp = null;
    if (resetTxState) {
      txp = getTxMgr().pauseTransaction();
    }
    try {
      if (resetTxState) {
        return currRgn.getSharedDataView().getKeyForIterator(keyInfo, currRgn, rememberReads,
            allowTombstones);
      }
      return getRealDeal(keyInfo, currRgn).getKeyForIterator(keyInfo, currRgn, rememberReads,
          allowTombstones);
    } finally {
      if (resetTxState) {
        getTxMgr().unpauseTransaction(txp);
      }
    }
  }

  @Override
  public Object getValueInVM(KeyInfo keyInfo, LocalRegion localRegion, boolean rememberRead) {
    operationCount++;
    return getRealDeal(keyInfo, localRegion).getValueInVM(keyInfo, localRegion, rememberRead);
  }

  /** Requires a bootstrapped transaction state. */
  @Override
  public boolean isDeferredStats() {
    assertBootstrapped();
    return getRealDeal(null, null).isDeferredStats();
  }

  /** Convenience overload: invokeCallbacks=true, throwConcurrentModification=false. */
  @Override
  public boolean putEntry(EntryEventImpl event, boolean ifNew, boolean ifOld,
      Object expectedOldValue, boolean requireOldValue, long lastModified,
      boolean overwriteDestroyed) {
    return putEntry(event, ifNew, ifOld, expectedOldValue, requireOldValue, lastModified,
        overwriteDestroyed, true, false);
  }

  /**
   * Puts an entry through the real deal, tracking the bucket touched.
   * NOTE(review): invokeCallbacks and throwConcurrentModification are not forwarded to the
   * delegate's 7-arg putEntry — confirm the delegate applies the intended defaults.
   */
  @Override
  public boolean putEntry(EntryEventImpl event, boolean ifNew, boolean ifOld,
      Object expectedOldValue, boolean requireOldValue, long lastModified,
      boolean overwriteDestroyed, boolean invokeCallbacks, boolean throwConcurrentModification) {
    try {
      operationCount++;
      boolean retVal = getRealDeal(event.getKeyInfo(), event.getRegion()).putEntry(event, ifNew,
          ifOld, expectedOldValue, requireOldValue, lastModified, overwriteDestroyed);
      trackBucketForTx(event.getKeyInfo());
      return retVal;
    } catch (TransactionDataRebalancedException transactionDataRebalancedException) {
      throw handleTransactionDataRebalancedException(event.getKeyInfo(),
          transactionDataRebalancedException);
    } catch (PrimaryBucketException primaryBucketException) {
      throw getTransactionException(event.getKeyInfo(), primaryBucketException);
    }
  }
  /** Same transaction iff in progress and the other state is this very proxy instance. */
  @Override
  public boolean isInProgressAndSameAs(TXStateInterface otherState) {
    return isInProgress() && otherState == this;
  }

  /** Installs the real (local) transaction state backing this proxy. */
  @Override
  public void setLocalTXState(TXStateInterface state) {
    realDeal = state;
  }

  /** Serialized read through the real deal, tracking the bucket touched. */
  @Override
  public Object getSerializedValue(LocalRegion localRegion, KeyInfo key, boolean doNotLockEntry,
      ClientProxyMembershipID requestingClient, EntryEventImpl clientEvent,
      boolean returnTombstones) throws DataLocationException {
    operationCount++;
    try {
      Object retVal =
          getRealDeal(key, localRegion).getSerializedValue(localRegion, key, doNotLockEntry,
              requestingClient, clientEvent, returnTombstones);
      trackBucketForTx(key);
      return retVal;
    } catch (TransactionDataRebalancedException transactionDataRebalancedException) {
      throw handleTransactionDataRebalancedException(key, transactionDataRebalancedException);
    } catch (PrimaryBucketException primaryBucketException) {
      throw getTransactionException(key, primaryBucketException);
    }
  }

  /** Remote-originated put; asserts the real deal is a local TXState. */
  @Override
  public boolean putEntryOnRemote(EntryEventImpl event, boolean ifNew, boolean ifOld,
      Object expectedOldValue, boolean requireOldValue, long lastModified,
      boolean overwriteDestroyed) throws DataLocationException {
    operationCount++;
    TXStateInterface tx = getRealDeal(event.getKeyInfo(), event.getRegion());
    assert (tx instanceof TXState) : tx.getClass().getSimpleName();
    try {
      boolean retVal = tx.putEntryOnRemote(event, ifNew, ifOld, expectedOldValue, requireOldValue,
          lastModified, overwriteDestroyed);
      trackBucketForTx(event.getKeyInfo());
      return retVal;
    } catch (TransactionDataRebalancedException transactionDataRebalancedException) {
      throw handleTransactionDataRebalancedException(event.getKeyInfo(),
          transactionDataRebalancedException);
    } catch (PrimaryBucketException primaryBucketException) {
      throw getTransactionException(event.getKeyInfo(), primaryBucketException);
    }
  }
  @Override
  public boolean isFireCallbacks() {
    return getRealDeal(null, null).isFireCallbacks();
  }

  /** Remote-originated destroy; asserts the real deal is a local TXState. */
  @Override
  public void destroyOnRemote(EntryEventImpl event, boolean cacheWrite, Object expectedOldValue)
      throws DataLocationException {
    operationCount++;
    TXStateInterface tx = getRealDeal(event.getKeyInfo(), event.getRegion());
    assert (tx instanceof TXState);
    try {
      tx.destroyOnRemote(event, cacheWrite, expectedOldValue);
      trackBucketForTx(event.getKeyInfo());
    } catch (TransactionDataRebalancedException transactionDataRebalancedException) {
      throw handleTransactionDataRebalancedException(event.getKeyInfo(),
          transactionDataRebalancedException);
    } catch (PrimaryBucketException primaryBucketException) {
      throw getTransactionException(event.getKeyInfo(), primaryBucketException);
    }
  }

  /** Remote-originated invalidate; asserts the real deal is a local TXState. */
  @Override
  public void invalidateOnRemote(EntryEventImpl event, boolean invokeCallbacks,
      boolean forceNewEntry) throws DataLocationException {
    operationCount++;
    TXStateInterface tx = getRealDeal(event.getKeyInfo(), event.getRegion());
    assert (tx instanceof TXState);
    try {
      tx.invalidateOnRemote(event, invokeCallbacks, forceNewEntry);
      trackBucketForTx(event.getKeyInfo());
    } catch (TransactionDataRebalancedException transactionDataRebalancedException) {
      throw handleTransactionDataRebalancedException(event.getKeyInfo(),
          transactionDataRebalancedException);
    } catch (PrimaryBucketException primaryBucketException) {
      throw getTransactionException(event.getKeyInfo(), primaryBucketException);
    }
  }

  /** Region destroy is not allowed inside a transaction. */
  @Override
  public void checkSupportsRegionDestroy() throws UnsupportedOperationInTransactionException {
    throw new UnsupportedOperationInTransactionException(
        "destroyRegion() is not supported while in a transaction");
  }

  /** Region invalidate is not allowed inside a transaction. */
  @Override
  public void checkSupportsRegionInvalidate() throws UnsupportedOperationInTransactionException {
    throw new UnsupportedOperationInTransactionException(
        "invalidateRegion() is not supported while in a transaction");
  }

  /** Region clear is not allowed inside a transaction. */
  @Override
  public void checkSupportsRegionClear() throws UnsupportedOperationInTransactionException {
    throw new UnsupportedOperationInTransactionException(
        "clear() is not supported while in a transaction");
  }
  /**
   * Returns the keys of a bucket; may read outside the transaction.
   * NOTE(review): the allowTombstones parameter is ignored — false is hard-coded on both
   * delegated calls. Confirm this is intentional.
   */
  @Override
  public Set getBucketKeys(LocalRegion localRegion, int bucketId, boolean allowTombstones) {
    boolean resetTxState = isTransactionInternalSuspendNeeded(localRegion);
    TXStateProxy txp = null;
    if (resetTxState) {
      txp = getTxMgr().pauseTransaction();
    }
    try {
      if (resetTxState) {
        return localRegion.getSharedDataView().getBucketKeys(localRegion, bucketId, false);
      }
      return getRealDeal(null, localRegion).getBucketKeys(localRegion, bucketId, false);
    } finally {
      if (resetTxState) {
        getTxMgr().unpauseTransaction(txp);
      }
    }
  }

  /** Remote-originated entry read; asserts the real deal is a local TXState. */
  @Override
  public Entry getEntryOnRemote(KeyInfo keyInfo, LocalRegion localRegion, boolean allowTombstones)
      throws DataLocationException {
    operationCount++;
    TXStateInterface tx = getRealDeal(keyInfo, localRegion);
    assert (tx instanceof TXState);
    try {
      return tx.getEntryOnRemote(keyInfo, localRegion, allowTombstones);
    } catch (TransactionDataRebalancedException transactionDataRebalancedException) {
      throw handleTransactionDataRebalancedException(keyInfo, transactionDataRebalancedException);
    } catch (PrimaryBucketException primaryBucketException) {
      throw getTransactionException(keyInfo, primaryBucketException);
    }
  }

  /** Forces bootstrap of a local transaction state. */
  public void forceLocalBootstrap() {
    getRealDeal(null, null);
  }

  /** The member hosting the real transaction state, if assigned. */
  @Override
  public DistributedMember getTarget() {
    return target;
  }

  /** Assigns the hosting member; only recorded when the real deal ends up local. */
  @Override
  public void setTarget(DistributedMember target) {
    assert this.target == null;
    getRealDeal(target);
    if (this.target == null && isRealDealLocal()) {
      assert target.equals(getCache().getDistributedSystem().getDistributedMember());
      this.target = target;
    }
  }
  /**
   * Iteration keys for a region: bucket regions read directly; otherwise the read may happen
   * outside the transaction (see isTransactionInternalSuspendNeeded).
   */
  @Override
  public Collection<?> getRegionKeysForIteration(LocalRegion currRegion) {
    if (currRegion.isUsedForPartitionedRegionBucket()) {
      return currRegion.getRegionKeysForIteration();
    } else {
      boolean resetTxState = isTransactionInternalSuspendNeeded(currRegion);
      TXStateProxy txp = null;
      if (resetTxState) {
        txp = getTxMgr().pauseTransaction();
      }
      try {
        if (resetTxState) {
          return currRegion.getSharedDataView().getRegionKeysForIteration(currRegion);
        }
        return getRealDeal(null, currRegion).getRegionKeysForIteration(currRegion);
      } finally {
        if (resetTxState) {
          getTxMgr().unpauseTransaction(txp);
        }
      }
    }
  }

  @Override
  public boolean isCommitOnBehalfOfRemoteStub() {
    return commitRequestedByOwner;
  }

  // Records and returns the new flag value (assignment-as-expression is intentional).
  @Override
  public boolean setCommitOnBehalfOfRemoteStub(boolean requestedByOwner) {
    return commitRequestedByOwner = requestedByOwner;
  }
@Override
public boolean isRealDealLocal() {
if (realDeal != null) {
return realDeal.isRealDealLocal();
} else {
// no real deal
return false;
}
}
/** if there is local txstate, return it */
public TXState getLocalRealDeal() {
if (realDeal != null) {
if (realDeal.isRealDealLocal()) {
return (TXState) realDeal;
}
}
return null;
}
public boolean hasRealDeal() {
return realDeal != null;
}
  @Override
  public String toString() {
    return "TXStateProxyImpl@" + System.identityHashCode(this) + " txId:"
        + txId + " realDeal:" + realDeal + " isJTA:"
        + isJTA;
  }

  /** Null until a real transaction state exists. */
  @Override
  public InternalDistributedMember getOriginatingMember() {
    if (realDeal == null) {
      return null;
    } else {
      return realDeal.getOriginatingMember();
    }
  }

  /** False until a real transaction state exists. */
  @Override
  public boolean isMemberIdForwardingRequired() {
    if (realDeal == null) {
      return false;
    } else {
      return realDeal.isMemberIdForwardingRequired();
    }
  }

  /** Null until a real transaction state exists. */
  @Override
  public TXCommitMessage getCommitMessage() {
    if (realDeal == null) {
      return null;
    } else {
      return realDeal.getCommitMessage();
    }
  }
  /**
   * Applies a putAll; the first non-null key routes/bootstraps the real deal.
   * No-op for an empty operation.
   */
  @Override
  public void postPutAll(DistributedPutAllOperation putallOp, VersionedObjectList successfulPuts,
      InternalRegion reg) {
    if (putallOp.putAllData.length == 0) {
      return;
    }
    reg.getCancelCriterion().checkCancelInProgress(null); // fix for bug #43651
    Object key = null;
    if (putallOp.putAllData[0] != null) {
      key = putallOp.putAllData[0].key;
    }
    KeyInfo ki = new KeyInfo(key, null, null);
    TXStateInterface tsi = getRealDeal(ki, reg);
    tsi.postPutAll(putallOp, successfulPuts, reg);
  }

  /** Applies a removeAll; mirrors postPutAll's routing via the first non-null key. */
  @Override
  public void postRemoveAll(DistributedRemoveAllOperation op, VersionedObjectList successfulOps,
      InternalRegion reg) {
    if (op.removeAllData.length == 0) {
      return;
    }
    reg.getCancelCriterion().checkCancelInProgress(null); // fix for bug #43651
    Object key = null;
    if (op.removeAllData[0] != null) {
      key = op.removeAllData[0].key;
    }
    KeyInfo ki = new KeyInfo(key, null, null);
    TXStateInterface tsi = getRealDeal(ki, reg);
    tsi.postRemoveAll(op, successfulOps, reg);
  }
  @Override
  public boolean isJCATransaction() {
    return isJCATransaction;
  }

  @Override
  public void setJCATransaction() {
    isJCATransaction = true;
  }

  /** Accesses an entry through the real deal, tracking the bucket touched. */
  @Override
  public Entry accessEntry(KeyInfo keyInfo, LocalRegion region) {
    try {
      operationCount++;
      Entry retVal = getRealDeal(keyInfo, region).accessEntry(keyInfo, region);
      trackBucketForTx(keyInfo);
      return retVal;
    } catch (TransactionDataRebalancedException transactionDataRebalancedException) {
      throw handleTransactionDataRebalancedException(keyInfo, transactionDataRebalancedException);
    } catch (PrimaryBucketException primaryBucketException) {
      throw getTransactionException(keyInfo, primaryBucketException);
    }
  }

  /** Suspends the real transaction state, if one exists. */
  @Override
  public void suspend() {
    if (realDeal != null) {
      getRealDeal(null, null).suspend();
    }
  }

  /** Resumes the real transaction state, if one exists. */
  @Override
  public void resume() {
    if (realDeal != null) {
      getRealDeal(null, null).resume();
    }
  }

  /** test hook - record a list of ops in the transaction */
  @Override
  public void recordTXOperation(ServerRegionDataAccess region, ServerRegionOperation op, Object key,
      Object[] arguments) {
    if (ClientTXStateStub.transactionRecordingEnabled()) {
      getRealDeal(null, (LocalRegion) region.getRegion()).recordTXOperation(region, op, key,
          arguments);
    }
  }
  /** Number of operations performed through this proxy so far. */
  @Override
  public int operationCount() {
    return operationCount;
  }

  /**
   * increments the operation count by 1
   */
  public void incOperationCount() {
    operationCount++;
  }

  @Override
  public void updateEntryVersion(EntryEventImpl event) throws EntryNotFoundException {
    // Do nothing. Not applicable for transactions.
  }

  /** Closes the real transaction state, if one exists. */
  @Override
  public void close() {
    if (realDeal != null) {
      realDeal.close();
    }
  }

  // Type discriminators: this object is the proxy, not a TXState or a stub.
  @Override
  public boolean isTxState() {
    return false;
  }

  @Override
  public boolean isTxStateStub() {
    return false;
  }

  @Override
  public boolean isTxStateProxy() {
    return true;
  }

  @Override
  public boolean isDistTx() {
    return false;
  }

  @Override
  public boolean isCreatedOnDistTxCoordinator() {
    return false;
  }
  /** Propagates the proxy server only to a local TXState serving a client. */
  @Override
  public void updateProxyServer(InternalDistributedMember proxy) {
    // only update in TXState if it has one
    if (realDeal != null && realDeal.isRealDealLocal() && isOnBehalfOfClient()) {
      ((TXState) realDeal).setProxyServer(proxy);
    }
  }

  /** True when the client has been idle longer than the transaction time-to-live. */
  public boolean isOverTransactionTimeoutLimit() {
    return getCurrentTime() - getLastOperationTimeFromClient() > TimeUnit.SECONDS
        .toMillis(txMgr.getTransactionTimeToLive());
  }

  // Package-private and non-static so tests can override the clock.
  long getCurrentTime() {
    return System.currentTimeMillis();
  }

  synchronized long getLastOperationTimeFromClient() {
    return lastOperationTimeFromClient;
  }

  public synchronized void setLastOperationTimeFromClient(long lastOperationTimeFromClient) {
    this.lastOperationTimeFromClient = lastOperationTimeFromClient;
  }

  @Override
  public InternalDistributedMember getOnBehalfOfClientMember() {
    return onBehalfOfClientMember;
  }

  void setFirstOperationOnPartitionedRegion(boolean firstOperationOnPartitionedRegion) {
    this.firstOperationOnPartitionedRegion = firstOperationOnPartitionedRegion;
  }
}
|
apache/kafka | 37,360 | clients/src/test/java/org/apache/kafka/clients/consumer/internals/ShareHeartbeatRequestManagerTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.kafka.clients.consumer.internals;
import org.apache.kafka.clients.ClientResponse;
import org.apache.kafka.clients.Metadata;
import org.apache.kafka.clients.consumer.ConsumerConfig;
import org.apache.kafka.clients.consumer.internals.events.BackgroundEventHandler;
import org.apache.kafka.clients.consumer.internals.events.ErrorEvent;
import org.apache.kafka.common.KafkaException;
import org.apache.kafka.common.Node;
import org.apache.kafka.common.Uuid;
import org.apache.kafka.common.errors.TimeoutException;
import org.apache.kafka.common.errors.UnsupportedVersionException;
import org.apache.kafka.common.message.ShareGroupHeartbeatRequestData;
import org.apache.kafka.common.message.ShareGroupHeartbeatResponseData;
import org.apache.kafka.common.metrics.KafkaMetric;
import org.apache.kafka.common.metrics.Metrics;
import org.apache.kafka.common.protocol.ApiKeys;
import org.apache.kafka.common.protocol.Errors;
import org.apache.kafka.common.requests.RequestHeader;
import org.apache.kafka.common.requests.ShareGroupHeartbeatRequest;
import org.apache.kafka.common.requests.ShareGroupHeartbeatResponse;
import org.apache.kafka.common.serialization.StringDeserializer;
import org.apache.kafka.common.utils.LogContext;
import org.apache.kafka.common.utils.MockTime;
import org.apache.kafka.common.utils.Time;
import org.apache.kafka.common.utils.Timer;
import org.apache.kafka.common.utils.annotation.ApiKeyVersionsSource;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.params.ParameterizedTest;
import org.junit.jupiter.params.provider.Arguments;
import org.junit.jupiter.params.provider.MethodSource;
import org.junit.jupiter.params.provider.ValueSource;
import org.mockito.ArgumentCaptor;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.Optional;
import java.util.Properties;
import java.util.Random;
import java.util.Set;
import java.util.concurrent.TimeUnit;
import static org.apache.kafka.clients.consumer.internals.ShareHeartbeatRequestManager.SHARE_PROTOCOL_NOT_SUPPORTED_MSG;
import static org.apache.kafka.clients.consumer.internals.ShareHeartbeatRequestManager.SHARE_PROTOCOL_VERSION_NOT_SUPPORTED_MSG;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertFalse;
import static org.junit.jupiter.api.Assertions.assertInstanceOf;
import static org.junit.jupiter.api.Assertions.assertNotNull;
import static org.junit.jupiter.api.Assertions.assertNull;
import static org.junit.jupiter.api.Assertions.assertTrue;
import static org.mockito.ArgumentMatchers.any;
import static org.mockito.ArgumentMatchers.anyBoolean;
import static org.mockito.ArgumentMatchers.anyLong;
import static org.mockito.Mockito.clearInvocations;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.never;
import static org.mockito.Mockito.spy;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.when;
public class ShareHeartbeatRequestManagerTest {
    // Default fixture values shared by all tests in this class.
    private static final String DEFAULT_GROUP_ID = "groupId";
    private static final String DEFAULT_MEMBER_ID = "member-id";
    private static final int DEFAULT_MEMBER_EPOCH = 1;
    private static final int DEFAULT_HEARTBEAT_INTERVAL_MS = 1000;
    private static final int DEFAULT_MAX_POLL_INTERVAL_MS = 10000;
    private static final long DEFAULT_RETRY_BACKOFF_MS = 80;
    private static final long DEFAULT_RETRY_BACKOFF_MAX_MS = 1000;
    // Zero jitter keeps heartbeat scheduling deterministic for assertions on exact timings.
    private static final double DEFAULT_HEARTBEAT_JITTER_MS = 0.0;
    private static final String SHARE_CONSUMER_COORDINATOR_METRICS = "consumer-share-coordinator-metrics";
    // Mutable per-test collaborators, re-created in setUp().
    private Time time;
    private Timer pollTimer;
    private CoordinatorRequestManager coordinatorRequestManager;
    private SubscriptionState subscriptions;
    private Metadata metadata;
    private ShareHeartbeatRequestManager heartbeatRequestManager;
    private ShareMembershipManager membershipManager;
    private HeartbeatRequestState heartbeatRequestState;
    private ShareHeartbeatRequestManager.HeartbeatState heartbeatState;
    private BackgroundEventHandler backgroundEventHandler;
    private Metrics metrics;
    private LogContext logContext;
    /**
     * Builds the manager under test with mocked collaborators, a MockTime clock, a spied
     * HeartbeatRequestState (default interval), and a coordinator that is always known.
     */
    @BeforeEach
    public void setUp() {
        time = new MockTime();
        pollTimer = spy(time.timer(DEFAULT_MAX_POLL_INTERVAL_MS));
        coordinatorRequestManager = mock(CoordinatorRequestManager.class);
        subscriptions = mock(SubscriptionState.class);
        backgroundEventHandler = mock(BackgroundEventHandler.class);
        membershipManager = mock(ShareMembershipManager.class);
        heartbeatState = mock(ShareHeartbeatRequestManager.HeartbeatState.class);
        metadata = mock(ConsumerMetadata.class);
        metrics = new Metrics(time);
        logContext = new LogContext();
        ConsumerConfig config = mock(ConsumerConfig.class);
        heartbeatRequestState = spy(new HeartbeatRequestState(
                logContext,
                time,
                DEFAULT_HEARTBEAT_INTERVAL_MS,
                DEFAULT_RETRY_BACKOFF_MS,
                DEFAULT_RETRY_BACKOFF_MAX_MS,
                DEFAULT_HEARTBEAT_JITTER_MS));
        heartbeatRequestManager = new ShareHeartbeatRequestManager(
                logContext,
                pollTimer,
                config,
                coordinatorRequestManager,
                membershipManager,
                heartbeatState,
                heartbeatRequestState,
                backgroundEventHandler,
                metrics);
        when(coordinatorRequestManager.coordinator()).thenReturn(Optional.of(new Node(1, "localhost", 9999)));
    }
    /**
     * Re-creates the request state with a zero heartbeat interval so a heartbeat is due
     * immediately, then rebuilds the manager around it.
     */
    private void createHeartbeatRequestStateWithZeroHeartbeatInterval() {
        heartbeatRequestState = spy(new HeartbeatRequestState(logContext,
                time,
                0,
                DEFAULT_RETRY_BACKOFF_MS,
                DEFAULT_RETRY_BACKOFF_MAX_MS,
                DEFAULT_HEARTBEAT_JITTER_MS));
        heartbeatRequestManager = createHeartbeatRequestManager(
                coordinatorRequestManager,
                membershipManager,
                heartbeatState,
                heartbeatRequestState,
                backgroundEventHandler);
    }
    /** Replaces the mocked HeartbeatState with a real one backed by the mocked subscriptions. */
    private void createHeartbeatStateAndRequestManager() {
        this.heartbeatState = new ShareHeartbeatRequestManager.HeartbeatState(
                subscriptions,
                membershipManager);
        this.heartbeatRequestManager = createHeartbeatRequestManager(
                coordinatorRequestManager,
                membershipManager,
                heartbeatState,
                heartbeatRequestState,
                backgroundEventHandler);
    }
    /**
     * A heartbeat is not sent before the interval elapses; with a zero interval exactly one
     * request is produced, and no second request while the first is still in flight.
     */
    @Test
    public void testHeartbeatOnStartup() {
        NetworkClientDelegate.PollResult result = heartbeatRequestManager.poll(time.milliseconds());
        assertEquals(0, result.unsentRequests.size());
        createHeartbeatRequestStateWithZeroHeartbeatInterval();
        assertEquals(0, heartbeatRequestManager.maximumTimeToWait(time.milliseconds()));
        result = heartbeatRequestManager.poll(time.milliseconds());
        assertEquals(1, result.unsentRequests.size());
        // Ensure we do not resend the request without the first request being completed
        NetworkClientDelegate.PollResult result2 = heartbeatRequestManager.poll(time.milliseconds());
        assertEquals(0, result2.unsentRequests.size());
    }
    /**
     * Verifies heartbeat interval bookkeeping: no send before the interval expires, timer
     * reset on send, partial-interval accounting, and rescheduling after a NONE response.
     */
    @Test
    public void testSuccessfulHeartbeatTiming() {
        NetworkClientDelegate.PollResult result = heartbeatRequestManager.poll(time.milliseconds());
        assertEquals(0, result.unsentRequests.size(),
                "No heartbeat should be sent while interval has not expired");
        assertEquals(heartbeatRequestState.timeToNextHeartbeatMs(time.milliseconds()), result.timeUntilNextPollMs);
        assertNextHeartbeatTiming(DEFAULT_HEARTBEAT_INTERVAL_MS);
        result = heartbeatRequestManager.poll(time.milliseconds());
        assertEquals(1, result.unsentRequests.size(), "A heartbeat should be sent when interval expires");
        NetworkClientDelegate.UnsentRequest inflightReq = result.unsentRequests.get(0);
        assertEquals(DEFAULT_HEARTBEAT_INTERVAL_MS,
                heartbeatRequestState.timeToNextHeartbeatMs(time.milliseconds()),
                "Heartbeat timer was not reset to the interval when the heartbeat request was sent.");
        long partOfInterval = DEFAULT_HEARTBEAT_INTERVAL_MS / 3;
        time.sleep(partOfInterval);
        result = heartbeatRequestManager.poll(time.milliseconds());
        assertEquals(0, result.unsentRequests.size(),
                "No heartbeat should be sent while only part of the interval has passed");
        assertEquals(DEFAULT_HEARTBEAT_INTERVAL_MS - partOfInterval,
                heartbeatRequestState.timeToNextHeartbeatMs(time.milliseconds()),
                "Time to next interval was not properly updated.");
        // Completing the in-flight request reschedules from the remaining interval.
        inflightReq.handler().onComplete(createHeartbeatResponse(inflightReq, Errors.NONE));
        assertNextHeartbeatTiming(DEFAULT_HEARTBEAT_INTERVAL_MS - partOfInterval);
    }
    /**
     * The first heartbeat must carry an empty member id, epoch 0, the subscribed topics,
     * and the group id, across all supported RPC versions.
     */
    @ParameterizedTest
    @ApiKeyVersionsSource(apiKey = ApiKeys.SHARE_GROUP_HEARTBEAT)
    public void testFirstHeartbeatIncludesRequiredInfoToJoinGroupAndGetAssignments(short version) {
        createHeartbeatStateAndRequestManager();
        createHeartbeatRequestStateWithZeroHeartbeatInterval();
        time.sleep(DEFAULT_HEARTBEAT_INTERVAL_MS);
        String topic = "topic1";
        Set<String> set = Collections.singleton(topic);
        when(subscriptions.subscription()).thenReturn(set);
        // NOTE(review): subscriptions is a Mockito mock, so the call below is recorded but has
        // no effect; the when(...) stub above is what drives behavior — confirm it is needed.
        subscriptions.subscribeToShareGroup(set);
        // Create a ShareGroupHeartbeatRequest and verify the payload
        mockJoiningMemberData();
        assertEquals(0, heartbeatRequestManager.maximumTimeToWait(time.milliseconds()));
        NetworkClientDelegate.PollResult pollResult = heartbeatRequestManager.poll(time.milliseconds());
        assertEquals(1, pollResult.unsentRequests.size());
        NetworkClientDelegate.UnsentRequest request = pollResult.unsentRequests.get(0);
        assertInstanceOf(ShareGroupHeartbeatRequest.Builder.class, request.requestBuilder());
        ShareGroupHeartbeatRequest heartbeatRequest =
                (ShareGroupHeartbeatRequest) request.requestBuilder().build(version);
        // Should include epoch 0 to join and no member ID.
        assertTrue(heartbeatRequest.data().memberId().isEmpty());
        assertEquals(0, heartbeatRequest.data().memberEpoch());
        // Should include subscription and group basic info to start getting assignments.
        assertEquals(Collections.singletonList(topic), heartbeatRequest.data().subscribedTopicNames());
        assertEquals(DEFAULT_GROUP_ID, heartbeatRequest.data().groupId());
    }
@ParameterizedTest
@ValueSource(booleans = {true, false})
public void testSkippingHeartbeat(final boolean shouldSkipHeartbeat) {
    // When the membership manager says heartbeats should be skipped (member not in
    // the group), poll() must produce no request and park indefinitely; otherwise a
    // heartbeat goes out immediately because the interval here is 0.
    // The initial heartbeatInterval is set to 0
    createHeartbeatRequestStateWithZeroHeartbeatInterval();
    // Mocking notInGroup
    when(membershipManager.shouldSkipHeartbeat()).thenReturn(shouldSkipHeartbeat);
    NetworkClientDelegate.PollResult result = heartbeatRequestManager.poll(time.milliseconds());
    if (!shouldSkipHeartbeat) {
        assertEquals(1, result.unsentRequests.size());
        assertEquals(0, result.timeUntilNextPollMs);
    } else {
        // Long.MAX_VALUE signals "nothing to do until state changes".
        assertEquals(0, result.unsentRequests.size());
        assertEquals(Long.MAX_VALUE, result.timeUntilNextPollMs);
    }
}
@Test
public void testTimerNotDue() {
    // Before the interval elapses no heartbeat is generated, and both the poll
    // result and maximumTimeToWait report exactly the remaining time.
    time.sleep(100); // time elapsed < heartbeatInterval, no heartbeat should be sent
    NetworkClientDelegate.PollResult result = heartbeatRequestManager.poll(time.milliseconds());
    assertEquals(0, result.unsentRequests.size());
    assertEquals(DEFAULT_HEARTBEAT_INTERVAL_MS - 100, result.timeUntilNextPollMs);
    assertEquals(DEFAULT_HEARTBEAT_INTERVAL_MS - 100, heartbeatRequestManager.maximumTimeToWait(time.milliseconds()));
    // Member in state where it should not send Heartbeat anymore
    when(subscriptions.hasAutoAssignedPartitions()).thenReturn(true);
    when(membershipManager.shouldSkipHeartbeat()).thenReturn(true);
    result = heartbeatRequestManager.poll(time.milliseconds());
    // Skipping overrides the timer: the manager parks until the state changes.
    assertEquals(Long.MAX_VALUE, result.timeUntilNextPollMs);
}
@Test
public void testHeartbeatNotSentIfAnotherOneInFlight() {
    // Only one heartbeat may be in flight at a time; even an expired interval must
    // not trigger a second request until the in-flight one completes.
    time.sleep(DEFAULT_HEARTBEAT_INTERVAL_MS);
    // Heartbeat sent (no response received)
    NetworkClientDelegate.PollResult result = heartbeatRequestManager.poll(time.milliseconds());
    assertEquals(1, result.unsentRequests.size());
    NetworkClientDelegate.UnsentRequest inflightReq = result.unsentRequests.get(0);
    time.sleep(DEFAULT_HEARTBEAT_INTERVAL_MS);
    result = heartbeatRequestManager.poll(time.milliseconds());
    assertEquals(0, result.unsentRequests.size(), "No heartbeat should be sent while a " +
        "previous one in-flight");
    time.sleep(DEFAULT_HEARTBEAT_INTERVAL_MS);
    result = heartbeatRequestManager.poll(time.milliseconds());
    assertEquals(0, result.unsentRequests.size(), "No heartbeat should be sent when the " +
        "interval expires if there is a previous heartbeat request in-flight");
    // Receive response for the inflight after the interval expired. The next HB should be sent
    // on the next poll waiting only for the minimal backoff.
    inflightReq.handler().onComplete(createHeartbeatResponse(inflightReq, Errors.NONE));
    time.sleep(DEFAULT_RETRY_BACKOFF_MS);
    result = heartbeatRequestManager.poll(time.milliseconds());
    assertEquals(1, result.unsentRequests.size(), "A next heartbeat should be sent on " +
        "the first poll after receiving a response that took longer than the interval, " +
        "waiting only for the minimal backoff.");
}
@Test
public void testHeartbeatOutsideInterval() {
    // When the membership manager requests an immediate heartbeat
    // (shouldHeartbeatNow), a request is generated even though the interval
    // timer has not expired, and the timer is reset to the full interval.
    when(membershipManager.shouldSkipHeartbeat()).thenReturn(false);
    when(membershipManager.shouldHeartbeatNow()).thenReturn(true);
    NetworkClientDelegate.PollResult result = heartbeatRequestManager.poll(time.milliseconds());
    // Heartbeat should be sent
    assertEquals(1, result.unsentRequests.size());
    // Interval timer reset
    assertEquals(DEFAULT_HEARTBEAT_INTERVAL_MS, result.timeUntilNextPollMs);
    assertEquals(DEFAULT_HEARTBEAT_INTERVAL_MS, heartbeatRequestManager.maximumTimeToWait(time.milliseconds()));
    // Membership manager updated (to transition out of the heartbeating state)
    verify(membershipManager).onHeartbeatRequestGenerated();
}
@Test
public void testNetworkTimeout() {
    // A network-level timeout is retriable: the membership manager is told the
    // failure was retriable, no error reaches the app thread, and the next
    // attempt waits exactly the retry backoff.
    // The initial heartbeatInterval is set to 0
    createHeartbeatRequestStateWithZeroHeartbeatInterval();
    NetworkClientDelegate.PollResult result = heartbeatRequestManager.poll(time.milliseconds());
    assertEquals(1, result.unsentRequests.size());
    // Mimic network timeout
    result.unsentRequests.get(0).handler().onFailure(time.milliseconds(), new TimeoutException("timeout"));
    verify(membershipManager).onHeartbeatFailure(true);
    verify(backgroundEventHandler, never()).add(any());
    // Assure the manager will backoff on timeout
    time.sleep(DEFAULT_RETRY_BACKOFF_MS - 1);
    result = heartbeatRequestManager.poll(time.milliseconds());
    assertEquals(0, result.unsentRequests.size());
    // One more millisecond completes the backoff, so the retry goes out.
    time.sleep(1);
    result = heartbeatRequestManager.poll(time.milliseconds());
    assertEquals(1, result.unsentRequests.size());
}
@Test
public void testFailureOnFatalException() {
    // A non-retriable failure on the request future is fatal: the member
    // transitions to FATAL and the error is propagated to the application thread.
    // Advance past the interval so a heartbeat becomes due.
    time.sleep(DEFAULT_HEARTBEAT_INTERVAL_MS);
    NetworkClientDelegate.PollResult result = heartbeatRequestManager.poll(time.milliseconds());
    assertEquals(1, result.unsentRequests.size());
    result.unsentRequests.get(0).handler().onFailure(time.milliseconds(), new KafkaException("fatal"));
    verify(membershipManager).transitionToFatal();
    verify(backgroundEventHandler).add(any());
}
@Test
public void testNoCoordinator() {
    // Without a known coordinator no heartbeat can be sent; the manager parks
    // (Long.MAX_VALUE) while maximumTimeToWait still reflects the interval timer.
    when(coordinatorRequestManager.coordinator()).thenReturn(Optional.empty());
    NetworkClientDelegate.PollResult result = heartbeatRequestManager.poll(time.milliseconds());
    assertEquals(Long.MAX_VALUE, result.timeUntilNextPollMs);
    assertEquals(DEFAULT_HEARTBEAT_INTERVAL_MS, heartbeatRequestManager.maximumTimeToWait(time.milliseconds()));
    assertEquals(0, result.unsentRequests.size());
}
@ParameterizedTest
@MethodSource("errorProvider")
public void testHeartbeatResponseOnErrorHandling(final Errors error, final boolean isFatal) {
    // Exercises the response-error dispatch: each broker error code must lead to
    // the right combination of retry timing, coordinator rediscovery, app-thread
    // notification, and (for fatal errors) a stop of all heartbeating.
    // Handling errors on the second heartbeat
    time.sleep(DEFAULT_HEARTBEAT_INTERVAL_MS);
    NetworkClientDelegate.PollResult result = heartbeatRequestManager.poll(time.milliseconds());
    assertEquals(1, result.unsentRequests.size());
    // Manually completing the response to test error handling
    when(subscriptions.hasAutoAssignedPartitions()).thenReturn(true);
    ClientResponse response = createHeartbeatResponse(
        result.unsentRequests.get(0),
        error);
    result.unsentRequests.get(0).handler().onComplete(response);
    ShareGroupHeartbeatResponse mockResponse = (ShareGroupHeartbeatResponse) response.responseBody();
    switch (error) {
        case NONE:
            // Success: membership manager notified, next heartbeat after the full interval.
            verify(membershipManager).onHeartbeatSuccess(mockResponse);
            assertNextHeartbeatTiming(DEFAULT_HEARTBEAT_INTERVAL_MS);
            break;
        case COORDINATOR_LOAD_IN_PROGRESS:
            // Retriable: nothing surfaced to the app thread, retry after the backoff.
            verify(backgroundEventHandler, never()).add(any());
            assertNextHeartbeatTiming(DEFAULT_RETRY_BACKOFF_MS);
            break;
        case COORDINATOR_NOT_AVAILABLE:
        case NOT_COORDINATOR:
            // Coordinator lost: mark it unknown and retry as soon as one is rediscovered.
            verify(backgroundEventHandler, never()).add(any());
            verify(coordinatorRequestManager).markCoordinatorUnknown(any(), anyLong());
            assertNextHeartbeatTiming(0);
            break;
        case UNKNOWN_MEMBER_ID:
            // Member must rejoin: no app-thread error, next heartbeat immediately.
            verify(backgroundEventHandler, never()).add(any());
            assertNextHeartbeatTiming(0);
            break;
        default:
            if (isFatal) {
                // Stub the coordinator away so ensureFatalError() can assert that
                // no further heartbeat is generated after the fatal transition.
                when(coordinatorRequestManager.coordinator()).thenReturn(Optional.empty());
                ensureFatalError(error);
            } else {
                verify(backgroundEventHandler, never()).add(any());
                assertNextHeartbeatTiming(0);
            }
            break;
    }
    if (error != Errors.NONE) {
        // Every error path reports a (non-retriable=false) heartbeat failure.
        verify(membershipManager).onHeartbeatFailure(false);
    }
    if (!isFatal) {
        // Make sure a next heartbeat is sent for all non-fatal errors (to retry or rejoin)
        time.sleep(DEFAULT_HEARTBEAT_INTERVAL_MS);
        result = heartbeatRequestManager.poll(time.milliseconds());
        assertEquals(1, result.unsentRequests.size());
    }
}
@ParameterizedTest
@ValueSource(strings = {SHARE_PROTOCOL_NOT_SUPPORTED_MSG})
public void testUnsupportedVersionGeneratedOnTheBroker(String errorMsg) {
    // An UNSUPPORTED_VERSION reported by the broker (error code in the response
    // body) must surface to the app thread with the broker's message intact.
    mockResponseWithException(new UnsupportedVersionException(errorMsg), true);
    ArgumentCaptor<ErrorEvent> errorEventArgumentCaptor = ArgumentCaptor.forClass(ErrorEvent.class);
    verify(backgroundEventHandler).add(errorEventArgumentCaptor.capture());
    ErrorEvent errorEvent = errorEventArgumentCaptor.getValue();
    assertInstanceOf(Errors.UNSUPPORTED_VERSION.exception().getClass(), errorEvent.error());
    assertEquals(errorMsg, errorEvent.error().getMessage());
    // NOTE(review): clearing invocations at the very end of the test appears
    // redundant — confirm whether any shared fixture relies on it.
    clearInvocations(backgroundEventHandler);
}
@ParameterizedTest
@ValueSource(strings = {SHARE_PROTOCOL_VERSION_NOT_SUPPORTED_MSG})
public void testUnsupportedVersionGeneratedOnTheClient(String errorMsg) {
    // An UnsupportedVersionException raised locally (before the request reaches
    // the broker) must also surface to the app thread with its message intact.
    mockResponseWithException(new UnsupportedVersionException(errorMsg), false);
    ArgumentCaptor<ErrorEvent> errorEventArgumentCaptor = ArgumentCaptor.forClass(ErrorEvent.class);
    verify(backgroundEventHandler).add(errorEventArgumentCaptor.capture());
    ErrorEvent errorEvent = errorEventArgumentCaptor.getValue();
    assertInstanceOf(Errors.UNSUPPORTED_VERSION.exception().getClass(), errorEvent.error());
    assertEquals(errorMsg, errorEvent.error().getMessage());
    // NOTE(review): clearing invocations at the very end of the test appears
    // redundant — confirm whether any shared fixture relies on it.
    clearInvocations(backgroundEventHandler);
}
/**
 * Generates one heartbeat via poll() and completes it with a response that
 * carries the given UnsupportedVersionException — either embedded in the
 * response body (isFromBroker=true) or as a client-side exception.
 */
private void mockResponseWithException(UnsupportedVersionException exception, boolean isFromBroker) {
    time.sleep(DEFAULT_HEARTBEAT_INTERVAL_MS);
    NetworkClientDelegate.PollResult result = heartbeatRequestManager.poll(time.milliseconds());
    assertEquals(1, result.unsentRequests.size());
    // Manually completing the response to test error handling
    when(subscriptions.hasAutoAssignedPartitions()).thenReturn(true);
    ClientResponse response = createHeartbeatResponseWithException(
        result.unsentRequests.get(0),
        exception,
        isFromBroker);
    result.unsentRequests.get(0).handler().onComplete(response);
}
@Test
public void testHeartbeatState() {
    // Walks HeartbeatState through the member lifecycle (joining -> stable ->
    // rejoining) and checks which fields each ShareGroupHeartbeatRequestData
    // carries at every step.
    mockJoiningMemberData();
    heartbeatState = new ShareHeartbeatRequestManager.HeartbeatState(
        subscriptions,
        membershipManager);
    createHeartbeatRequestStateWithZeroHeartbeatInterval();
    // The initial ShareGroupHeartbeatRequest sets most fields to their initial empty values
    ShareGroupHeartbeatRequestData data = heartbeatState.buildRequestData();
    assertEquals(DEFAULT_GROUP_ID, data.groupId());
    assertEquals("", data.memberId());
    assertEquals(0, data.memberEpoch());
    assertEquals(Collections.emptyList(), data.subscribedTopicNames());
    membershipManager.onHeartbeatRequestGenerated();
    // Mock a response from the group coordinator, that supplies the member ID and a new epoch
    when(membershipManager.state()).thenReturn(MemberState.STABLE);
    when(subscriptions.hasAutoAssignedPartitions()).thenReturn(true);
    when(subscriptions.rebalanceListener()).thenReturn(Optional.empty());
    mockStableMemberData();
    data = heartbeatState.buildRequestData();
    assertEquals(DEFAULT_GROUP_ID, data.groupId());
    assertEquals(DEFAULT_MEMBER_ID, data.memberId());
    assertEquals(DEFAULT_MEMBER_EPOCH, data.memberEpoch());
    // A stable member sends no subscription (null means "unchanged").
    assertNull(data.subscribedTopicNames());
    membershipManager.onHeartbeatRequestGenerated();
    // Join the group and subscribe to a topic, but the response has not yet been received
    String topic = "topic1";
    subscriptions.subscribe(Collections.singleton(topic), Optional.empty());
    when(subscriptions.subscription()).thenReturn(Collections.singleton(topic));
    mockRejoiningMemberData();
    data = heartbeatState.buildRequestData();
    assertEquals(DEFAULT_GROUP_ID, data.groupId());
    assertEquals(DEFAULT_MEMBER_ID, data.memberId());
    assertEquals(0, data.memberEpoch());
    assertEquals(Collections.singletonList(topic), data.subscribedTopicNames());
    membershipManager.onHeartbeatRequestGenerated();
    // While rejoining, consecutive requests keep resending the same full state.
    data = heartbeatState.buildRequestData();
    assertEquals(DEFAULT_GROUP_ID, data.groupId());
    assertEquals(DEFAULT_MEMBER_ID, data.memberId());
    assertEquals(0, data.memberEpoch());
    assertEquals(Collections.singletonList(topic), data.subscribedTopicNames());
    // Mock the response from the group coordinator which returns an assignment
    ShareGroupHeartbeatResponseData.TopicPartitions tpTopic1 =
        new ShareGroupHeartbeatResponseData.TopicPartitions();
    Uuid topicId = Uuid.randomUuid();
    tpTopic1.setTopicId(topicId);
    tpTopic1.setPartitions(Collections.singletonList(0));
    ShareGroupHeartbeatResponseData.Assignment assignmentTopic1 =
        new ShareGroupHeartbeatResponseData.Assignment();
    assignmentTopic1.setTopicPartitions(Collections.singletonList(tpTopic1));
    ShareGroupHeartbeatResponse rs1 = new ShareGroupHeartbeatResponse(new ShareGroupHeartbeatResponseData()
        .setHeartbeatIntervalMs(DEFAULT_HEARTBEAT_INTERVAL_MS)
        .setMemberId(DEFAULT_MEMBER_ID)
        .setMemberEpoch(DEFAULT_MEMBER_EPOCH)
        .setAssignment(assignmentTopic1));
    when(metadata.topicNames()).thenReturn(Collections.singletonMap(topicId, "topic1"));
    membershipManager.onHeartbeatSuccess(rs1);
}
@Test
public void testPollTimerExpiration() {
    // When the application stops calling poll() long enough for the poll timer to
    // expire, the member sends a final leave heartbeat, stops heartbeating, and
    // resumes (rejoining if stale) once the poll timer is reset.
    heartbeatRequestManager = createHeartbeatRequestManager(
        coordinatorRequestManager,
        membershipManager,
        heartbeatState,
        heartbeatRequestState,
        backgroundEventHandler);
    when(membershipManager.shouldSkipHeartbeat()).thenReturn(false);
    // On poll timer expiration, the member should send a last heartbeat to leave the group
    // and notify the membership manager
    time.sleep(DEFAULT_MAX_POLL_INTERVAL_MS);
    assertHeartbeat(heartbeatRequestManager, DEFAULT_HEARTBEAT_INTERVAL_MS);
    verify(membershipManager).transitionToSendingLeaveGroup(true);
    verify(heartbeatState).reset();
    verify(heartbeatRequestState).reset();
    verify(membershipManager).onHeartbeatRequestGenerated();
    // After the leave, heartbeats are skipped until the member rejoins.
    when(membershipManager.shouldSkipHeartbeat()).thenReturn(true);
    assertNoHeartbeat(heartbeatRequestManager);
    heartbeatRequestManager.resetPollTimer(time.milliseconds());
    assertTrue(pollTimer.notExpired());
    verify(membershipManager).maybeRejoinStaleMember();
    when(membershipManager.shouldSkipHeartbeat()).thenReturn(false);
    assertHeartbeat(heartbeatRequestManager, DEFAULT_HEARTBEAT_INTERVAL_MS);
}
/**
 * This is expected to be the case where a member is already leaving the group and the poll
 * timer expires. The poll timer expiration should not transition the member to STALE, and
 * the member should continue to send heartbeats while the ongoing leaving operation
 * completes (send heartbeats while waiting for callbacks before leaving, or send last
 * heartbeat to leave).
 */
@Test
public void testPollTimerExpirationShouldNotMarkMemberStaleIfMemberAlreadyLeaving() {
    when(membershipManager.shouldSkipHeartbeat()).thenReturn(false);
    when(membershipManager.isLeavingGroup()).thenReturn(true);
    // Let the poll timer expire while the leave is already in progress.
    time.sleep(DEFAULT_MAX_POLL_INTERVAL_MS);
    NetworkClientDelegate.PollResult result = heartbeatRequestManager.poll(time.milliseconds());
    // No transition to leave due to stale member should be triggered, because the member is
    // already leaving the group
    verify(membershipManager, never()).transitionToSendingLeaveGroup(anyBoolean());
    assertEquals(1, result.unsentRequests.size(), "A heartbeat request should be generated to" +
        " complete the ongoing leaving operation that was triggered before the poll timer expired.");
}
@Test
public void testHeartbeatMetrics() {
    // Verifies that the heartbeat sensors are registered and that
    // heartbeat-total / heartbeat-rate / last-heartbeat-seconds-ago advance as
    // heartbeats are sent and mock time progresses.
    assertNotNull(getMetric("heartbeat-response-time-max"));
    assertNotNull(getMetric("heartbeat-rate"));
    assertNotNull(getMetric("heartbeat-total"));
    assertNotNull(getMetric("last-heartbeat-seconds-ago"));
    // test poll
    time.sleep(DEFAULT_HEARTBEAT_INTERVAL_MS);
    assertHeartbeat(heartbeatRequestManager, DEFAULT_HEARTBEAT_INTERVAL_MS);
    time.sleep(1000);
    assertEquals(1.0, getMetric("heartbeat-total").metricValue());
    assertEquals((double) TimeUnit.MILLISECONDS.toSeconds(DEFAULT_HEARTBEAT_INTERVAL_MS), getMetric("last-heartbeat-seconds-ago").metricValue());
    assertHeartbeat(heartbeatRequestManager, DEFAULT_HEARTBEAT_INTERVAL_MS);
    // Rate is approximate; a small tolerance absorbs the sensor's window math.
    assertEquals(0.06d, (double) getMetric("heartbeat-rate").metricValue(), 0.005d);
    assertEquals(2.0, getMetric("heartbeat-total").metricValue());
    // Randomly sleep for some time
    // NOTE(review): the random sleep (0-10s of mock time) makes the final
    // assertion exercise a different value each run; deterministic with MockTime
    // but consider a fixed value for reproducible failures.
    Random rand = new Random();
    int randomSleepS = rand.nextInt(11);
    time.sleep(randomSleepS * 1000);
    assertEquals((double) randomSleepS, getMetric("last-heartbeat-seconds-ago").metricValue());
}
/**
 * Polls the given manager, asserts that exactly one heartbeat request was
 * generated with the expected time until the next poll, then completes that
 * request with a successful (Errors.NONE) response.
 */
private void assertHeartbeat(ShareHeartbeatRequestManager hrm, int nextPollMs) {
    NetworkClientDelegate.PollResult result = hrm.poll(time.milliseconds());
    assertEquals(1, result.unsentRequests.size());
    assertEquals(nextPollMs, result.timeUntilNextPollMs);
    NetworkClientDelegate.UnsentRequest heartbeat = result.unsentRequests.get(0);
    heartbeat.handler().onComplete(createHeartbeatResponse(heartbeat, Errors.NONE));
}
/** Polls the given manager and asserts that no heartbeat request was generated. */
private void assertNoHeartbeat(ShareHeartbeatRequestManager hrm) {
    assertEquals(0, hrm.poll(time.milliseconds()).unsentRequests.size());
}
/**
 * Asserts that the next heartbeat is due in exactly
 * {@code expectedTimeToNextHeartbeatMs} milliseconds, then advances the mock
 * clock to that point and verifies that sending is permitted.
 */
private void assertNextHeartbeatTiming(long expectedTimeToNextHeartbeatMs) {
    long nowMs = time.milliseconds();
    assertEquals(expectedTimeToNextHeartbeatMs, heartbeatRequestState.timeToNextHeartbeatMs(nowMs));
    if (expectedTimeToNextHeartbeatMs != 0) {
        // Not due yet: sending must be blocked until the remaining time elapses.
        assertFalse(heartbeatRequestState.canSendRequest(nowMs));
        time.sleep(expectedTimeToNextHeartbeatMs);
    }
    assertTrue(heartbeatRequestState.canSendRequest(time.milliseconds()));
}
/**
 * Verifies the full fatal-error path: the membership manager transitions to
 * FATAL, an ErrorEvent carrying the expected exception type reaches the app
 * thread, and no further heartbeats are generated.
 */
private void ensureFatalError(Errors expectedError) {
    verify(membershipManager).transitionToFatal();
    final ArgumentCaptor<ErrorEvent> errorEventArgumentCaptor = ArgumentCaptor.forClass(ErrorEvent.class);
    verify(backgroundEventHandler).add(errorEventArgumentCaptor.capture());
    ErrorEvent errorEvent = errorEventArgumentCaptor.getValue();
    assertInstanceOf(expectedError.exception().getClass(), errorEvent.error(),
        "The fatal error propagated to the app thread does not match the error received in the heartbeat response.");
    ensureHeartbeatStopped();
}
/** Verifies that no further heartbeat is generated even after a full interval elapses. */
private void ensureHeartbeatStopped() {
    time.sleep(DEFAULT_HEARTBEAT_INTERVAL_MS);
    assertEquals(0, heartbeatRequestManager.poll(time.milliseconds()).unsentRequests.size());
}
// error, isFatal
/**
 * Parameter source for {@code testHeartbeatResponseOnErrorHandling}: each pair
 * is a broker error code and whether the client treats it as fatal
 * (fatal errors stop heartbeating and surface to the application thread).
 */
private static Collection<Arguments> errorProvider() {
    return Arrays.asList(
        Arguments.of(Errors.NONE, false),
        Arguments.of(Errors.COORDINATOR_NOT_AVAILABLE, false),
        Arguments.of(Errors.COORDINATOR_LOAD_IN_PROGRESS, false),
        Arguments.of(Errors.NOT_COORDINATOR, false),
        Arguments.of(Errors.GROUP_AUTHORIZATION_FAILED, true),
        Arguments.of(Errors.INVALID_REQUEST, true),
        Arguments.of(Errors.UNKNOWN_MEMBER_ID, false),
        Arguments.of(Errors.FENCED_MEMBER_EPOCH, false),
        Arguments.of(Errors.UNSUPPORTED_ASSIGNOR, true),
        Arguments.of(Errors.UNSUPPORTED_VERSION, true),
        Arguments.of(Errors.UNRELEASED_INSTANCE_ID, true),
        Arguments.of(Errors.GROUP_MAX_SIZE_REACHED, true));
}
/**
 * Builds a ClientResponse wrapping a ShareGroupHeartbeatResponse for the given
 * in-flight request. For any error other than NONE a stub error message is
 * attached, mirroring what the broker would return.
 */
private ClientResponse createHeartbeatResponse(
        final NetworkClientDelegate.UnsentRequest request,
        final Errors error) {
    ShareGroupHeartbeatResponseData responseData = new ShareGroupHeartbeatResponseData()
        .setErrorCode(error.code())
        .setHeartbeatIntervalMs(DEFAULT_HEARTBEAT_INTERVAL_MS)
        .setMemberId(DEFAULT_MEMBER_ID)
        .setMemberEpoch(DEFAULT_MEMBER_EPOCH);
    if (error != Errors.NONE) {
        responseData.setErrorMessage("stubbed error message");
    }
    RequestHeader header = new RequestHeader(
        ApiKeys.SHARE_GROUP_HEARTBEAT,
        ApiKeys.SHARE_GROUP_HEARTBEAT.latestVersion(),
        "client-id",
        1);
    return new ClientResponse(
        header,
        request.handler(),
        "0",
        time.milliseconds(),
        time.milliseconds(),
        false,
        null,
        null,
        new ShareGroupHeartbeatResponse(responseData));
}
/**
 * Builds a ClientResponse that carries an UnsupportedVersionException, either as
 * an error code in the response body (as the broker would report it) or as a
 * client-side exception raised before the request reached the broker.
 */
private ClientResponse createHeartbeatResponseWithException(
        final NetworkClientDelegate.UnsentRequest request,
        final UnsupportedVersionException exception,
        final boolean isFromBroker
) {
    // Broker-side errors travel in the body; client-side ones as the version mismatch.
    ShareGroupHeartbeatResponse body = isFromBroker
        ? new ShareGroupHeartbeatResponse(
            new ShareGroupHeartbeatResponseData().setErrorCode(Errors.UNSUPPORTED_VERSION.code()))
        : null;
    return new ClientResponse(
        new RequestHeader(ApiKeys.SHARE_GROUP_HEARTBEAT, ApiKeys.SHARE_GROUP_HEARTBEAT.latestVersion(), "client-id", 1),
        request.handler(),
        "0",
        time.milliseconds(),
        time.milliseconds(),
        false,
        isFromBroker ? null : exception,
        null,
        body);
}
/**
 * Builds a minimal ConsumerConfig with string (de)serializers and this test's
 * poll-interval / retry-backoff settings.
 */
private ConsumerConfig config() {
    Properties props = new Properties();
    props.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class);
    props.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class);
    props.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9999");
    props.put(ConsumerConfig.MAX_POLL_INTERVAL_MS_CONFIG, String.valueOf(DEFAULT_MAX_POLL_INTERVAL_MS));
    props.put(ConsumerConfig.RETRY_BACKOFF_MS_CONFIG, String.valueOf(DEFAULT_RETRY_BACKOFF_MS));
    props.put(ConsumerConfig.RETRY_BACKOFF_MAX_MS_CONFIG, String.valueOf(DEFAULT_RETRY_BACKOFF_MAX_MS));
    return new ConsumerConfig(props);
}
/** Looks up a share-consumer coordinator metric registered under the given name. */
private KafkaMetric getMetric(final String metricName) {
    return metrics.metrics().get(metrics.metricName(metricName, SHARE_CONSUMER_COORDINATOR_METRICS));
}
/**
 * Creates a ShareHeartbeatRequestManager wired to the given collaborators. As a
 * side effect, (re)initializes {@code pollTimer} with the default max poll interval.
 */
private ShareHeartbeatRequestManager createHeartbeatRequestManager(
        final CoordinatorRequestManager coordinatorRequestManager,
        final ShareMembershipManager membershipManager,
        final ShareHeartbeatRequestManager.HeartbeatState heartbeatState,
        final HeartbeatRequestState heartbeatRequestState,
        final BackgroundEventHandler backgroundEventHandler) {
    pollTimer = time.timer(DEFAULT_MAX_POLL_INTERVAL_MS);
    return new ShareHeartbeatRequestManager(
        new LogContext(),
        pollTimer,
        config(),
        coordinatorRequestManager,
        membershipManager,
        heartbeatState,
        heartbeatRequestState,
        backgroundEventHandler,
        new Metrics());
}
/** Stubs the membership manager as a brand-new member: JOINING, empty member ID, epoch 0. */
private void mockJoiningMemberData() {
    when(membershipManager.state()).thenReturn(MemberState.JOINING);
    when(membershipManager.groupId()).thenReturn(DEFAULT_GROUP_ID);
    when(membershipManager.memberId()).thenReturn("");
    when(membershipManager.memberEpoch()).thenReturn(0);
}
/** Stubs a rejoining member: JOINING with epoch reset to 0 (member ID stubs left untouched). */
private void mockRejoiningMemberData() {
    when(membershipManager.state()).thenReturn(MemberState.JOINING);
    when(membershipManager.memberEpoch()).thenReturn(0);
}
/** Stubs an established member: empty local assignment, default group/member ID and epoch. */
private void mockStableMemberData() {
    when(membershipManager.currentAssignment()).thenReturn(new AbstractMembershipManager.LocalAssignment(0, Collections.emptyMap()));
    when(membershipManager.groupId()).thenReturn(DEFAULT_GROUP_ID);
    when(membershipManager.memberId()).thenReturn(DEFAULT_MEMBER_ID);
    when(membershipManager.memberEpoch()).thenReturn(DEFAULT_MEMBER_EPOCH);
}
}
|
googleapis/google-cloud-java | 36,958 | java-telcoautomation/proto-google-cloud-telcoautomation-v1/src/main/java/com/google/cloud/telcoautomation/v1/CreateDeploymentRequest.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/telcoautomation/v1/telcoautomation.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.telcoautomation.v1;
/**
*
*
* <pre>
* Request object for `CreateDeployment`.
* </pre>
*
* Protobuf type {@code google.cloud.telcoautomation.v1.CreateDeploymentRequest}
*/
public final class CreateDeploymentRequest extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.telcoautomation.v1.CreateDeploymentRequest)
CreateDeploymentRequestOrBuilder {
private static final long serialVersionUID = 0L;

// Use CreateDeploymentRequest.newBuilder() to construct.
private CreateDeploymentRequest(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
  super(builder);
}

// Initializes string fields to their proto3 default (empty string).
private CreateDeploymentRequest() {
  parent_ = "";
  deploymentId_ = "";
}

// Generated code (protoc) — used by the protobuf runtime to create instances.
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
  return new CreateDeploymentRequest();
}
// Message descriptor, used by reflection-based protobuf APIs. Generated code.
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
  return com.google.cloud.telcoautomation.v1.TelcoautomationProto
      .internal_static_google_cloud_telcoautomation_v1_CreateDeploymentRequest_descriptor;
}

@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
    internalGetFieldAccessorTable() {
  return com.google.cloud.telcoautomation.v1.TelcoautomationProto
      .internal_static_google_cloud_telcoautomation_v1_CreateDeploymentRequest_fieldAccessorTable
      .ensureFieldAccessorsInitialized(
          com.google.cloud.telcoautomation.v1.CreateDeploymentRequest.class,
          com.google.cloud.telcoautomation.v1.CreateDeploymentRequest.Builder.class);
}
// Presence bits for message-typed fields (bit 0 tracks `deployment`).
private int bitField0_;
public static final int PARENT_FIELD_NUMBER = 1;

// Holds either the decoded String or the raw ByteString; decoded lazily below.
@SuppressWarnings("serial")
private volatile java.lang.Object parent_ = "";
/**
 *
 *
 * <pre>
 * Required. The name of parent resource.
 * Format should be -
 * "projects/{project_id}/locations/{location_name}/orchestrationClusters/{orchestration_cluster}".
 * </pre>
 *
 * <code>
 * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
 * </code>
 *
 * @return The parent.
 */
@java.lang.Override
public java.lang.String getParent() {
  java.lang.Object ref = parent_;
  if (ref instanceof java.lang.String) {
    return (java.lang.String) ref;
  } else {
    // First access after parsing: decode the ByteString once and cache the String.
    com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
    java.lang.String s = bs.toStringUtf8();
    parent_ = s;
    return s;
  }
}
/**
 *
 *
 * <pre>
 * Required. The name of parent resource.
 * Format should be -
 * "projects/{project_id}/locations/{location_name}/orchestrationClusters/{orchestration_cluster}".
 * </pre>
 *
 * <code>
 * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
 * </code>
 *
 * @return The bytes for parent.
 */
@java.lang.Override
public com.google.protobuf.ByteString getParentBytes() {
  java.lang.Object ref = parent_;
  if (ref instanceof java.lang.String) {
    // Encode the cached String and keep the ByteString for future byte accesses.
    com.google.protobuf.ByteString b =
        com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
    parent_ = b;
    return b;
  } else {
    return (com.google.protobuf.ByteString) ref;
  }
}
public static final int DEPLOYMENT_ID_FIELD_NUMBER = 2;

// Holds either the decoded String or the raw ByteString; decoded lazily below.
@SuppressWarnings("serial")
private volatile java.lang.Object deploymentId_ = "";
/**
 *
 *
 * <pre>
 * Optional. The name of the deployment.
 * </pre>
 *
 * <code>string deployment_id = 2 [(.google.api.field_behavior) = OPTIONAL];</code>
 *
 * @return The deploymentId.
 */
@java.lang.Override
public java.lang.String getDeploymentId() {
  java.lang.Object ref = deploymentId_;
  if (ref instanceof java.lang.String) {
    return (java.lang.String) ref;
  } else {
    // First access after parsing: decode the ByteString once and cache the String.
    com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
    java.lang.String s = bs.toStringUtf8();
    deploymentId_ = s;
    return s;
  }
}
/**
 *
 *
 * <pre>
 * Optional. The name of the deployment.
 * </pre>
 *
 * <code>string deployment_id = 2 [(.google.api.field_behavior) = OPTIONAL];</code>
 *
 * @return The bytes for deploymentId.
 */
@java.lang.Override
public com.google.protobuf.ByteString getDeploymentIdBytes() {
  java.lang.Object ref = deploymentId_;
  if (ref instanceof java.lang.String) {
    // Encode the cached String and keep the ByteString for future byte accesses.
    com.google.protobuf.ByteString b =
        com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
    deploymentId_ = b;
    return b;
  } else {
    return (com.google.protobuf.ByteString) ref;
  }
}
public static final int DEPLOYMENT_FIELD_NUMBER = 3;

// Message-typed field: null until set; presence tracked via bitField0_ bit 0.
private com.google.cloud.telcoautomation.v1.Deployment deployment_;
/**
 *
 *
 * <pre>
 * Required. The `Deployment` to create.
 * </pre>
 *
 * <code>
 * .google.cloud.telcoautomation.v1.Deployment deployment = 3 [(.google.api.field_behavior) = REQUIRED];
 * </code>
 *
 * @return Whether the deployment field is set.
 */
@java.lang.Override
public boolean hasDeployment() {
  return ((bitField0_ & 0x00000001) != 0);
}
/**
 *
 *
 * <pre>
 * Required. The `Deployment` to create.
 * </pre>
 *
 * <code>
 * .google.cloud.telcoautomation.v1.Deployment deployment = 3 [(.google.api.field_behavior) = REQUIRED];
 * </code>
 *
 * @return The deployment.
 */
@java.lang.Override
public com.google.cloud.telcoautomation.v1.Deployment getDeployment() {
  // Returns the default instance (never null) when the field is unset.
  return deployment_ == null
      ? com.google.cloud.telcoautomation.v1.Deployment.getDefaultInstance()
      : deployment_;
}
/**
 *
 *
 * <pre>
 * Required. The `Deployment` to create.
 * </pre>
 *
 * <code>
 * .google.cloud.telcoautomation.v1.Deployment deployment = 3 [(.google.api.field_behavior) = REQUIRED];
 * </code>
 */
@java.lang.Override
public com.google.cloud.telcoautomation.v1.DeploymentOrBuilder getDeploymentOrBuilder() {
  return deployment_ == null
      ? com.google.cloud.telcoautomation.v1.Deployment.getDefaultInstance()
      : deployment_;
}
// Memoized result: -1 = unknown, 0 = not initialized, 1 = initialized.
private byte memoizedIsInitialized = -1;

@java.lang.Override
public final boolean isInitialized() {
  byte isInitialized = memoizedIsInitialized;
  if (isInitialized == 1) return true;
  if (isInitialized == 0) return false;
  // Proto3 message without required fields: always initialized; cache the result.
  memoizedIsInitialized = 1;
  return true;
}
// Serializes only non-default fields (proto3 wire-format convention). Generated code.
@java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
  if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(parent_)) {
    com.google.protobuf.GeneratedMessageV3.writeString(output, 1, parent_);
  }
  if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(deploymentId_)) {
    com.google.protobuf.GeneratedMessageV3.writeString(output, 2, deploymentId_);
  }
  if (((bitField0_ & 0x00000001) != 0)) {
    output.writeMessage(3, getDeployment());
  }
  getUnknownFields().writeTo(output);
}
// Computes and memoizes the serialized byte size; mirrors writeTo's field logic.
@java.lang.Override
public int getSerializedSize() {
  int size = memoizedSize;
  if (size != -1) return size;

  size = 0;
  if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(parent_)) {
    size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, parent_);
  }
  if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(deploymentId_)) {
    size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, deploymentId_);
  }
  if (((bitField0_ & 0x00000001) != 0)) {
    size += com.google.protobuf.CodedOutputStream.computeMessageSize(3, getDeployment());
  }
  size += getUnknownFields().getSerializedSize();
  memoizedSize = size;
  return size;
}
// Field-by-field structural equality, including unknown fields. Generated code.
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
  if (obj == this) {
    return true;
  }
  if (!(obj instanceof com.google.cloud.telcoautomation.v1.CreateDeploymentRequest)) {
    return super.equals(obj);
  }
  com.google.cloud.telcoautomation.v1.CreateDeploymentRequest other =
      (com.google.cloud.telcoautomation.v1.CreateDeploymentRequest) obj;

  if (!getParent().equals(other.getParent())) return false;
  if (!getDeploymentId().equals(other.getDeploymentId())) return false;
  // Message fields compare presence first, then value.
  if (hasDeployment() != other.hasDeployment()) return false;
  if (hasDeployment()) {
    if (!getDeployment().equals(other.getDeployment())) return false;
  }
  if (!getUnknownFields().equals(other.getUnknownFields())) return false;
  return true;
}
// Memoized hash over descriptor, fields and unknown fields; consistent with equals().
@java.lang.Override
public int hashCode() {
  if (memoizedHashCode != 0) {
    return memoizedHashCode;
  }
  int hash = 41;
  hash = (19 * hash) + getDescriptor().hashCode();
  hash = (37 * hash) + PARENT_FIELD_NUMBER;
  hash = (53 * hash) + getParent().hashCode();
  hash = (37 * hash) + DEPLOYMENT_ID_FIELD_NUMBER;
  hash = (53 * hash) + getDeploymentId().hashCode();
  // Optional message field only contributes when present.
  if (hasDeployment()) {
    hash = (37 * hash) + DEPLOYMENT_FIELD_NUMBER;
    hash = (53 * hash) + getDeployment().hashCode();
  }
  hash = (29 * hash) + getUnknownFields().hashCode();
  memoizedHashCode = hash;
  return hash;
}
// Standard generated parse entry points, one per input source
// (ByteBuffer, ByteString, byte[], InputStream — plain and delimited),
// each with and without an extension registry.
public static com.google.cloud.telcoautomation.v1.CreateDeploymentRequest parseFrom(
    java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data);
}

public static com.google.cloud.telcoautomation.v1.CreateDeploymentRequest parseFrom(
    java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data, extensionRegistry);
}

public static com.google.cloud.telcoautomation.v1.CreateDeploymentRequest parseFrom(
    com.google.protobuf.ByteString data)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data);
}

public static com.google.cloud.telcoautomation.v1.CreateDeploymentRequest parseFrom(
    com.google.protobuf.ByteString data,
    com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data, extensionRegistry);
}

public static com.google.cloud.telcoautomation.v1.CreateDeploymentRequest parseFrom(byte[] data)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data);
}

public static com.google.cloud.telcoautomation.v1.CreateDeploymentRequest parseFrom(
    byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data, extensionRegistry);
}

public static com.google.cloud.telcoautomation.v1.CreateDeploymentRequest parseFrom(
    java.io.InputStream input) throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}

public static com.google.cloud.telcoautomation.v1.CreateDeploymentRequest parseFrom(
    java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
      PARSER, input, extensionRegistry);
}

// Delimited variants read a length-prefixed message from the stream.
public static com.google.cloud.telcoautomation.v1.CreateDeploymentRequest parseDelimitedFrom(
    java.io.InputStream input) throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
}

public static com.google.cloud.telcoautomation.v1.CreateDeploymentRequest parseDelimitedFrom(
    java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
      PARSER, input, extensionRegistry);
}

public static com.google.cloud.telcoautomation.v1.CreateDeploymentRequest parseFrom(
    com.google.protobuf.CodedInputStream input) throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.telcoautomation.v1.CreateDeploymentRequest parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() {
return newBuilder();
}
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(
com.google.cloud.telcoautomation.v1.CreateDeploymentRequest prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
  /**
   *
   *
   * <pre>
   * Request object for `CreateDeployment`.
   * </pre>
   *
   * Protobuf type {@code google.cloud.telcoautomation.v1.CreateDeploymentRequest}
   */
  // Generated mutable companion of CreateDeploymentRequest. Field presence is
  // tracked in bitField0_: 0x1 = parent, 0x2 = deployment_id, 0x4 = deployment.
  public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
      implements
      // @@protoc_insertion_point(builder_implements:google.cloud.telcoautomation.v1.CreateDeploymentRequest)
      com.google.cloud.telcoautomation.v1.CreateDeploymentRequestOrBuilder {
    public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
      return com.google.cloud.telcoautomation.v1.TelcoautomationProto
          .internal_static_google_cloud_telcoautomation_v1_CreateDeploymentRequest_descriptor;
    }

    @java.lang.Override
    protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return com.google.cloud.telcoautomation.v1.TelcoautomationProto
          .internal_static_google_cloud_telcoautomation_v1_CreateDeploymentRequest_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              com.google.cloud.telcoautomation.v1.CreateDeploymentRequest.class,
              com.google.cloud.telcoautomation.v1.CreateDeploymentRequest.Builder.class);
    }

    // Construct using com.google.cloud.telcoautomation.v1.CreateDeploymentRequest.newBuilder()
    private Builder() {
      maybeForceBuilderInitialization();
    }

    private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
      super(parent);
      maybeForceBuilderInitialization();
    }

    // Eagerly creates the nested deployment field builder when the runtime
    // requires it (alwaysUseFieldBuilders is set in some runtime modes).
    private void maybeForceBuilderInitialization() {
      if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) {
        getDeploymentFieldBuilder();
      }
    }

    @java.lang.Override
    public Builder clear() {
      super.clear();
      bitField0_ = 0;
      parent_ = "";
      deploymentId_ = "";
      deployment_ = null;
      if (deploymentBuilder_ != null) {
        deploymentBuilder_.dispose();
        deploymentBuilder_ = null;
      }
      return this;
    }

    @java.lang.Override
    public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
      return com.google.cloud.telcoautomation.v1.TelcoautomationProto
          .internal_static_google_cloud_telcoautomation_v1_CreateDeploymentRequest_descriptor;
    }

    @java.lang.Override
    public com.google.cloud.telcoautomation.v1.CreateDeploymentRequest getDefaultInstanceForType() {
      return com.google.cloud.telcoautomation.v1.CreateDeploymentRequest.getDefaultInstance();
    }

    @java.lang.Override
    public com.google.cloud.telcoautomation.v1.CreateDeploymentRequest build() {
      com.google.cloud.telcoautomation.v1.CreateDeploymentRequest result = buildPartial();
      if (!result.isInitialized()) {
        throw newUninitializedMessageException(result);
      }
      return result;
    }

    @java.lang.Override
    public com.google.cloud.telcoautomation.v1.CreateDeploymentRequest buildPartial() {
      com.google.cloud.telcoautomation.v1.CreateDeploymentRequest result =
          new com.google.cloud.telcoautomation.v1.CreateDeploymentRequest(this);
      if (bitField0_ != 0) {
        buildPartial0(result);
      }
      onBuilt();
      return result;
    }

    // Copies only the explicitly-set fields into the result and records the
    // message-field presence bit (0x1 = deployment) on the built message.
    private void buildPartial0(com.google.cloud.telcoautomation.v1.CreateDeploymentRequest result) {
      int from_bitField0_ = bitField0_;
      if (((from_bitField0_ & 0x00000001) != 0)) {
        result.parent_ = parent_;
      }
      if (((from_bitField0_ & 0x00000002) != 0)) {
        result.deploymentId_ = deploymentId_;
      }
      int to_bitField0_ = 0;
      if (((from_bitField0_ & 0x00000004) != 0)) {
        result.deployment_ = deploymentBuilder_ == null ? deployment_ : deploymentBuilder_.build();
        to_bitField0_ |= 0x00000001;
      }
      result.bitField0_ |= to_bitField0_;
    }

    @java.lang.Override
    public Builder clone() {
      return super.clone();
    }

    @java.lang.Override
    public Builder setField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.setField(field, value);
    }

    @java.lang.Override
    public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
      return super.clearField(field);
    }

    @java.lang.Override
    public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
      return super.clearOneof(oneof);
    }

    @java.lang.Override
    public Builder setRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
      return super.setRepeatedField(field, index, value);
    }

    @java.lang.Override
    public Builder addRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.addRepeatedField(field, value);
    }

    @java.lang.Override
    public Builder mergeFrom(com.google.protobuf.Message other) {
      if (other instanceof com.google.cloud.telcoautomation.v1.CreateDeploymentRequest) {
        return mergeFrom((com.google.cloud.telcoautomation.v1.CreateDeploymentRequest) other);
      } else {
        super.mergeFrom(other);
        return this;
      }
    }

    // Field-wise merge: non-empty strings overwrite, the deployment message is
    // merged recursively, and unknown fields are concatenated.
    public Builder mergeFrom(com.google.cloud.telcoautomation.v1.CreateDeploymentRequest other) {
      if (other == com.google.cloud.telcoautomation.v1.CreateDeploymentRequest.getDefaultInstance())
        return this;
      if (!other.getParent().isEmpty()) {
        parent_ = other.parent_;
        bitField0_ |= 0x00000001;
        onChanged();
      }
      if (!other.getDeploymentId().isEmpty()) {
        deploymentId_ = other.deploymentId_;
        bitField0_ |= 0x00000002;
        onChanged();
      }
      if (other.hasDeployment()) {
        mergeDeployment(other.getDeployment());
      }
      this.mergeUnknownFields(other.getUnknownFields());
      onChanged();
      return this;
    }

    @java.lang.Override
    public final boolean isInitialized() {
      return true;
    }

    // Wire-format parse loop. Tag values: 10 = parent (field 1, length-delimited),
    // 18 = deployment_id (field 2), 26 = deployment (field 3, sub-message).
    // Unrecognized tags are preserved via parseUnknownField.
    @java.lang.Override
    public Builder mergeFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      if (extensionRegistry == null) {
        throw new java.lang.NullPointerException();
      }
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              done = true;
              break;
            case 10:
              {
                parent_ = input.readStringRequireUtf8();
                bitField0_ |= 0x00000001;
                break;
              } // case 10
            case 18:
              {
                deploymentId_ = input.readStringRequireUtf8();
                bitField0_ |= 0x00000002;
                break;
              } // case 18
            case 26:
              {
                input.readMessage(getDeploymentFieldBuilder().getBuilder(), extensionRegistry);
                bitField0_ |= 0x00000004;
                break;
              } // case 26
            default:
              {
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
          } // switch (tag)
        } // while (!done)
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.unwrapIOException();
      } finally {
        onChanged();
      } // finally
      return this;
    }

    // Presence bits: 0x1 parent, 0x2 deployment_id, 0x4 deployment.
    private int bitField0_;

    private java.lang.Object parent_ = "";
    /**
     *
     *
     * <pre>
     * Required. The name of parent resource.
     * Format should be -
     * "projects/{project_id}/locations/{location_name}/orchestrationClusters/{orchestration_cluster}".
     * </pre>
     *
     * <code>
     * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
     * </code>
     *
     * @return The parent.
     */
    public java.lang.String getParent() {
      java.lang.Object ref = parent_;
      if (!(ref instanceof java.lang.String)) {
        com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        parent_ = s;
        return s;
      } else {
        return (java.lang.String) ref;
      }
    }
    /**
     *
     *
     * <pre>
     * Required. The name of parent resource.
     * Format should be -
     * "projects/{project_id}/locations/{location_name}/orchestrationClusters/{orchestration_cluster}".
     * </pre>
     *
     * <code>
     * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
     * </code>
     *
     * @return The bytes for parent.
     */
    public com.google.protobuf.ByteString getParentBytes() {
      java.lang.Object ref = parent_;
      if (ref instanceof String) {
        com.google.protobuf.ByteString b =
            com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
        parent_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }
    /**
     *
     *
     * <pre>
     * Required. The name of parent resource.
     * Format should be -
     * "projects/{project_id}/locations/{location_name}/orchestrationClusters/{orchestration_cluster}".
     * </pre>
     *
     * <code>
     * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
     * </code>
     *
     * @param value The parent to set.
     * @return This builder for chaining.
     */
    public Builder setParent(java.lang.String value) {
      if (value == null) {
        throw new NullPointerException();
      }
      parent_ = value;
      bitField0_ |= 0x00000001;
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Required. The name of parent resource.
     * Format should be -
     * "projects/{project_id}/locations/{location_name}/orchestrationClusters/{orchestration_cluster}".
     * </pre>
     *
     * <code>
     * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
     * </code>
     *
     * @return This builder for chaining.
     */
    public Builder clearParent() {
      parent_ = getDefaultInstance().getParent();
      bitField0_ = (bitField0_ & ~0x00000001);
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Required. The name of parent resource.
     * Format should be -
     * "projects/{project_id}/locations/{location_name}/orchestrationClusters/{orchestration_cluster}".
     * </pre>
     *
     * <code>
     * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
     * </code>
     *
     * @param value The bytes for parent to set.
     * @return This builder for chaining.
     */
    public Builder setParentBytes(com.google.protobuf.ByteString value) {
      if (value == null) {
        throw new NullPointerException();
      }
      checkByteStringIsUtf8(value);
      parent_ = value;
      bitField0_ |= 0x00000001;
      onChanged();
      return this;
    }

    private java.lang.Object deploymentId_ = "";
    /**
     *
     *
     * <pre>
     * Optional. The name of the deployment.
     * </pre>
     *
     * <code>string deployment_id = 2 [(.google.api.field_behavior) = OPTIONAL];</code>
     *
     * @return The deploymentId.
     */
    public java.lang.String getDeploymentId() {
      java.lang.Object ref = deploymentId_;
      if (!(ref instanceof java.lang.String)) {
        com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        deploymentId_ = s;
        return s;
      } else {
        return (java.lang.String) ref;
      }
    }
    /**
     *
     *
     * <pre>
     * Optional. The name of the deployment.
     * </pre>
     *
     * <code>string deployment_id = 2 [(.google.api.field_behavior) = OPTIONAL];</code>
     *
     * @return The bytes for deploymentId.
     */
    public com.google.protobuf.ByteString getDeploymentIdBytes() {
      java.lang.Object ref = deploymentId_;
      if (ref instanceof String) {
        com.google.protobuf.ByteString b =
            com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
        deploymentId_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }
    /**
     *
     *
     * <pre>
     * Optional. The name of the deployment.
     * </pre>
     *
     * <code>string deployment_id = 2 [(.google.api.field_behavior) = OPTIONAL];</code>
     *
     * @param value The deploymentId to set.
     * @return This builder for chaining.
     */
    public Builder setDeploymentId(java.lang.String value) {
      if (value == null) {
        throw new NullPointerException();
      }
      deploymentId_ = value;
      bitField0_ |= 0x00000002;
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Optional. The name of the deployment.
     * </pre>
     *
     * <code>string deployment_id = 2 [(.google.api.field_behavior) = OPTIONAL];</code>
     *
     * @return This builder for chaining.
     */
    public Builder clearDeploymentId() {
      deploymentId_ = getDefaultInstance().getDeploymentId();
      bitField0_ = (bitField0_ & ~0x00000002);
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Optional. The name of the deployment.
     * </pre>
     *
     * <code>string deployment_id = 2 [(.google.api.field_behavior) = OPTIONAL];</code>
     *
     * @param value The bytes for deploymentId to set.
     * @return This builder for chaining.
     */
    public Builder setDeploymentIdBytes(com.google.protobuf.ByteString value) {
      if (value == null) {
        throw new NullPointerException();
      }
      checkByteStringIsUtf8(value);
      deploymentId_ = value;
      bitField0_ |= 0x00000002;
      onChanged();
      return this;
    }

    // Either deployment_ (plain message) or deploymentBuilder_ (lazy nested
    // builder) holds the value; once the field builder exists it is authoritative.
    private com.google.cloud.telcoautomation.v1.Deployment deployment_;
    private com.google.protobuf.SingleFieldBuilderV3<
            com.google.cloud.telcoautomation.v1.Deployment,
            com.google.cloud.telcoautomation.v1.Deployment.Builder,
            com.google.cloud.telcoautomation.v1.DeploymentOrBuilder>
        deploymentBuilder_;
    /**
     *
     *
     * <pre>
     * Required. The `Deployment` to create.
     * </pre>
     *
     * <code>
     * .google.cloud.telcoautomation.v1.Deployment deployment = 3 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     *
     * @return Whether the deployment field is set.
     */
    public boolean hasDeployment() {
      return ((bitField0_ & 0x00000004) != 0);
    }
    /**
     *
     *
     * <pre>
     * Required. The `Deployment` to create.
     * </pre>
     *
     * <code>
     * .google.cloud.telcoautomation.v1.Deployment deployment = 3 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     *
     * @return The deployment.
     */
    public com.google.cloud.telcoautomation.v1.Deployment getDeployment() {
      if (deploymentBuilder_ == null) {
        return deployment_ == null
            ? com.google.cloud.telcoautomation.v1.Deployment.getDefaultInstance()
            : deployment_;
      } else {
        return deploymentBuilder_.getMessage();
      }
    }
    /**
     *
     *
     * <pre>
     * Required. The `Deployment` to create.
     * </pre>
     *
     * <code>
     * .google.cloud.telcoautomation.v1.Deployment deployment = 3 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     */
    public Builder setDeployment(com.google.cloud.telcoautomation.v1.Deployment value) {
      if (deploymentBuilder_ == null) {
        if (value == null) {
          throw new NullPointerException();
        }
        deployment_ = value;
      } else {
        deploymentBuilder_.setMessage(value);
      }
      bitField0_ |= 0x00000004;
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Required. The `Deployment` to create.
     * </pre>
     *
     * <code>
     * .google.cloud.telcoautomation.v1.Deployment deployment = 3 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     */
    public Builder setDeployment(
        com.google.cloud.telcoautomation.v1.Deployment.Builder builderForValue) {
      if (deploymentBuilder_ == null) {
        deployment_ = builderForValue.build();
      } else {
        deploymentBuilder_.setMessage(builderForValue.build());
      }
      bitField0_ |= 0x00000004;
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Required. The `Deployment` to create.
     * </pre>
     *
     * <code>
     * .google.cloud.telcoautomation.v1.Deployment deployment = 3 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     */
    public Builder mergeDeployment(com.google.cloud.telcoautomation.v1.Deployment value) {
      if (deploymentBuilder_ == null) {
        if (((bitField0_ & 0x00000004) != 0)
            && deployment_ != null
            && deployment_ != com.google.cloud.telcoautomation.v1.Deployment.getDefaultInstance()) {
          getDeploymentBuilder().mergeFrom(value);
        } else {
          deployment_ = value;
        }
      } else {
        deploymentBuilder_.mergeFrom(value);
      }
      if (deployment_ != null) {
        bitField0_ |= 0x00000004;
        onChanged();
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * Required. The `Deployment` to create.
     * </pre>
     *
     * <code>
     * .google.cloud.telcoautomation.v1.Deployment deployment = 3 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     */
    public Builder clearDeployment() {
      bitField0_ = (bitField0_ & ~0x00000004);
      deployment_ = null;
      if (deploymentBuilder_ != null) {
        deploymentBuilder_.dispose();
        deploymentBuilder_ = null;
      }
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Required. The `Deployment` to create.
     * </pre>
     *
     * <code>
     * .google.cloud.telcoautomation.v1.Deployment deployment = 3 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     */
    public com.google.cloud.telcoautomation.v1.Deployment.Builder getDeploymentBuilder() {
      bitField0_ |= 0x00000004;
      onChanged();
      return getDeploymentFieldBuilder().getBuilder();
    }
    /**
     *
     *
     * <pre>
     * Required. The `Deployment` to create.
     * </pre>
     *
     * <code>
     * .google.cloud.telcoautomation.v1.Deployment deployment = 3 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     */
    public com.google.cloud.telcoautomation.v1.DeploymentOrBuilder getDeploymentOrBuilder() {
      if (deploymentBuilder_ != null) {
        return deploymentBuilder_.getMessageOrBuilder();
      } else {
        return deployment_ == null
            ? com.google.cloud.telcoautomation.v1.Deployment.getDefaultInstance()
            : deployment_;
      }
    }
    /**
     *
     *
     * <pre>
     * Required. The `Deployment` to create.
     * </pre>
     *
     * <code>
     * .google.cloud.telcoautomation.v1.Deployment deployment = 3 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     */
    private com.google.protobuf.SingleFieldBuilderV3<
            com.google.cloud.telcoautomation.v1.Deployment,
            com.google.cloud.telcoautomation.v1.Deployment.Builder,
            com.google.cloud.telcoautomation.v1.DeploymentOrBuilder>
        getDeploymentFieldBuilder() {
      if (deploymentBuilder_ == null) {
        deploymentBuilder_ =
            new com.google.protobuf.SingleFieldBuilderV3<
                com.google.cloud.telcoautomation.v1.Deployment,
                com.google.cloud.telcoautomation.v1.Deployment.Builder,
                com.google.cloud.telcoautomation.v1.DeploymentOrBuilder>(
                getDeployment(), getParentForChildren(), isClean());
        deployment_ = null;
      }
      return deploymentBuilder_;
    }

    @java.lang.Override
    public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.setUnknownFields(unknownFields);
    }

    @java.lang.Override
    public final Builder mergeUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.mergeUnknownFields(unknownFields);
    }

    // @@protoc_insertion_point(builder_scope:google.cloud.telcoautomation.v1.CreateDeploymentRequest)
  }
  // @@protoc_insertion_point(class_scope:google.cloud.telcoautomation.v1.CreateDeploymentRequest)
  // Shared default (empty) instance used by getDefaultInstance() and newBuilder().
  private static final com.google.cloud.telcoautomation.v1.CreateDeploymentRequest DEFAULT_INSTANCE;

  static {
    DEFAULT_INSTANCE = new com.google.cloud.telcoautomation.v1.CreateDeploymentRequest();
  }

  public static com.google.cloud.telcoautomation.v1.CreateDeploymentRequest getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }

  // Parser that drives all parseFrom()/parseDelimitedFrom() entry points. A
  // partially-parsed message is attached to the exception as the unfinished
  // message so callers can inspect what was read before the failure.
  private static final com.google.protobuf.Parser<CreateDeploymentRequest> PARSER =
      new com.google.protobuf.AbstractParser<CreateDeploymentRequest>() {
        @java.lang.Override
        public CreateDeploymentRequest parsePartialFrom(
            com.google.protobuf.CodedInputStream input,
            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
            throws com.google.protobuf.InvalidProtocolBufferException {
          Builder builder = newBuilder();
          try {
            builder.mergeFrom(input, extensionRegistry);
          } catch (com.google.protobuf.InvalidProtocolBufferException e) {
            throw e.setUnfinishedMessage(builder.buildPartial());
          } catch (com.google.protobuf.UninitializedMessageException e) {
            throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
          } catch (java.io.IOException e) {
            throw new com.google.protobuf.InvalidProtocolBufferException(e)
                .setUnfinishedMessage(builder.buildPartial());
          }
          return builder.buildPartial();
        }
      };

  public static com.google.protobuf.Parser<CreateDeploymentRequest> parser() {
    return PARSER;
  }

  @java.lang.Override
  public com.google.protobuf.Parser<CreateDeploymentRequest> getParserForType() {
    return PARSER;
  }

  @java.lang.Override
  public com.google.cloud.telcoautomation.v1.CreateDeploymentRequest getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }
}
|
apache/hadoop-common | 36,907 | hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/SecondaryNameNode.java | /**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hdfs.server.namenode;
import static org.apache.hadoop.util.ExitUtil.terminate;
import java.io.File;
import java.io.FilenameFilter;
import java.io.IOException;
import java.net.InetSocketAddress;
import java.net.URI;
import java.net.URL;
import java.security.PrivilegedAction;
import java.security.PrivilegedExceptionAction;
import java.util.*;
import com.google.common.collect.Lists;
import org.apache.commons.cli.CommandLine;
import org.apache.commons.cli.CommandLineParser;
import org.apache.commons.cli.HelpFormatter;
import org.apache.commons.cli.Option;
import org.apache.commons.cli.OptionBuilder;
import org.apache.commons.cli.Options;
import org.apache.commons.cli.ParseException;
import org.apache.commons.cli.PosixParser;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.HadoopIllegalArgumentException;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.hdfs.DFSConfigKeys;
import org.apache.hadoop.hdfs.DFSUtil;
import org.apache.hadoop.hdfs.HAUtil;
import org.apache.hadoop.hdfs.HdfsConfiguration;
import org.apache.hadoop.hdfs.NameNodeProxies;
import org.apache.hadoop.hdfs.protocol.HdfsConstants;
import org.apache.hadoop.hdfs.server.common.HdfsServerConstants;
import org.apache.hadoop.hdfs.server.common.InconsistentFSStateException;
import org.apache.hadoop.hdfs.server.common.JspHelper;
import org.apache.hadoop.hdfs.server.common.Storage.StorageDirectory;
import org.apache.hadoop.hdfs.server.common.Storage.StorageState;
import org.apache.hadoop.hdfs.server.namenode.FileJournalManager.EditLogFile;
import org.apache.hadoop.hdfs.server.namenode.NNStorage.NameNodeDirType;
import org.apache.hadoop.hdfs.server.namenode.NNStorage.NameNodeFile;
import org.apache.hadoop.hdfs.server.namenode.NNStorageRetentionManager.StoragePurger;
import org.apache.hadoop.hdfs.server.protocol.NamenodeProtocol;
import org.apache.hadoop.hdfs.server.protocol.RemoteEditLog;
import org.apache.hadoop.hdfs.server.protocol.RemoteEditLogManifest;
import org.apache.hadoop.hdfs.util.Canceler;
import org.apache.hadoop.http.HttpConfig;
import org.apache.hadoop.http.HttpServer2;
import org.apache.hadoop.io.MD5Hash;
import org.apache.hadoop.ipc.RemoteException;
import org.apache.hadoop.metrics2.lib.DefaultMetricsSystem;
import org.apache.hadoop.metrics2.source.JvmMetrics;
import org.apache.hadoop.metrics2.util.MBeans;
import org.apache.hadoop.net.NetUtils;
import org.apache.hadoop.security.SecurityUtil;
import org.apache.hadoop.security.UserGroupInformation;
import org.apache.hadoop.util.Daemon;
import org.apache.hadoop.util.StringUtils;
import org.apache.hadoop.util.Time;
import com.google.common.annotations.VisibleForTesting;
import com.google.common.base.Preconditions;
import org.apache.hadoop.util.VersionInfo;
import javax.management.ObjectName;
/**********************************************************
* The Secondary NameNode is a helper to the primary NameNode.
* The Secondary is responsible for supporting periodic checkpoints
* of the HDFS metadata. The current design allows only one Secondary
 * NameNode per HDFS cluster.
*
* The Secondary NameNode is a daemon that periodically wakes
* up (determined by the schedule specified in the configuration),
* triggers a periodic checkpoint and then goes back to sleep.
* The Secondary NameNode uses the NamenodeProtocol to talk to the
* primary NameNode.
*
**********************************************************/
@InterfaceAudience.Private
public class SecondaryNameNode implements Runnable,
SecondaryNameNodeInfoMXBean {
  // Ensure hdfs-default.xml / hdfs-site.xml are loaded before any Configuration use.
  static{
    HdfsConfiguration.init();
  }
  public static final Log LOG =
    LogFactory.getLog(SecondaryNameNode.class.getName());
  // Wall-clock start time; used only for display in toString().
  private final long starttime = Time.now();
  // Monotonic timestamp of the last completed checkpoint; 0 until the first one.
  // volatile: written by the checkpoint thread, read by toString()/MXBean callers.
  private volatile long lastCheckpointTime = 0;
  // Address of the active NameNode's info server (image transfer endpoint).
  private URL fsName;
  // Local storage for downloaded image + edits used to perform the merge.
  private CheckpointStorage checkpointImage;
  // RPC proxy to the active NameNode.
  private NamenodeProtocol namenode;
  private Configuration conf;
  private InetSocketAddress nameNodeAddr;
  // Work-loop run flag; cleared by shutdown().
  private volatile boolean shouldRun;
  // HTTP(S) server exposing the image-transfer servlet.
  private HttpServer2 infoServer;
  private Collection<URI> checkpointDirs;
  private List<URI> checkpointEditsDirs;
  // Checkpoint scheduling parameters (period, txn-count trigger, retries).
  private CheckpointConf checkpointConf;
  private FSNamesystem namesystem;
  private Thread checkpointThread;
  // JMX registration handle; unregistered in shutdown().
  private ObjectName nameNodeStatusBeanName;
  // Optional directory for saving images in the legacy OIV format.
  private String legacyOivImageDir;
@Override
public String toString() {
return getClass().getSimpleName() + " Status"
+ "\nName Node Address : " + nameNodeAddr
+ "\nStart Time : " + new Date(starttime)
+ "\nLast Checkpoint : " + (lastCheckpointTime == 0? "--":
((Time.monotonicNow() - lastCheckpointTime) / 1000))
+ " seconds ago"
+ "\nCheckpoint Period : " + checkpointConf.getPeriod() + " seconds"
+ "\nCheckpoint Transactions: " + checkpointConf.getTxnCount()
+ "\nCheckpoint Dirs : " + checkpointDirs
+ "\nCheckpoint Edits Dirs : " + checkpointEditsDirs;
}
  // Test-only accessors/mutators for the checkpoint image, namesystem, and
  // NameNode proxy; package-private (or public) visibility is intentional.
  @VisibleForTesting
  FSImage getFSImage() {
    return checkpointImage;
  }
  @VisibleForTesting
  int getMergeErrorCount() {
    return checkpointImage.getMergeErrorCount();
  }
  @VisibleForTesting
  public FSNamesystem getFSNamesystem() {
    return namesystem;
  }
  @VisibleForTesting
  void setFSImage(CheckpointStorage image) {
    this.checkpointImage = image;
  }
  @VisibleForTesting
  NamenodeProtocol getNameNode() {
    return namenode;
  }
  @VisibleForTesting
  void setNameNode(NamenodeProtocol namenode) {
    this.namenode = namenode;
  }
  /**
   * Create a connection to the primary namenode using default
   * command-line options.
   *
   * @param conf the HDFS configuration to use
   * @throws IOException if initialization fails or HA is enabled
   */
  public SecondaryNameNode(Configuration conf) throws IOException {
    this(conf, new CommandLineOpts());
  }
public SecondaryNameNode(Configuration conf,
CommandLineOpts commandLineOpts) throws IOException {
try {
String nsId = DFSUtil.getSecondaryNameServiceId(conf);
if (HAUtil.isHAEnabled(conf, nsId)) {
throw new IOException(
"Cannot use SecondaryNameNode in an HA cluster." +
" The Standby Namenode will perform checkpointing.");
}
NameNode.initializeGenericKeys(conf, nsId, null);
initialize(conf, commandLineOpts);
} catch (IOException e) {
shutdown();
throw e;
} catch (HadoopIllegalArgumentException e) {
shutdown();
throw e;
}
}
public static InetSocketAddress getHttpAddress(Configuration conf) {
return NetUtils.createSocketAddr(conf.get(
DFSConfigKeys.DFS_NAMENODE_SECONDARY_HTTP_ADDRESS_KEY,
DFSConfigKeys.DFS_NAMENODE_SECONDARY_HTTP_ADDRESS_DEFAULT));
}
  /**
   * Initialize SecondaryNameNode: perform the Kerberos login if security is
   * enabled, start metrics, open the RPC proxy to the active NameNode,
   * recover/create the local checkpoint storage, and start the HTTP(S) info
   * server that serves image transfers. Steps are order-dependent: login must
   * precede any RPC or HTTP setup, and storage recovery must precede
   * FSNamesystem construction.
   */
  private void initialize(final Configuration conf,
      CommandLineOpts commandLineOpts) throws IOException {
    final InetSocketAddress infoSocAddr = getHttpAddress(conf);
    final String infoBindAddress = infoSocAddr.getHostName();
    UserGroupInformation.setConfiguration(conf);
    if (UserGroupInformation.isSecurityEnabled()) {
      // Kerberos login before any RPC/HTTP activity.
      SecurityUtil.login(conf,
          DFSConfigKeys.DFS_SECONDARY_NAMENODE_KEYTAB_FILE_KEY,
          DFSConfigKeys.DFS_SECONDARY_NAMENODE_KERBEROS_PRINCIPAL_KEY, infoBindAddress);
    }
    // initiate Java VM metrics
    DefaultMetricsSystem.initialize("SecondaryNameNode");
    JvmMetrics.create("SecondaryNameNode",
        conf.get(DFSConfigKeys.DFS_METRICS_SESSION_ID_KEY),
        DefaultMetricsSystem.instance());
    // Create connection to the namenode.
    shouldRun = true;
    nameNodeAddr = NameNode.getServiceAddress(conf, true);
    this.conf = conf;
    this.namenode = NameNodeProxies.createNonHAProxy(conf, nameNodeAddr,
        NamenodeProtocol.class, UserGroupInformation.getCurrentUser(),
        true).getProxy();
    // initialize checkpoint directories
    fsName = getInfoServer();
    checkpointDirs = FSImage.getCheckpointDirs(conf,
        "/tmp/hadoop/dfs/namesecondary");
    checkpointEditsDirs = FSImage.getCheckpointEditsDirs(conf,
        "/tmp/hadoop/dfs/namesecondary");
    // Recover (or format, if requested) local storage and drop stale temp edits
    // before constructing the namesystem over it.
    checkpointImage = new CheckpointStorage(conf, checkpointDirs, checkpointEditsDirs);
    checkpointImage.recoverCreate(commandLineOpts.shouldFormat());
    checkpointImage.deleteTempEdits();
    namesystem = new FSNamesystem(conf, checkpointImage, true);
    // Disable quota checks
    namesystem.dir.disableQuotaChecks();
    // Initialize other scheduling parameters from the configuration
    checkpointConf = new CheckpointConf(conf);
    final InetSocketAddress httpAddr = infoSocAddr;
    final String httpsAddrString = conf.get(
        DFSConfigKeys.DFS_NAMENODE_SECONDARY_HTTPS_ADDRESS_KEY,
        DFSConfigKeys.DFS_NAMENODE_SECONDARY_HTTPS_ADDRESS_DEFAULT);
    InetSocketAddress httpsAddr = NetUtils.createSocketAddr(httpsAddrString);
    HttpServer2.Builder builder = DFSUtil.httpServerTemplateForNNAndJN(conf,
        httpAddr, httpsAddr, "secondary",
        DFSConfigKeys.DFS_SECONDARY_NAMENODE_KERBEROS_INTERNAL_SPNEGO_PRINCIPAL_KEY,
        DFSConfigKeys.DFS_SECONDARY_NAMENODE_KEYTAB_FILE_KEY);
    // Register the status MXBean; unregistered again in shutdown().
    nameNodeStatusBeanName = MBeans.register("SecondaryNameNode",
        "SecondaryNameNodeInfo", this);
    infoServer = builder.build();
    infoServer.setAttribute("secondary.name.node", this);
    infoServer.setAttribute("name.system.image", checkpointImage);
    infoServer.setAttribute(JspHelper.CURRENT_CONF, conf);
    infoServer.addInternalServlet("imagetransfer", ImageServlet.PATH_SPEC,
        ImageServlet.class, true);
    infoServer.start();
    LOG.info("Web server init done");
    // Write the actually-bound addresses back into the configuration so later
    // readers (and ephemeral-port users) see the real ports.
    HttpConfig.Policy policy = DFSUtil.getHttpPolicy(conf);
    int connIdx = 0;
    if (policy.isHttpEnabled()) {
      InetSocketAddress httpAddress = infoServer.getConnectorAddress(connIdx++);
      conf.set(DFSConfigKeys.DFS_NAMENODE_SECONDARY_HTTP_ADDRESS_KEY,
          NetUtils.getHostPortString(httpAddress));
    }
    if (policy.isHttpsEnabled()) {
      InetSocketAddress httpsAddress = infoServer.getConnectorAddress(connIdx);
      conf.set(DFSConfigKeys.DFS_NAMENODE_SECONDARY_HTTPS_ADDRESS_KEY,
          NetUtils.getHostPortString(httpsAddress));
    }
    legacyOivImageDir = conf.get(
        DFSConfigKeys.DFS_NAMENODE_LEGACY_OIV_IMAGE_DIR_KEY);
    LOG.info("Checkpoint Period :" + checkpointConf.getPeriod() + " secs "
        + "(" + checkpointConf.getPeriod() / 60 + " min)");
    LOG.info("Log Size Trigger :" + checkpointConf.getTxnCount() + " txns");
  }
  /**
   * Block the calling thread until the embedded HTTP info server stops.
   * (Normally, it runs forever, so this effectively waits for process
   * shutdown.)
   *
   * <p>An {@link InterruptedException} is logged at debug level and
   * swallowed; note the interrupt status is not restored here.
   */
  private void join() {
    try {
      infoServer.join();
    } catch (InterruptedException ie) {
      LOG.debug("Exception ", ie);
    }
  }
  /**
   * Shut down this instance of the SecondaryNameNode.
   * Returns only after shutdown is complete.
   *
   * <p>Each teardown step is independent: a failure in one (logged as a
   * warning) does not prevent the remaining steps from running.
   */
  public void shutdown() {
    shouldRun = false;
    // Wake the checkpointer out of its sleep and give it up to 10s to exit.
    if (checkpointThread != null) {
      checkpointThread.interrupt();
      try {
        checkpointThread.join(10000);
      } catch (InterruptedException e) {
        LOG.info("Interrupted waiting to join on checkpointer thread");
        Thread.currentThread().interrupt(); // maintain status
      }
    }
    // Stop the embedded HTTP info server; failures here are non-fatal.
    try {
      if (infoServer != null) {
        infoServer.stop();
        infoServer = null;
      }
    } catch (Exception e) {
      LOG.warn("Exception shutting down SecondaryNameNode", e);
    }
    // Unregister the JMX status bean registered at startup.
    if (nameNodeStatusBeanName != null) {
      MBeans.unregister(nameNodeStatusBeanName);
      nameNodeStatusBeanName = null;
    }
    // Close local checkpoint storage; log and continue on failure.
    try {
      if (checkpointImage != null) {
        checkpointImage.close();
        checkpointImage = null;
      }
    } catch(IOException e) {
      LOG.warn("Exception while closing CheckpointStorage", e);
    }
    if (namesystem != null) {
      namesystem.shutdown();
      namesystem = null;
    }
  }
  @Override
  public void run() {
    // Run the checkpoint loop under the login user's security context so
    // RPCs to the NameNode are made with the daemon's credentials.
    SecurityUtil.doAsLoginUserOrFatal(
        new PrivilegedAction<Object>() {
          @Override
          public Object run() {
            doWork();
            return null;
          }
        });
  }
//
// The main work loop
//
public void doWork() {
//
// Poll the Namenode (once every checkpointCheckPeriod seconds) to find the
// number of transactions in the edit log that haven't yet been checkpointed.
//
long period = checkpointConf.getCheckPeriod();
int maxRetries = checkpointConf.getMaxRetriesOnMergeError();
while (shouldRun) {
try {
Thread.sleep(1000 * period);
} catch (InterruptedException ie) {
// do nothing
}
if (!shouldRun) {
break;
}
try {
// We may have lost our ticket since last checkpoint, log in again, just in case
if(UserGroupInformation.isSecurityEnabled())
UserGroupInformation.getCurrentUser().checkTGTAndReloginFromKeytab();
final long now = Time.monotonicNow();
if (shouldCheckpointBasedOnCount() ||
now >= lastCheckpointTime + 1000 * checkpointConf.getPeriod()) {
doCheckpoint();
lastCheckpointTime = now;
}
} catch (IOException e) {
LOG.error("Exception in doCheckpoint", e);
e.printStackTrace();
// Prevent a huge number of edits from being created due to
// unrecoverable conditions and endless retries.
if (checkpointImage.getMergeErrorCount() > maxRetries) {
LOG.fatal("Merging failed " +
checkpointImage.getMergeErrorCount() + " times.");
terminate(1);
}
} catch (Throwable e) {
LOG.fatal("Throwable Exception in doCheckpoint", e);
e.printStackTrace();
terminate(1, e);
}
}
}
/**
* Download <code>fsimage</code> and <code>edits</code>
* files from the name-node.
* @return true if a new image has been downloaded and needs to be loaded
* @throws IOException
*/
static boolean downloadCheckpointFiles(
final URL nnHostPort,
final FSImage dstImage,
final CheckpointSignature sig,
final RemoteEditLogManifest manifest
) throws IOException {
// Sanity check manifest - these could happen if, eg, someone on the
// NN side accidentally rmed the storage directories
if (manifest.getLogs().isEmpty()) {
throw new IOException("Found no edit logs to download on NN since txid "
+ sig.mostRecentCheckpointTxId);
}
long expectedTxId = sig.mostRecentCheckpointTxId + 1;
if (manifest.getLogs().get(0).getStartTxId() != expectedTxId) {
throw new IOException("Bad edit log manifest (expected txid = " +
expectedTxId + ": " + manifest);
}
try {
Boolean b = UserGroupInformation.getCurrentUser().doAs(
new PrivilegedExceptionAction<Boolean>() {
@Override
public Boolean run() throws Exception {
dstImage.getStorage().cTime = sig.cTime;
// get fsimage
if (sig.mostRecentCheckpointTxId ==
dstImage.getStorage().getMostRecentCheckpointTxId()) {
LOG.info("Image has not changed. Will not download image.");
} else {
LOG.info("Image has changed. Downloading updated image from NN.");
MD5Hash downloadedHash = TransferFsImage.downloadImageToStorage(
nnHostPort, sig.mostRecentCheckpointTxId,
dstImage.getStorage(), true);
dstImage.saveDigestAndRenameCheckpointImage(NameNodeFile.IMAGE,
sig.mostRecentCheckpointTxId, downloadedHash);
}
// get edits file
for (RemoteEditLog log : manifest.getLogs()) {
TransferFsImage.downloadEditsToStorage(
nnHostPort, log, dstImage.getStorage());
}
// true if we haven't loaded all the transactions represented by the
// downloaded fsimage.
return dstImage.getLastAppliedTxId() < sig.mostRecentCheckpointTxId;
}
});
return b.booleanValue();
} catch (InterruptedException e) {
throw new RuntimeException(e);
}
}
  /** @return the RPC address of the NameNode this secondary talks to. */
  InetSocketAddress getNameNodeAddress() {
    return nameNodeAddr;
  }
  /**
   * Returns the URL of the HTTP(S) info server the NameNode is listening
   * on, derived from the default filesystem URI in the configuration.
   *
   * @throws IOException if the default filesystem is not HDFS
   */
  private URL getInfoServer() throws IOException {
    URI fsName = FileSystem.getDefaultUri(conf);
    if (!HdfsConstants.HDFS_URI_SCHEME.equalsIgnoreCase(fsName.getScheme())) {
      throw new IOException("This is not a DFS");
    }

    // Scheme (http vs https) follows the cluster's configured HTTP policy.
    final String scheme = DFSUtil.getHttpClientScheme(conf);
    URI address = DFSUtil.getInfoServerWithDefaultHost(fsName.getHost(), conf,
        scheme);
    LOG.debug("Will connect to NameNode at " + address);
    return address.toURL();
  }
/**
* Create a new checkpoint
* @return if the image is fetched from primary or not
*/
@VisibleForTesting
@SuppressWarnings("deprecated")
public boolean doCheckpoint() throws IOException {
checkpointImage.ensureCurrentDirExists();
NNStorage dstStorage = checkpointImage.getStorage();
// Tell the namenode to start logging transactions in a new edit file
// Returns a token that would be used to upload the merged image.
CheckpointSignature sig = namenode.rollEditLog();
boolean loadImage = false;
boolean isFreshCheckpointer = (checkpointImage.getNamespaceID() == 0);
boolean isSameCluster =
(dstStorage.versionSupportsFederation(NameNodeLayoutVersion.FEATURES)
&& sig.isSameCluster(checkpointImage)) ||
(!dstStorage.versionSupportsFederation(NameNodeLayoutVersion.FEATURES)
&& sig.namespaceIdMatches(checkpointImage));
if (isFreshCheckpointer ||
(isSameCluster &&
!sig.storageVersionMatches(checkpointImage.getStorage()))) {
// if we're a fresh 2NN, or if we're on the same cluster and our storage
// needs an upgrade, just take the storage info from the server.
dstStorage.setStorageInfo(sig);
dstStorage.setClusterID(sig.getClusterID());
dstStorage.setBlockPoolID(sig.getBlockpoolID());
loadImage = true;
}
sig.validateStorageInfo(checkpointImage);
// error simulation code for junit test
CheckpointFaultInjector.getInstance().afterSecondaryCallsRollEditLog();
RemoteEditLogManifest manifest =
namenode.getEditLogManifest(sig.mostRecentCheckpointTxId + 1);
// Fetch fsimage and edits. Reload the image if previous merge failed.
loadImage |= downloadCheckpointFiles(
fsName, checkpointImage, sig, manifest) |
checkpointImage.hasMergeError();
try {
doMerge(sig, manifest, loadImage, checkpointImage, namesystem);
} catch (IOException ioe) {
// A merge error occurred. The in-memory file system state may be
// inconsistent, so the image and edits need to be reloaded.
checkpointImage.setMergeError();
throw ioe;
}
// Clear any error since merge was successful.
checkpointImage.clearMergeError();
//
// Upload the new image into the NameNode. Then tell the Namenode
// to make this new uploaded image as the most current image.
//
long txid = checkpointImage.getLastAppliedTxId();
TransferFsImage.uploadImageFromStorage(fsName, conf, dstStorage,
NameNodeFile.IMAGE, txid);
// error simulation code for junit test
CheckpointFaultInjector.getInstance().afterSecondaryUploadsNewImage();
LOG.warn("Checkpoint done. New Image Size: "
+ dstStorage.getFsImageName(txid).length());
if (legacyOivImageDir != null && !legacyOivImageDir.isEmpty()) {
try {
checkpointImage.saveLegacyOIVImage(namesystem, legacyOivImageDir,
new Canceler());
} catch (IOException e) {
LOG.warn("Failed to write legacy OIV image: ", e);
}
}
return loadImage;
}
  /**
   * Execute the one-shot command (if any) given on the command line, e.g.
   * {@code -checkpoint} or {@code -geteditsize}.
   *
   * @param opts The parameters passed to this program.
   * @exception Exception if the filesystem does not exist.
   * @return 0 on success, non zero on error.
   */
  private int processStartupCommand(CommandLineOpts opts) throws Exception {
    if (opts.getCommand() == null) {
      return 0;
    }

    // Used only for log messages below.
    String cmd = opts.getCommand().toString().toLowerCase();

    int exitCode = 0;
    try {
      switch (opts.getCommand()) {
      case CHECKPOINT:
        long count = countUncheckpointedTxns();
        // Checkpoint only when past the txn threshold, unless forced.
        if (count > checkpointConf.getTxnCount() ||
            opts.shouldForceCheckpoint()) {
          doCheckpoint();
        } else {
          System.err.println("EditLog size " + count + " transactions is " +
              "smaller than configured checkpoint " +
              "interval " + checkpointConf.getTxnCount() + " transactions.");
          System.err.println("Skipping checkpoint.");
        }
        break;
      case GETEDITSIZE:
        long uncheckpointed = countUncheckpointedTxns();
        System.out.println("NameNode has " + uncheckpointed +
            " uncheckpointed transactions");
        break;
      default:
        throw new AssertionError("bad command enum: " + opts.getCommand());
      }

    } catch (RemoteException e) {
      //
      // This is a error returned by hadoop server. Print
      // out the first line of the error message, ignore the stack trace.
      exitCode = 1;
      try {
        String[] content;
        content = e.getLocalizedMessage().split("\n");
        LOG.error(cmd + ": " + content[0]);
      } catch (Exception ex) {
        LOG.error(cmd + ": " + ex.getLocalizedMessage());
      }
    } catch (IOException e) {
      //
      // IO exception encountered locally.
      //
      exitCode = 1;
      LOG.error(cmd + ": " + e.getLocalizedMessage());
    } finally {
      // Does the RPC connection need to be closed?
    }
    return exitCode;
  }
private long countUncheckpointedTxns() throws IOException {
long curTxId = namenode.getTransactionID();
long uncheckpointedTxns = curTxId -
checkpointImage.getStorage().getMostRecentCheckpointTxId();
assert uncheckpointedTxns >= 0;
return uncheckpointedTxns;
}
  /**
   * @return true if enough transactions have accumulated on the NameNode to
   *         warrant a checkpoint, per the configured txn-count threshold
   * @throws IOException if querying the NameNode fails
   */
  boolean shouldCheckpointBasedOnCount() throws IOException {
    return countUncheckpointedTxns() >= checkpointConf.getTxnCount();
  }
  /**
   * Command-line entry point for the SecondaryNameNode.
   * @param argv Command line parameters.
   * @exception Exception if the filesystem does not exist.
   */
  public static void main(String[] argv) throws Exception {
    CommandLineOpts opts = SecondaryNameNode.parseArgs(argv);
    if (opts == null) {
      LOG.fatal("Failed to parse options");
      // terminate is expected not to return here, so the null opts never
      // reaches the code below.
      terminate(1);
    } else if (opts.shouldPrintHelp()) {
      opts.usage();
      System.exit(0);
    }

    StringUtils.startupShutdownMessage(SecondaryNameNode.class, argv, LOG);
    Configuration tconf = new HdfsConfiguration();
    SecondaryNameNode secondary = null;
    try {
      secondary = new SecondaryNameNode(tconf, opts);
    } catch (IOException ioe) {
      LOG.fatal("Failed to start secondary namenode", ioe);
      terminate(1);
    }

    // One-shot command mode: run the command, then exit with its status.
    if (opts != null && opts.getCommand() != null) {
      int ret = secondary.processStartupCommand(opts);
      terminate(ret);
    }

    // Daemon mode: checkpoint periodically until the process is stopped.
    if (secondary != null) {
      secondary.startCheckpointThread();
      secondary.join();
    }
  }
  /**
   * Start the background checkpointing daemon. May be called at most once,
   * and only while the service has not been shut down.
   */
  public void startCheckpointThread() {
    Preconditions.checkState(checkpointThread == null,
        "Should not already have a thread");
    Preconditions.checkState(shouldRun, "shouldRun should be true");
    checkpointThread = new Daemon(this);
    checkpointThread.start();
  }
  @Override // SecondaryNameNodeInfoMXBean (was "MXXBean" — comment typo)
  public String getHostAndPort() {
    return NetUtils.getHostPortString(nameNodeAddr);
  }

  @Override // SecondaryNameNodeInfoMXBean
  public long getStartTime() {
    return starttime;
  }

  @Override // SecondaryNameNodeInfoMXBean
  public long getLastCheckpointTime() {
    return lastCheckpointTime;
  }

  @Override // SecondaryNameNodeInfoMXBean
  public String[] getCheckpointDirectories() {
    // Report configured image checkpoint dirs as plain strings for JMX.
    ArrayList<String> r = Lists.newArrayListWithCapacity(checkpointDirs.size());
    for (URI d : checkpointDirs) {
      r.add(d.toString());
    }
    return r.toArray(new String[r.size()]);
  }

  @Override // SecondaryNameNodeInfoMXBean
  public String[] getCheckpointEditlogDirectories() {
    // Same as above, but for the edit-log checkpoint dirs.
    ArrayList<String> r = Lists.newArrayListWithCapacity(checkpointEditsDirs.size());
    for (URI d : checkpointEditsDirs) {
      r.add(d.toString());
    }
    return r.toArray(new String[r.size()]);
  }

  @Override // VersionInfoMXBean
  public String getCompileInfo() {
    return VersionInfo.getDate() + " by " + VersionInfo.getUser() +
        " from " + VersionInfo.getBranch();
  }

  @Override // VersionInfoMXBean
  public String getSoftwareVersion() {
    return VersionInfo.getVersion();
  }
/**
* Container for parsed command-line options.
*/
@SuppressWarnings("static-access")
static class CommandLineOpts {
private final Options options = new Options();
private final Option geteditsizeOpt;
private final Option checkpointOpt;
private final Option formatOpt;
private final Option helpOpt;
Command cmd;
enum Command {
GETEDITSIZE,
CHECKPOINT;
}
private boolean shouldForce;
private boolean shouldFormat;
private boolean shouldPrintHelp;
CommandLineOpts() {
geteditsizeOpt = new Option("geteditsize",
"return the number of uncheckpointed transactions on the NameNode");
checkpointOpt = OptionBuilder.withArgName("force")
.hasOptionalArg().withDescription("checkpoint on startup").create("checkpoint");;
formatOpt = new Option("format", "format the local storage during startup");
helpOpt = new Option("h", "help", false, "get help information");
options.addOption(geteditsizeOpt);
options.addOption(checkpointOpt);
options.addOption(formatOpt);
options.addOption(helpOpt);
}
public boolean shouldFormat() {
return shouldFormat;
}
public boolean shouldPrintHelp() {
return shouldPrintHelp;
}
public void parse(String ... argv) throws ParseException {
CommandLineParser parser = new PosixParser();
CommandLine cmdLine = parser.parse(options, argv);
if (cmdLine.hasOption(helpOpt.getOpt())
|| cmdLine.hasOption(helpOpt.getLongOpt())) {
shouldPrintHelp = true;
return;
}
boolean hasGetEdit = cmdLine.hasOption(geteditsizeOpt.getOpt());
boolean hasCheckpoint = cmdLine.hasOption(checkpointOpt.getOpt());
if (hasGetEdit && hasCheckpoint) {
throw new ParseException("May not pass both "
+ geteditsizeOpt.getOpt() + " and "
+ checkpointOpt.getOpt());
}
if (hasGetEdit) {
cmd = Command.GETEDITSIZE;
} else if (hasCheckpoint) {
cmd = Command.CHECKPOINT;
String arg = cmdLine.getOptionValue(checkpointOpt.getOpt());
if ("force".equals(arg)) {
shouldForce = true;
} else if (arg != null) {
throw new ParseException("-checkpoint may only take 'force' as an "
+ "argument");
}
}
if (cmdLine.hasOption(formatOpt.getOpt())) {
shouldFormat = true;
}
}
public Command getCommand() {
return cmd;
}
public boolean shouldForceCheckpoint() {
return shouldForce;
}
void usage() {
String header = "The Secondary NameNode is a helper "
+ "to the primary NameNode. The Secondary is responsible "
+ "for supporting periodic checkpoints of the HDFS metadata. "
+ "The current design allows only one Secondary NameNode "
+ "per HDFS cluster.";
HelpFormatter formatter = new HelpFormatter();
formatter.printHelp("secondarynamenode", header, options, "", false);
}
}
  /**
   * Parse command-line arguments.
   *
   * @return the parsed options, or null (after logging the error and
   *         printing usage) if parsing failed
   */
  private static CommandLineOpts parseArgs(String[] argv) {
    CommandLineOpts opts = new CommandLineOpts();
    try {
      opts.parse(argv);
    } catch (ParseException pe) {
      LOG.error(pe.getMessage());
      opts.usage();
      return null;
    }
    return opts;
  }
  /**
   * FSImage specialization used by the SecondaryNameNode. It never writes
   * edits itself (editLog is nulled out), tracks merge failures so callers
   * can force an image reload, and uses a purger that operates directly on
   * the 2NN's local edits storage.
   */
  static class CheckpointStorage extends FSImage {

    // Consecutive merge failures since the last successful merge.
    private int mergeErrorCount;

    // LogsPurgeable backed by the local checkpoint storage directories,
    // rather than a live edit log.
    private static class CheckpointLogPurger implements LogsPurgeable {

      private final NNStorage storage;
      private final StoragePurger purger
          = new NNStorageRetentionManager.DeletionStoragePurger();

      public CheckpointLogPurger(NNStorage storage) {
        this.storage = storage;
      }

      @Override
      public void purgeLogsOlderThan(long minTxIdToKeep) throws IOException {
        // Delete every local edits file whose last txid is fully below the
        // retention threshold.
        Iterator<StorageDirectory> iter = storage.dirIterator();
        while (iter.hasNext()) {
          StorageDirectory dir = iter.next();
          List<EditLogFile> editFiles = FileJournalManager.matchEditLogs(
              dir.getCurrentDir());
          for (EditLogFile f : editFiles) {
            if (f.getLastTxId() < minTxIdToKeep) {
              purger.purgeLog(f);
            }
          }
        }
      }

      @Override
      public void selectInputStreams(Collection<EditLogInputStream> streams,
          long fromTxId, boolean inProgressOk) {
        Iterator<StorageDirectory> iter = storage.dirIterator();
        while (iter.hasNext()) {
          StorageDirectory dir = iter.next();
          List<EditLogFile> editFiles;
          try {
            editFiles = FileJournalManager.matchEditLogs(
                dir.getCurrentDir());
          } catch (IOException ioe) {
            // Interface does not declare IOException; wrap and rethrow.
            throw new RuntimeException(ioe);
          }
          FileJournalManager.addStreamsToCollectionFromFiles(editFiles, streams,
              fromTxId, inProgressOk);
        }
      }

    }

    /**
     * Construct a checkpoint image.
     * @param conf Node configuration.
     * @param imageDirs URIs of storage for image.
     * @param editsDirs URIs of storage for edit logs.
     * @throws IOException If storage cannot be accessed.
     */
    CheckpointStorage(Configuration conf,
                      Collection<URI> imageDirs,
                      List<URI> editsDirs) throws IOException {
      super(conf, imageDirs, editsDirs);

      // the 2NN never writes edits -- it only downloads them. So
      // we shouldn't have any editLog instance. Setting to null
      // makes sure we don't accidentally depend on it.
      editLog = null;
      mergeErrorCount = 0;

      // Replace the archival manager with one that can actually work on the
      // 2NN's edits storage.
      this.archivalManager = new NNStorageRetentionManager(conf, storage,
          new CheckpointLogPurger(storage));
    }

    /**
     * Analyze checkpoint directories.
     * Create directories if they do not exist.
     * Recover from an unsuccessful checkpoint if necessary.
     *
     * @throws IOException
     */
    void recoverCreate(boolean format) throws IOException {
      storage.attemptRestoreRemovedStorage();
      storage.unlockAll();

      for (Iterator<StorageDirectory> it =
               storage.dirIterator(); it.hasNext();) {
        StorageDirectory sd = it.next();
        boolean isAccessible = true;
        try { // create directories if don't exist yet
          if(!sd.getRoot().mkdirs()) {
            // do nothing, directory is already created
          }
        } catch(SecurityException se) {
          isAccessible = false;
        }
        if(!isAccessible)
          throw new InconsistentFSStateException(sd.getRoot(),
              "cannot access checkpoint directory.");

        if (format) {
          // Don't confirm, since this is just the secondary namenode.
          LOG.info("Formatting storage directory " + sd);
          sd.clearDirectory();
        }

        StorageState curState;
        try {
          curState = sd.analyzeStorage(HdfsServerConstants.StartupOption.REGULAR, storage);
          // sd is locked but not opened
          switch(curState) {
          case NON_EXISTENT:
            // fail if any of the configured checkpoint dirs are inaccessible
            throw new InconsistentFSStateException(sd.getRoot(),
                "checkpoint directory does not exist or is not accessible.");
          case NOT_FORMATTED:
            break; // it's ok since initially there is no current and VERSION
          case NORMAL:
            // Read the VERSION file. This verifies that:
            // (a) the VERSION file for each of the directories is the same,
            // and (b) when we connect to a NN, we can verify that the remote
            // node matches the same namespace that we ran on previously.
            storage.readProperties(sd);
            break;
          default: // recovery is possible
            sd.doRecover(curState);
          }
        } catch (IOException ioe) {
          // Release the lock taken by analyzeStorage before propagating.
          sd.unlock();
          throw ioe;
        }
      }
    }

    boolean hasMergeError() {
      return (mergeErrorCount > 0);
    }

    int getMergeErrorCount() {
      return mergeErrorCount;
    }

    void setMergeError() {
      mergeErrorCount++;
    }

    void clearMergeError() {
      mergeErrorCount = 0;
    }

    /**
     * Ensure that the current/ directory exists in all storage
     * directories
     */
    void ensureCurrentDirExists() throws IOException {
      for (Iterator<StorageDirectory> it
          = storage.dirIterator(); it.hasNext();) {
        StorageDirectory sd = it.next();
        File curDir = sd.getCurrentDir();
        if (!curDir.exists() && !curDir.mkdirs()) {
          throw new IOException("Could not create directory " + curDir);
        }
      }
    }

    // Remove leftover edits_tmp files from interrupted downloads; failures
    // to delete are logged but not fatal.
    void deleteTempEdits() throws IOException {
      FilenameFilter filter = new FilenameFilter() {
        @Override
        public boolean accept(File dir, String name) {
          return name.matches(NameNodeFile.EDITS_TMP.getName()
              + "_(\\d+)-(\\d+)_(\\d+)");
        }
      };

      Iterator<StorageDirectory> it = storage.dirIterator(NameNodeDirType.EDITS);
      for (;it.hasNext();) {
        StorageDirectory dir = it.next();
        File[] tempEdits = dir.getCurrentDir().listFiles(filter);
        if (tempEdits != null) {
          for (File t : tempEdits) {
            boolean success = t.delete();
            if (!success) {
              LOG.warn("Failed to delete temporary edits file: "
                  + t.getAbsolutePath());
            }
          }
        }
      }
    }

  }
  /**
   * Merge the downloaded checkpoint files into the local namespace and
   * persist the result: optionally reload the downloaded image, replay the
   * edit logs from the manifest, then save the merged image to all storage
   * directories (which also purges old images/edits).
   */
  static void doMerge(
      CheckpointSignature sig, RemoteEditLogManifest manifest,
      boolean loadImage, FSImage dstImage, FSNamesystem dstNamesystem)
      throws IOException {
    NNStorage dstStorage = dstImage.getStorage();

    // Adopt the storage info (layout version etc.) from the signature.
    dstStorage.setStorageInfo(sig);
    if (loadImage) {
      File file = dstStorage.findImageFile(NameNodeFile.IMAGE,
          sig.mostRecentCheckpointTxId);
      if (file == null) {
        throw new IOException("Couldn't find image file at txid " +
            sig.mostRecentCheckpointTxId + " even though it should have " +
            "just been downloaded");
      }
      // Reload under the namesystem write lock so no reader observes a
      // partially loaded namespace.
      dstNamesystem.writeLock();
      try {
        dstImage.reloadFromImageFile(file, dstNamesystem);
      } finally {
        dstNamesystem.writeUnlock();
      }
      dstNamesystem.imageLoadComplete();
    }
    // error simulation code for junit test
    CheckpointFaultInjector.getInstance().duringMerge();

    Checkpointer.rollForwardByApplyingLogs(manifest, dstImage, dstNamesystem);
    // The following has the side effect of purging old fsimages/edit logs.
    dstImage.saveFSImageInAllDirs(dstNamesystem, dstImage.getLastAppliedTxId());
    dstStorage.writeAll();
  }
}
|
googleapis/google-cloud-java | 37,005 | java-kms/proto-google-cloud-kms-v1/src/main/java/com/google/cloud/kms/v1/CreateKeyHandleRequest.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/kms/v1/autokey.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.kms.v1;
/**
*
*
* <pre>
* Request message for
* [Autokey.CreateKeyHandle][google.cloud.kms.v1.Autokey.CreateKeyHandle].
* </pre>
*
* Protobuf type {@code google.cloud.kms.v1.CreateKeyHandleRequest}
*/
public final class CreateKeyHandleRequest extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.kms.v1.CreateKeyHandleRequest)
CreateKeyHandleRequestOrBuilder {
private static final long serialVersionUID = 0L;
// Use CreateKeyHandleRequest.newBuilder() to construct.
private CreateKeyHandleRequest(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private CreateKeyHandleRequest() {
parent_ = "";
keyHandleId_ = "";
}
  // NOTE(review): protoc-generated plumbing — regenerate from the .proto
  // rather than editing by hand.
  @java.lang.Override
  @SuppressWarnings({"unused"})
  protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
    return new CreateKeyHandleRequest();
  }

  public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
    return com.google.cloud.kms.v1.AutokeyProto
        .internal_static_google_cloud_kms_v1_CreateKeyHandleRequest_descriptor;
  }

  @java.lang.Override
  protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internalGetFieldAccessorTable() {
    return com.google.cloud.kms.v1.AutokeyProto
        .internal_static_google_cloud_kms_v1_CreateKeyHandleRequest_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            com.google.cloud.kms.v1.CreateKeyHandleRequest.class,
            com.google.cloud.kms.v1.CreateKeyHandleRequest.Builder.class);
  }
  private int bitField0_;
  public static final int PARENT_FIELD_NUMBER = 1;

  // Holds either the decoded String or the wire ByteString; getParent()
  // caches the decoded String back into this field.
  @SuppressWarnings("serial")
  private volatile java.lang.Object parent_ = "";
  /**
   *
   *
   * <pre>
   * Required. Name of the resource project and location to create the
   * [KeyHandle][google.cloud.kms.v1.KeyHandle] in, e.g.
   * `projects/{PROJECT_ID}/locations/{LOCATION}`.
   * </pre>
   *
   * <code>
   * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
   * </code>
   *
   * @return The parent.
   */
  @java.lang.Override
  public java.lang.String getParent() {
    java.lang.Object ref = parent_;
    if (ref instanceof java.lang.String) {
      return (java.lang.String) ref;
    } else {
      com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
      java.lang.String s = bs.toStringUtf8();
      parent_ = s;
      return s;
    }
  }

  /**
   *
   *
   * <pre>
   * Required. Name of the resource project and location to create the
   * [KeyHandle][google.cloud.kms.v1.KeyHandle] in, e.g.
   * `projects/{PROJECT_ID}/locations/{LOCATION}`.
   * </pre>
   *
   * <code>
   * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
   * </code>
   *
   * @return The bytes for parent.
   */
  @java.lang.Override
  public com.google.protobuf.ByteString getParentBytes() {
    java.lang.Object ref = parent_;
    if (ref instanceof java.lang.String) {
      com.google.protobuf.ByteString b =
          com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
      parent_ = b;
      return b;
    } else {
      return (com.google.protobuf.ByteString) ref;
    }
  }
  public static final int KEY_HANDLE_ID_FIELD_NUMBER = 2;

  // Same lazy String<->ByteString caching scheme as parent_ above.
  @SuppressWarnings("serial")
  private volatile java.lang.Object keyHandleId_ = "";
  /**
   *
   *
   * <pre>
   * Optional. Id of the [KeyHandle][google.cloud.kms.v1.KeyHandle]. Must be
   * unique to the resource project and location. If not provided by the caller,
   * a new UUID is used.
   * </pre>
   *
   * <code>string key_handle_id = 2 [(.google.api.field_behavior) = OPTIONAL];</code>
   *
   * @return The keyHandleId.
   */
  @java.lang.Override
  public java.lang.String getKeyHandleId() {
    java.lang.Object ref = keyHandleId_;
    if (ref instanceof java.lang.String) {
      return (java.lang.String) ref;
    } else {
      com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
      java.lang.String s = bs.toStringUtf8();
      keyHandleId_ = s;
      return s;
    }
  }

  /**
   *
   *
   * <pre>
   * Optional. Id of the [KeyHandle][google.cloud.kms.v1.KeyHandle]. Must be
   * unique to the resource project and location. If not provided by the caller,
   * a new UUID is used.
   * </pre>
   *
   * <code>string key_handle_id = 2 [(.google.api.field_behavior) = OPTIONAL];</code>
   *
   * @return The bytes for keyHandleId.
   */
  @java.lang.Override
  public com.google.protobuf.ByteString getKeyHandleIdBytes() {
    java.lang.Object ref = keyHandleId_;
    if (ref instanceof java.lang.String) {
      com.google.protobuf.ByteString b =
          com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
      keyHandleId_ = b;
      return b;
    } else {
      return (com.google.protobuf.ByteString) ref;
    }
  }
  public static final int KEY_HANDLE_FIELD_NUMBER = 3;
  // Message field; presence is tracked via bit 0 of bitField0_.
  private com.google.cloud.kms.v1.KeyHandle keyHandle_;
  /**
   *
   *
   * <pre>
   * Required. [KeyHandle][google.cloud.kms.v1.KeyHandle] to create.
   * </pre>
   *
   * <code>.google.cloud.kms.v1.KeyHandle key_handle = 3 [(.google.api.field_behavior) = REQUIRED];
   * </code>
   *
   * @return Whether the keyHandle field is set.
   */
  @java.lang.Override
  public boolean hasKeyHandle() {
    return ((bitField0_ & 0x00000001) != 0);
  }

  /**
   *
   *
   * <pre>
   * Required. [KeyHandle][google.cloud.kms.v1.KeyHandle] to create.
   * </pre>
   *
   * <code>.google.cloud.kms.v1.KeyHandle key_handle = 3 [(.google.api.field_behavior) = REQUIRED];
   * </code>
   *
   * @return The keyHandle.
   */
  @java.lang.Override
  public com.google.cloud.kms.v1.KeyHandle getKeyHandle() {
    return keyHandle_ == null ? com.google.cloud.kms.v1.KeyHandle.getDefaultInstance() : keyHandle_;
  }

  /**
   *
   *
   * <pre>
   * Required. [KeyHandle][google.cloud.kms.v1.KeyHandle] to create.
   * </pre>
   *
   * <code>.google.cloud.kms.v1.KeyHandle key_handle = 3 [(.google.api.field_behavior) = REQUIRED];
   * </code>
   */
  @java.lang.Override
  public com.google.cloud.kms.v1.KeyHandleOrBuilder getKeyHandleOrBuilder() {
    return keyHandle_ == null ? com.google.cloud.kms.v1.KeyHandle.getDefaultInstance() : keyHandle_;
  }
  // -1 = not computed, 0 = not initialized, 1 = initialized.
  private byte memoizedIsInitialized = -1;

  @java.lang.Override
  public final boolean isInitialized() {
    byte isInitialized = memoizedIsInitialized;
    if (isInitialized == 1) return true;
    if (isInitialized == 0) return false;

    memoizedIsInitialized = 1;
    return true;
  }

  // Serializes only non-default fields, in field-number order.
  @java.lang.Override
  public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(parent_)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 1, parent_);
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(keyHandleId_)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 2, keyHandleId_);
    }
    if (((bitField0_ & 0x00000001) != 0)) {
      output.writeMessage(3, getKeyHandle());
    }
    getUnknownFields().writeTo(output);
  }

  // Memoizes the computed size in memoizedSize (inherited field).
  @java.lang.Override
  public int getSerializedSize() {
    int size = memoizedSize;
    if (size != -1) return size;

    size = 0;
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(parent_)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, parent_);
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(keyHandleId_)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, keyHandleId_);
    }
    if (((bitField0_ & 0x00000001) != 0)) {
      size += com.google.protobuf.CodedOutputStream.computeMessageSize(3, getKeyHandle());
    }
    size += getUnknownFields().getSerializedSize();
    memoizedSize = size;
    return size;
  }
  // Field-by-field structural equality, including unknown fields.
  @java.lang.Override
  public boolean equals(final java.lang.Object obj) {
    if (obj == this) {
      return true;
    }
    if (!(obj instanceof com.google.cloud.kms.v1.CreateKeyHandleRequest)) {
      return super.equals(obj);
    }
    com.google.cloud.kms.v1.CreateKeyHandleRequest other =
        (com.google.cloud.kms.v1.CreateKeyHandleRequest) obj;

    if (!getParent().equals(other.getParent())) return false;
    if (!getKeyHandleId().equals(other.getKeyHandleId())) return false;
    if (hasKeyHandle() != other.hasKeyHandle()) return false;
    if (hasKeyHandle()) {
      if (!getKeyHandle().equals(other.getKeyHandle())) return false;
    }
    if (!getUnknownFields().equals(other.getUnknownFields())) return false;
    return true;
  }

  // Hash is memoized in memoizedHashCode (inherited field) and mixes in the
  // field numbers so distinct message types with equal values differ.
  @java.lang.Override
  public int hashCode() {
    if (memoizedHashCode != 0) {
      return memoizedHashCode;
    }
    int hash = 41;
    hash = (19 * hash) + getDescriptor().hashCode();
    hash = (37 * hash) + PARENT_FIELD_NUMBER;
    hash = (53 * hash) + getParent().hashCode();
    hash = (37 * hash) + KEY_HANDLE_ID_FIELD_NUMBER;
    hash = (53 * hash) + getKeyHandleId().hashCode();
    if (hasKeyHandle()) {
      hash = (37 * hash) + KEY_HANDLE_FIELD_NUMBER;
      hash = (53 * hash) + getKeyHandle().hashCode();
    }
    hash = (29 * hash) + getUnknownFields().hashCode();
    memoizedHashCode = hash;
    return hash;
  }
  // Standard generated parseFrom overloads; all delegate to PARSER.
  public static com.google.cloud.kms.v1.CreateKeyHandleRequest parseFrom(java.nio.ByteBuffer data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.cloud.kms.v1.CreateKeyHandleRequest parseFrom(
      java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.cloud.kms.v1.CreateKeyHandleRequest parseFrom(
      com.google.protobuf.ByteString data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.cloud.kms.v1.CreateKeyHandleRequest parseFrom(
      com.google.protobuf.ByteString data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.cloud.kms.v1.CreateKeyHandleRequest parseFrom(byte[] data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.cloud.kms.v1.CreateKeyHandleRequest parseFrom(
      byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.cloud.kms.v1.CreateKeyHandleRequest parseFrom(java.io.InputStream input)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }

  public static com.google.cloud.kms.v1.CreateKeyHandleRequest parseFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }

  // Delimited variants read a varint length prefix before the message.
  public static com.google.cloud.kms.v1.CreateKeyHandleRequest parseDelimitedFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
  }

  public static com.google.cloud.kms.v1.CreateKeyHandleRequest parseDelimitedFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
        PARSER, input, extensionRegistry);
  }

  public static com.google.cloud.kms.v1.CreateKeyHandleRequest parseFrom(
      com.google.protobuf.CodedInputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }

  public static com.google.cloud.kms.v1.CreateKeyHandleRequest parseFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }
  // --- Builder factory methods (protoc boilerplate). ---
  // newBuilder() starts from the default instance; newBuilder(prototype) seeds
  // the builder with an existing message's fields via mergeFrom.
  @java.lang.Override
  public Builder newBuilderForType() {
    return newBuilder();
  }
  public static Builder newBuilder() {
    return DEFAULT_INSTANCE.toBuilder();
  }
  public static Builder newBuilder(com.google.cloud.kms.v1.CreateKeyHandleRequest prototype) {
    return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
  }
  @java.lang.Override
  public Builder toBuilder() {
    // Avoids an unnecessary merge when this is the (all-default) singleton.
    return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
  }
  @java.lang.Override
  protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
    Builder builder = new Builder(parent);
    return builder;
  }
  /**
   *
   *
   * <pre>
   * Request message for
   * [Autokey.CreateKeyHandle][google.cloud.kms.v1.Autokey.CreateKeyHandle].
   * </pre>
   *
   * Protobuf type {@code google.cloud.kms.v1.CreateKeyHandleRequest}
   */
  public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
      implements
      // @@protoc_insertion_point(builder_implements:google.cloud.kms.v1.CreateKeyHandleRequest)
      com.google.cloud.kms.v1.CreateKeyHandleRequestOrBuilder {
    public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
      return com.google.cloud.kms.v1.AutokeyProto
          .internal_static_google_cloud_kms_v1_CreateKeyHandleRequest_descriptor;
    }
    @java.lang.Override
    protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return com.google.cloud.kms.v1.AutokeyProto
          .internal_static_google_cloud_kms_v1_CreateKeyHandleRequest_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              com.google.cloud.kms.v1.CreateKeyHandleRequest.class,
              com.google.cloud.kms.v1.CreateKeyHandleRequest.Builder.class);
    }
    // Construct using com.google.cloud.kms.v1.CreateKeyHandleRequest.newBuilder()
    private Builder() {
      maybeForceBuilderInitialization();
    }
    private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
      super(parent);
      maybeForceBuilderInitialization();
    }
    // Eagerly creates nested-message field builders when the runtime is
    // configured to always use field builders (descriptor-based runtime).
    private void maybeForceBuilderInitialization() {
      if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) {
        getKeyHandleFieldBuilder();
      }
    }
    // Resets all fields to their defaults and clears the has-bits.
    @java.lang.Override
    public Builder clear() {
      super.clear();
      bitField0_ = 0;
      parent_ = "";
      keyHandleId_ = "";
      keyHandle_ = null;
      if (keyHandleBuilder_ != null) {
        keyHandleBuilder_.dispose();
        keyHandleBuilder_ = null;
      }
      return this;
    }
    @java.lang.Override
    public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
      return com.google.cloud.kms.v1.AutokeyProto
          .internal_static_google_cloud_kms_v1_CreateKeyHandleRequest_descriptor;
    }
    @java.lang.Override
    public com.google.cloud.kms.v1.CreateKeyHandleRequest getDefaultInstanceForType() {
      return com.google.cloud.kms.v1.CreateKeyHandleRequest.getDefaultInstance();
    }
    @java.lang.Override
    public com.google.cloud.kms.v1.CreateKeyHandleRequest build() {
      com.google.cloud.kms.v1.CreateKeyHandleRequest result = buildPartial();
      if (!result.isInitialized()) {
        throw newUninitializedMessageException(result);
      }
      return result;
    }
    @java.lang.Override
    public com.google.cloud.kms.v1.CreateKeyHandleRequest buildPartial() {
      com.google.cloud.kms.v1.CreateKeyHandleRequest result =
          new com.google.cloud.kms.v1.CreateKeyHandleRequest(this);
      if (bitField0_ != 0) {
        buildPartial0(result);
      }
      onBuilt();
      return result;
    }
    // Copies only the fields whose builder has-bits are set into the result
    // message; key_handle additionally sets the message-level has-bit.
    private void buildPartial0(com.google.cloud.kms.v1.CreateKeyHandleRequest result) {
      int from_bitField0_ = bitField0_;
      if (((from_bitField0_ & 0x00000001) != 0)) {
        result.parent_ = parent_;
      }
      if (((from_bitField0_ & 0x00000002) != 0)) {
        result.keyHandleId_ = keyHandleId_;
      }
      int to_bitField0_ = 0;
      if (((from_bitField0_ & 0x00000004) != 0)) {
        result.keyHandle_ = keyHandleBuilder_ == null ? keyHandle_ : keyHandleBuilder_.build();
        to_bitField0_ |= 0x00000001;
      }
      result.bitField0_ |= to_bitField0_;
    }
    @java.lang.Override
    public Builder clone() {
      return super.clone();
    }
    @java.lang.Override
    public Builder setField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.setField(field, value);
    }
    @java.lang.Override
    public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
      return super.clearField(field);
    }
    @java.lang.Override
    public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
      return super.clearOneof(oneof);
    }
    @java.lang.Override
    public Builder setRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
      return super.setRepeatedField(field, index, value);
    }
    @java.lang.Override
    public Builder addRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.addRepeatedField(field, value);
    }
    // Dispatches to the strongly-typed mergeFrom when possible; otherwise falls
    // back to the reflective descriptor-based merge.
    @java.lang.Override
    public Builder mergeFrom(com.google.protobuf.Message other) {
      if (other instanceof com.google.cloud.kms.v1.CreateKeyHandleRequest) {
        return mergeFrom((com.google.cloud.kms.v1.CreateKeyHandleRequest) other);
      } else {
        super.mergeFrom(other);
        return this;
      }
    }
    // Field-by-field merge: non-empty strings overwrite, the key_handle
    // message is merged recursively, unknown fields are carried over.
    public Builder mergeFrom(com.google.cloud.kms.v1.CreateKeyHandleRequest other) {
      if (other == com.google.cloud.kms.v1.CreateKeyHandleRequest.getDefaultInstance()) return this;
      if (!other.getParent().isEmpty()) {
        parent_ = other.parent_;
        bitField0_ |= 0x00000001;
        onChanged();
      }
      if (!other.getKeyHandleId().isEmpty()) {
        keyHandleId_ = other.keyHandleId_;
        bitField0_ |= 0x00000002;
        onChanged();
      }
      if (other.hasKeyHandle()) {
        mergeKeyHandle(other.getKeyHandle());
      }
      this.mergeUnknownFields(other.getUnknownFields());
      onChanged();
      return this;
    }
    @java.lang.Override
    public final boolean isInitialized() {
      // proto3 message with no required fields: always initialized.
      return true;
    }
    // Wire-format parse loop: reads tags until EOF (tag 0) or an end-group tag,
    // routing each known field number to its field and preserving unknowns.
    @java.lang.Override
    public Builder mergeFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      if (extensionRegistry == null) {
        throw new java.lang.NullPointerException();
      }
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              done = true;
              break;
            case 10:
              {
                parent_ = input.readStringRequireUtf8();
                bitField0_ |= 0x00000001;
                break;
              } // case 10
            case 18:
              {
                keyHandleId_ = input.readStringRequireUtf8();
                bitField0_ |= 0x00000002;
                break;
              } // case 18
            case 26:
              {
                input.readMessage(getKeyHandleFieldBuilder().getBuilder(), extensionRegistry);
                bitField0_ |= 0x00000004;
                break;
              } // case 26
            default:
              {
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
          } // switch (tag)
        } // while (!done)
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.unwrapIOException();
      } finally {
        onChanged();
      } // finally
      return this;
    }
    // Has-bits: 0x1 = parent, 0x2 = key_handle_id, 0x4 = key_handle.
    private int bitField0_;
    // Stored as String once decoded; may transiently hold a ByteString.
    private java.lang.Object parent_ = "";
    /**
     *
     *
     * <pre>
     * Required. Name of the resource project and location to create the
     * [KeyHandle][google.cloud.kms.v1.KeyHandle] in, e.g.
     * `projects/{PROJECT_ID}/locations/{LOCATION}`.
     * </pre>
     *
     * <code>
     * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
     * </code>
     *
     * @return The parent.
     */
    public java.lang.String getParent() {
      java.lang.Object ref = parent_;
      if (!(ref instanceof java.lang.String)) {
        com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        parent_ = s;
        return s;
      } else {
        return (java.lang.String) ref;
      }
    }
    /**
     *
     *
     * <pre>
     * Required. Name of the resource project and location to create the
     * [KeyHandle][google.cloud.kms.v1.KeyHandle] in, e.g.
     * `projects/{PROJECT_ID}/locations/{LOCATION}`.
     * </pre>
     *
     * <code>
     * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
     * </code>
     *
     * @return The bytes for parent.
     */
    public com.google.protobuf.ByteString getParentBytes() {
      java.lang.Object ref = parent_;
      if (ref instanceof String) {
        com.google.protobuf.ByteString b =
            com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
        parent_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }
    /**
     *
     *
     * <pre>
     * Required. Name of the resource project and location to create the
     * [KeyHandle][google.cloud.kms.v1.KeyHandle] in, e.g.
     * `projects/{PROJECT_ID}/locations/{LOCATION}`.
     * </pre>
     *
     * <code>
     * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
     * </code>
     *
     * @param value The parent to set.
     * @return This builder for chaining.
     */
    public Builder setParent(java.lang.String value) {
      if (value == null) {
        throw new NullPointerException();
      }
      parent_ = value;
      bitField0_ |= 0x00000001;
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Required. Name of the resource project and location to create the
     * [KeyHandle][google.cloud.kms.v1.KeyHandle] in, e.g.
     * `projects/{PROJECT_ID}/locations/{LOCATION}`.
     * </pre>
     *
     * <code>
     * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
     * </code>
     *
     * @return This builder for chaining.
     */
    public Builder clearParent() {
      parent_ = getDefaultInstance().getParent();
      bitField0_ = (bitField0_ & ~0x00000001);
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Required. Name of the resource project and location to create the
     * [KeyHandle][google.cloud.kms.v1.KeyHandle] in, e.g.
     * `projects/{PROJECT_ID}/locations/{LOCATION}`.
     * </pre>
     *
     * <code>
     * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
     * </code>
     *
     * @param value The bytes for parent to set.
     * @return This builder for chaining.
     */
    public Builder setParentBytes(com.google.protobuf.ByteString value) {
      if (value == null) {
        throw new NullPointerException();
      }
      checkByteStringIsUtf8(value);
      parent_ = value;
      bitField0_ |= 0x00000001;
      onChanged();
      return this;
    }
    // Stored as String once decoded; may transiently hold a ByteString.
    private java.lang.Object keyHandleId_ = "";
    /**
     *
     *
     * <pre>
     * Optional. Id of the [KeyHandle][google.cloud.kms.v1.KeyHandle]. Must be
     * unique to the resource project and location. If not provided by the caller,
     * a new UUID is used.
     * </pre>
     *
     * <code>string key_handle_id = 2 [(.google.api.field_behavior) = OPTIONAL];</code>
     *
     * @return The keyHandleId.
     */
    public java.lang.String getKeyHandleId() {
      java.lang.Object ref = keyHandleId_;
      if (!(ref instanceof java.lang.String)) {
        com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        keyHandleId_ = s;
        return s;
      } else {
        return (java.lang.String) ref;
      }
    }
    /**
     *
     *
     * <pre>
     * Optional. Id of the [KeyHandle][google.cloud.kms.v1.KeyHandle]. Must be
     * unique to the resource project and location. If not provided by the caller,
     * a new UUID is used.
     * </pre>
     *
     * <code>string key_handle_id = 2 [(.google.api.field_behavior) = OPTIONAL];</code>
     *
     * @return The bytes for keyHandleId.
     */
    public com.google.protobuf.ByteString getKeyHandleIdBytes() {
      java.lang.Object ref = keyHandleId_;
      if (ref instanceof String) {
        com.google.protobuf.ByteString b =
            com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
        keyHandleId_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }
    /**
     *
     *
     * <pre>
     * Optional. Id of the [KeyHandle][google.cloud.kms.v1.KeyHandle]. Must be
     * unique to the resource project and location. If not provided by the caller,
     * a new UUID is used.
     * </pre>
     *
     * <code>string key_handle_id = 2 [(.google.api.field_behavior) = OPTIONAL];</code>
     *
     * @param value The keyHandleId to set.
     * @return This builder for chaining.
     */
    public Builder setKeyHandleId(java.lang.String value) {
      if (value == null) {
        throw new NullPointerException();
      }
      keyHandleId_ = value;
      bitField0_ |= 0x00000002;
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Optional. Id of the [KeyHandle][google.cloud.kms.v1.KeyHandle]. Must be
     * unique to the resource project and location. If not provided by the caller,
     * a new UUID is used.
     * </pre>
     *
     * <code>string key_handle_id = 2 [(.google.api.field_behavior) = OPTIONAL];</code>
     *
     * @return This builder for chaining.
     */
    public Builder clearKeyHandleId() {
      keyHandleId_ = getDefaultInstance().getKeyHandleId();
      bitField0_ = (bitField0_ & ~0x00000002);
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Optional. Id of the [KeyHandle][google.cloud.kms.v1.KeyHandle]. Must be
     * unique to the resource project and location. If not provided by the caller,
     * a new UUID is used.
     * </pre>
     *
     * <code>string key_handle_id = 2 [(.google.api.field_behavior) = OPTIONAL];</code>
     *
     * @param value The bytes for keyHandleId to set.
     * @return This builder for chaining.
     */
    public Builder setKeyHandleIdBytes(com.google.protobuf.ByteString value) {
      if (value == null) {
        throw new NullPointerException();
      }
      checkByteStringIsUtf8(value);
      keyHandleId_ = value;
      bitField0_ |= 0x00000002;
      onChanged();
      return this;
    }
    // key_handle is held either directly (keyHandle_) or via the lazily-created
    // SingleFieldBuilderV3 (keyHandleBuilder_); exactly one is active at a time.
    private com.google.cloud.kms.v1.KeyHandle keyHandle_;
    private com.google.protobuf.SingleFieldBuilderV3<
            com.google.cloud.kms.v1.KeyHandle,
            com.google.cloud.kms.v1.KeyHandle.Builder,
            com.google.cloud.kms.v1.KeyHandleOrBuilder>
        keyHandleBuilder_;
    /**
     *
     *
     * <pre>
     * Required. [KeyHandle][google.cloud.kms.v1.KeyHandle] to create.
     * </pre>
     *
     * <code>
     * .google.cloud.kms.v1.KeyHandle key_handle = 3 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     *
     * @return Whether the keyHandle field is set.
     */
    public boolean hasKeyHandle() {
      return ((bitField0_ & 0x00000004) != 0);
    }
    /**
     *
     *
     * <pre>
     * Required. [KeyHandle][google.cloud.kms.v1.KeyHandle] to create.
     * </pre>
     *
     * <code>
     * .google.cloud.kms.v1.KeyHandle key_handle = 3 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     *
     * @return The keyHandle.
     */
    public com.google.cloud.kms.v1.KeyHandle getKeyHandle() {
      if (keyHandleBuilder_ == null) {
        return keyHandle_ == null
            ? com.google.cloud.kms.v1.KeyHandle.getDefaultInstance()
            : keyHandle_;
      } else {
        return keyHandleBuilder_.getMessage();
      }
    }
    /**
     *
     *
     * <pre>
     * Required. [KeyHandle][google.cloud.kms.v1.KeyHandle] to create.
     * </pre>
     *
     * <code>
     * .google.cloud.kms.v1.KeyHandle key_handle = 3 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     */
    public Builder setKeyHandle(com.google.cloud.kms.v1.KeyHandle value) {
      if (keyHandleBuilder_ == null) {
        if (value == null) {
          throw new NullPointerException();
        }
        keyHandle_ = value;
      } else {
        keyHandleBuilder_.setMessage(value);
      }
      bitField0_ |= 0x00000004;
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Required. [KeyHandle][google.cloud.kms.v1.KeyHandle] to create.
     * </pre>
     *
     * <code>
     * .google.cloud.kms.v1.KeyHandle key_handle = 3 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     */
    public Builder setKeyHandle(com.google.cloud.kms.v1.KeyHandle.Builder builderForValue) {
      if (keyHandleBuilder_ == null) {
        keyHandle_ = builderForValue.build();
      } else {
        keyHandleBuilder_.setMessage(builderForValue.build());
      }
      bitField0_ |= 0x00000004;
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Required. [KeyHandle][google.cloud.kms.v1.KeyHandle] to create.
     * </pre>
     *
     * <code>
     * .google.cloud.kms.v1.KeyHandle key_handle = 3 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     */
    public Builder mergeKeyHandle(com.google.cloud.kms.v1.KeyHandle value) {
      if (keyHandleBuilder_ == null) {
        // Merge into an existing non-default value; otherwise just take it.
        if (((bitField0_ & 0x00000004) != 0)
            && keyHandle_ != null
            && keyHandle_ != com.google.cloud.kms.v1.KeyHandle.getDefaultInstance()) {
          getKeyHandleBuilder().mergeFrom(value);
        } else {
          keyHandle_ = value;
        }
      } else {
        keyHandleBuilder_.mergeFrom(value);
      }
      if (keyHandle_ != null) {
        bitField0_ |= 0x00000004;
        onChanged();
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * Required. [KeyHandle][google.cloud.kms.v1.KeyHandle] to create.
     * </pre>
     *
     * <code>
     * .google.cloud.kms.v1.KeyHandle key_handle = 3 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     */
    public Builder clearKeyHandle() {
      bitField0_ = (bitField0_ & ~0x00000004);
      keyHandle_ = null;
      if (keyHandleBuilder_ != null) {
        keyHandleBuilder_.dispose();
        keyHandleBuilder_ = null;
      }
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Required. [KeyHandle][google.cloud.kms.v1.KeyHandle] to create.
     * </pre>
     *
     * <code>
     * .google.cloud.kms.v1.KeyHandle key_handle = 3 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     */
    public com.google.cloud.kms.v1.KeyHandle.Builder getKeyHandleBuilder() {
      bitField0_ |= 0x00000004;
      onChanged();
      return getKeyHandleFieldBuilder().getBuilder();
    }
    /**
     *
     *
     * <pre>
     * Required. [KeyHandle][google.cloud.kms.v1.KeyHandle] to create.
     * </pre>
     *
     * <code>
     * .google.cloud.kms.v1.KeyHandle key_handle = 3 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     */
    public com.google.cloud.kms.v1.KeyHandleOrBuilder getKeyHandleOrBuilder() {
      if (keyHandleBuilder_ != null) {
        return keyHandleBuilder_.getMessageOrBuilder();
      } else {
        return keyHandle_ == null
            ? com.google.cloud.kms.v1.KeyHandle.getDefaultInstance()
            : keyHandle_;
      }
    }
    /**
     *
     *
     * <pre>
     * Required. [KeyHandle][google.cloud.kms.v1.KeyHandle] to create.
     * </pre>
     *
     * <code>
     * .google.cloud.kms.v1.KeyHandle key_handle = 3 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     */
    private com.google.protobuf.SingleFieldBuilderV3<
            com.google.cloud.kms.v1.KeyHandle,
            com.google.cloud.kms.v1.KeyHandle.Builder,
            com.google.cloud.kms.v1.KeyHandleOrBuilder>
        getKeyHandleFieldBuilder() {
      // Lazily switches ownership of the value from keyHandle_ to the builder.
      if (keyHandleBuilder_ == null) {
        keyHandleBuilder_ =
            new com.google.protobuf.SingleFieldBuilderV3<
                com.google.cloud.kms.v1.KeyHandle,
                com.google.cloud.kms.v1.KeyHandle.Builder,
                com.google.cloud.kms.v1.KeyHandleOrBuilder>(
                getKeyHandle(), getParentForChildren(), isClean());
        keyHandle_ = null;
      }
      return keyHandleBuilder_;
    }
    @java.lang.Override
    public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.setUnknownFields(unknownFields);
    }
    @java.lang.Override
    public final Builder mergeUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.mergeUnknownFields(unknownFields);
    }
    // @@protoc_insertion_point(builder_scope:google.cloud.kms.v1.CreateKeyHandleRequest)
  }
  // @@protoc_insertion_point(class_scope:google.cloud.kms.v1.CreateKeyHandleRequest)
  // Singleton default instance shared by getDefaultInstance()/getDefaultInstanceForType().
  private static final com.google.cloud.kms.v1.CreateKeyHandleRequest DEFAULT_INSTANCE;
  static {
    DEFAULT_INSTANCE = new com.google.cloud.kms.v1.CreateKeyHandleRequest();
  }
  public static com.google.cloud.kms.v1.CreateKeyHandleRequest getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }
  // Wire-format parser; parsePartialFrom reports partial results via
  // setUnfinishedMessage so callers can inspect what was decoded before failure.
  private static final com.google.protobuf.Parser<CreateKeyHandleRequest> PARSER =
      new com.google.protobuf.AbstractParser<CreateKeyHandleRequest>() {
        @java.lang.Override
        public CreateKeyHandleRequest parsePartialFrom(
            com.google.protobuf.CodedInputStream input,
            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
            throws com.google.protobuf.InvalidProtocolBufferException {
          Builder builder = newBuilder();
          try {
            builder.mergeFrom(input, extensionRegistry);
          } catch (com.google.protobuf.InvalidProtocolBufferException e) {
            throw e.setUnfinishedMessage(builder.buildPartial());
          } catch (com.google.protobuf.UninitializedMessageException e) {
            throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
          } catch (java.io.IOException e) {
            throw new com.google.protobuf.InvalidProtocolBufferException(e)
                .setUnfinishedMessage(builder.buildPartial());
          }
          return builder.buildPartial();
        }
      };
  public static com.google.protobuf.Parser<CreateKeyHandleRequest> parser() {
    return PARSER;
  }
  @java.lang.Override
  public com.google.protobuf.Parser<CreateKeyHandleRequest> getParserForType() {
    return PARSER;
  }
  @java.lang.Override
  public com.google.cloud.kms.v1.CreateKeyHandleRequest getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }
}
|
googleapis/google-cloud-java | 37,014 | java-datastream/proto-google-cloud-datastream-v1alpha1/src/main/java/com/google/cloud/datastream/v1alpha1/DiscoverConnectionProfileResponse.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/datastream/v1alpha1/datastream.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.datastream.v1alpha1;
/** Protobuf type {@code google.cloud.datastream.v1alpha1.DiscoverConnectionProfileResponse} */
public final class DiscoverConnectionProfileResponse extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.datastream.v1alpha1.DiscoverConnectionProfileResponse)
DiscoverConnectionProfileResponseOrBuilder {
private static final long serialVersionUID = 0L;
// Use DiscoverConnectionProfileResponse.newBuilder() to construct.
  // Use DiscoverConnectionProfileResponse.newBuilder() to construct.
  private DiscoverConnectionProfileResponse(
      com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
    super(builder);
  }
  private DiscoverConnectionProfileResponse() {}
  // Used by the protobuf runtime to create instances reflectively.
  @java.lang.Override
  @SuppressWarnings({"unused"})
  protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
    return new DiscoverConnectionProfileResponse();
  }
  // Descriptor plumbing generated from datastream.proto.
  public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
    return com.google.cloud.datastream.v1alpha1.CloudDatastreamServiceProto
        .internal_static_google_cloud_datastream_v1alpha1_DiscoverConnectionProfileResponse_descriptor;
  }
  @java.lang.Override
  protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internalGetFieldAccessorTable() {
    return com.google.cloud.datastream.v1alpha1.CloudDatastreamServiceProto
        .internal_static_google_cloud_datastream_v1alpha1_DiscoverConnectionProfileResponse_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            com.google.cloud.datastream.v1alpha1.DiscoverConnectionProfileResponse.class,
            com.google.cloud.datastream.v1alpha1.DiscoverConnectionProfileResponse.Builder.class);
  }
  // Oneof "data_object": dataObjectCase_ holds the active field number
  // (0 = unset) and dataObject_ holds the corresponding message instance.
  private int dataObjectCase_ = 0;
  @SuppressWarnings("serial")
  private java.lang.Object dataObject_;
  public enum DataObjectCase
      implements
          com.google.protobuf.Internal.EnumLite,
          com.google.protobuf.AbstractMessage.InternalOneOfEnum {
    ORACLE_RDBMS(100),
    MYSQL_RDBMS(101),
    DATAOBJECT_NOT_SET(0);
    private final int value;
    private DataObjectCase(int value) {
      this.value = value;
    }
    /**
     * @param value The number of the enum to look for.
     * @return The enum associated with the given number.
     * @deprecated Use {@link #forNumber(int)} instead.
     */
    @java.lang.Deprecated
    public static DataObjectCase valueOf(int value) {
      return forNumber(value);
    }
    // Returns null for unrecognized field numbers (protoc convention).
    public static DataObjectCase forNumber(int value) {
      switch (value) {
        case 100:
          return ORACLE_RDBMS;
        case 101:
          return MYSQL_RDBMS;
        case 0:
          return DATAOBJECT_NOT_SET;
        default:
          return null;
      }
    }
    public int getNumber() {
      return this.value;
    }
  };
  public DataObjectCase getDataObjectCase() {
    return DataObjectCase.forNumber(dataObjectCase_);
  }
  public static final int ORACLE_RDBMS_FIELD_NUMBER = 100;
  /**
   *
   *
   * <pre>
   * Enriched Oracle RDBMS object.
   * </pre>
   *
   * <code>.google.cloud.datastream.v1alpha1.OracleRdbms oracle_rdbms = 100;</code>
   *
   * @return Whether the oracleRdbms field is set.
   */
  @java.lang.Override
  public boolean hasOracleRdbms() {
    return dataObjectCase_ == 100;
  }
  /**
   *
   *
   * <pre>
   * Enriched Oracle RDBMS object.
   * </pre>
   *
   * <code>.google.cloud.datastream.v1alpha1.OracleRdbms oracle_rdbms = 100;</code>
   *
   * @return The oracleRdbms.
   */
  @java.lang.Override
  public com.google.cloud.datastream.v1alpha1.OracleRdbms getOracleRdbms() {
    // Returns the default instance when another oneof case (or none) is set.
    if (dataObjectCase_ == 100) {
      return (com.google.cloud.datastream.v1alpha1.OracleRdbms) dataObject_;
    }
    return com.google.cloud.datastream.v1alpha1.OracleRdbms.getDefaultInstance();
  }
  /**
   *
   *
   * <pre>
   * Enriched Oracle RDBMS object.
   * </pre>
   *
   * <code>.google.cloud.datastream.v1alpha1.OracleRdbms oracle_rdbms = 100;</code>
   */
  @java.lang.Override
  public com.google.cloud.datastream.v1alpha1.OracleRdbmsOrBuilder getOracleRdbmsOrBuilder() {
    if (dataObjectCase_ == 100) {
      return (com.google.cloud.datastream.v1alpha1.OracleRdbms) dataObject_;
    }
    return com.google.cloud.datastream.v1alpha1.OracleRdbms.getDefaultInstance();
  }
  public static final int MYSQL_RDBMS_FIELD_NUMBER = 101;
  /**
   *
   *
   * <pre>
   * Enriched MySQL RDBMS object.
   * </pre>
   *
   * <code>.google.cloud.datastream.v1alpha1.MysqlRdbms mysql_rdbms = 101;</code>
   *
   * @return Whether the mysqlRdbms field is set.
   */
  @java.lang.Override
  public boolean hasMysqlRdbms() {
    return dataObjectCase_ == 101;
  }
  /**
   *
   *
   * <pre>
   * Enriched MySQL RDBMS object.
   * </pre>
   *
   * <code>.google.cloud.datastream.v1alpha1.MysqlRdbms mysql_rdbms = 101;</code>
   *
   * @return The mysqlRdbms.
   */
  @java.lang.Override
  public com.google.cloud.datastream.v1alpha1.MysqlRdbms getMysqlRdbms() {
    // Returns the default instance when another oneof case (or none) is set.
    if (dataObjectCase_ == 101) {
      return (com.google.cloud.datastream.v1alpha1.MysqlRdbms) dataObject_;
    }
    return com.google.cloud.datastream.v1alpha1.MysqlRdbms.getDefaultInstance();
  }
  /**
   *
   *
   * <pre>
   * Enriched MySQL RDBMS object.
   * </pre>
   *
   * <code>.google.cloud.datastream.v1alpha1.MysqlRdbms mysql_rdbms = 101;</code>
   */
  @java.lang.Override
  public com.google.cloud.datastream.v1alpha1.MysqlRdbmsOrBuilder getMysqlRdbmsOrBuilder() {
    if (dataObjectCase_ == 101) {
      return (com.google.cloud.datastream.v1alpha1.MysqlRdbms) dataObject_;
    }
    return com.google.cloud.datastream.v1alpha1.MysqlRdbms.getDefaultInstance();
  }
  // Memoized initialization state: -1 unknown, 0 false, 1 true.
  private byte memoizedIsInitialized = -1;
  @java.lang.Override
  public final boolean isInitialized() {
    byte isInitialized = memoizedIsInitialized;
    if (isInitialized == 1) return true;
    if (isInitialized == 0) return false;
    memoizedIsInitialized = 1;
    return true;
  }
  // Serializes whichever oneof case is set, then any unknown fields.
  @java.lang.Override
  public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
    if (dataObjectCase_ == 100) {
      output.writeMessage(100, (com.google.cloud.datastream.v1alpha1.OracleRdbms) dataObject_);
    }
    if (dataObjectCase_ == 101) {
      output.writeMessage(101, (com.google.cloud.datastream.v1alpha1.MysqlRdbms) dataObject_);
    }
    getUnknownFields().writeTo(output);
  }
  // Computes (and memoizes in memoizedSize) the serialized byte size.
  @java.lang.Override
  public int getSerializedSize() {
    int size = memoizedSize;
    if (size != -1) return size;
    size = 0;
    if (dataObjectCase_ == 100) {
      size +=
          com.google.protobuf.CodedOutputStream.computeMessageSize(
              100, (com.google.cloud.datastream.v1alpha1.OracleRdbms) dataObject_);
    }
    if (dataObjectCase_ == 101) {
      size +=
          com.google.protobuf.CodedOutputStream.computeMessageSize(
              101, (com.google.cloud.datastream.v1alpha1.MysqlRdbms) dataObject_);
    }
    size += getUnknownFields().getSerializedSize();
    memoizedSize = size;
    return size;
  }
  // Value equality: same oneof case, equal active-case message, equal unknowns.
  @java.lang.Override
  public boolean equals(final java.lang.Object obj) {
    if (obj == this) {
      return true;
    }
    if (!(obj instanceof com.google.cloud.datastream.v1alpha1.DiscoverConnectionProfileResponse)) {
      return super.equals(obj);
    }
    com.google.cloud.datastream.v1alpha1.DiscoverConnectionProfileResponse other =
        (com.google.cloud.datastream.v1alpha1.DiscoverConnectionProfileResponse) obj;
    if (!getDataObjectCase().equals(other.getDataObjectCase())) return false;
    switch (dataObjectCase_) {
      case 100:
        if (!getOracleRdbms().equals(other.getOracleRdbms())) return false;
        break;
      case 101:
        if (!getMysqlRdbms().equals(other.getMysqlRdbms())) return false;
        break;
      case 0:
      default:
    }
    if (!getUnknownFields().equals(other.getUnknownFields())) return false;
    return true;
  }
  // Hash memoized in memoizedHashCode; folds in the active oneof field only.
  @java.lang.Override
  public int hashCode() {
    if (memoizedHashCode != 0) {
      return memoizedHashCode;
    }
    int hash = 41;
    hash = (19 * hash) + getDescriptor().hashCode();
    switch (dataObjectCase_) {
      case 100:
        hash = (37 * hash) + ORACLE_RDBMS_FIELD_NUMBER;
        hash = (53 * hash) + getOracleRdbms().hashCode();
        break;
      case 101:
        hash = (37 * hash) + MYSQL_RDBMS_FIELD_NUMBER;
        hash = (53 * hash) + getMysqlRdbms().hashCode();
        break;
      case 0:
      default:
    }
    hash = (29 * hash) + getUnknownFields().hashCode();
    memoizedHashCode = hash;
    return hash;
  }
  // --- Generated parse entry points (protoc boilerplate). ---
  // Byte-based overloads delegate to the static PARSER; stream-based overloads
  // go through GeneratedMessageV3's IO helpers; the "delimited" variants read a
  // varint length prefix before the message bytes.
  public static com.google.cloud.datastream.v1alpha1.DiscoverConnectionProfileResponse parseFrom(
      java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.cloud.datastream.v1alpha1.DiscoverConnectionProfileResponse parseFrom(
      java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.cloud.datastream.v1alpha1.DiscoverConnectionProfileResponse parseFrom(
      com.google.protobuf.ByteString data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.cloud.datastream.v1alpha1.DiscoverConnectionProfileResponse parseFrom(
      com.google.protobuf.ByteString data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.cloud.datastream.v1alpha1.DiscoverConnectionProfileResponse parseFrom(
      byte[] data) throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.cloud.datastream.v1alpha1.DiscoverConnectionProfileResponse parseFrom(
      byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.cloud.datastream.v1alpha1.DiscoverConnectionProfileResponse parseFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }
  public static com.google.cloud.datastream.v1alpha1.DiscoverConnectionProfileResponse parseFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }
  public static com.google.cloud.datastream.v1alpha1.DiscoverConnectionProfileResponse
      parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
  }
  public static com.google.cloud.datastream.v1alpha1.DiscoverConnectionProfileResponse
      parseDelimitedFrom(
          java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
        PARSER, input, extensionRegistry);
  }
  public static com.google.cloud.datastream.v1alpha1.DiscoverConnectionProfileResponse parseFrom(
      com.google.protobuf.CodedInputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }
  public static com.google.cloud.datastream.v1alpha1.DiscoverConnectionProfileResponse parseFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }
@java.lang.Override
public Builder newBuilderForType() {
return newBuilder();
}
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
// Builder pre-populated with all fields of an existing message.
public static Builder newBuilder(
    com.google.cloud.datastream.v1alpha1.DiscoverConnectionProfileResponse prototype) {
  return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
  // Skip the mergeFrom(this) copy when this is the default instance — nothing to copy.
  return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
  // Nested-builder variant: the parent is notified via onChanged() when this builder mutates.
  Builder builder = new Builder(parent);
  return builder;
}
/**
 * Builder for protobuf type {@code
 * google.cloud.datastream.v1alpha1.DiscoverConnectionProfileResponse}.
 *
 * <p>The message holds a single {@code data_object} oneof with two arms: {@code oracle_rdbms}
 * (field 100) and {@code mysql_rdbms} (field 101). {@code dataObjectCase_} tracks which arm is
 * set; {@code dataObject_} holds either the message or is shadowed by a lazily-created
 * {@link com.google.protobuf.SingleFieldBuilderV3}.
 *
 * <p>NOTE(review): this class is emitted by protoc ("DO NOT EDIT" file); comments added here
 * are reader orientation only and will be lost on regeneration.
 */
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
    implements
    // @@protoc_insertion_point(builder_implements:google.cloud.datastream.v1alpha1.DiscoverConnectionProfileResponse)
    com.google.cloud.datastream.v1alpha1.DiscoverConnectionProfileResponseOrBuilder {

  public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
    return com.google.cloud.datastream.v1alpha1.CloudDatastreamServiceProto
        .internal_static_google_cloud_datastream_v1alpha1_DiscoverConnectionProfileResponse_descriptor;
  }

  @java.lang.Override
  protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internalGetFieldAccessorTable() {
    return com.google.cloud.datastream.v1alpha1.CloudDatastreamServiceProto
        .internal_static_google_cloud_datastream_v1alpha1_DiscoverConnectionProfileResponse_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            com.google.cloud.datastream.v1alpha1.DiscoverConnectionProfileResponse.class,
            com.google.cloud.datastream.v1alpha1.DiscoverConnectionProfileResponse.Builder.class);
  }

  // Construct using
  // com.google.cloud.datastream.v1alpha1.DiscoverConnectionProfileResponse.newBuilder()
  private Builder() {}

  private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
    super(parent);
  }

  @java.lang.Override
  public Builder clear() {
    super.clear();
    bitField0_ = 0;
    // Reset both oneof sub-builders (only if they were ever materialized), then the
    // oneof case/value themselves.
    if (oracleRdbmsBuilder_ != null) {
      oracleRdbmsBuilder_.clear();
    }
    if (mysqlRdbmsBuilder_ != null) {
      mysqlRdbmsBuilder_.clear();
    }
    dataObjectCase_ = 0;
    dataObject_ = null;
    return this;
  }

  @java.lang.Override
  public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
    return com.google.cloud.datastream.v1alpha1.CloudDatastreamServiceProto
        .internal_static_google_cloud_datastream_v1alpha1_DiscoverConnectionProfileResponse_descriptor;
  }

  @java.lang.Override
  public com.google.cloud.datastream.v1alpha1.DiscoverConnectionProfileResponse
      getDefaultInstanceForType() {
    return com.google.cloud.datastream.v1alpha1.DiscoverConnectionProfileResponse
        .getDefaultInstance();
  }

  @java.lang.Override
  public com.google.cloud.datastream.v1alpha1.DiscoverConnectionProfileResponse build() {
    com.google.cloud.datastream.v1alpha1.DiscoverConnectionProfileResponse result =
        buildPartial();
    if (!result.isInitialized()) {
      throw newUninitializedMessageException(result);
    }
    return result;
  }

  @java.lang.Override
  public com.google.cloud.datastream.v1alpha1.DiscoverConnectionProfileResponse buildPartial() {
    com.google.cloud.datastream.v1alpha1.DiscoverConnectionProfileResponse result =
        new com.google.cloud.datastream.v1alpha1.DiscoverConnectionProfileResponse(this);
    if (bitField0_ != 0) {
      buildPartial0(result);
    }
    buildPartialOneofs(result);
    onBuilt();
    return result;
  }

  // This message has no non-oneof fields, so this generated hook is empty; the unused
  // local mirrors the codegen template.
  private void buildPartial0(
      com.google.cloud.datastream.v1alpha1.DiscoverConnectionProfileResponse result) {
    int from_bitField0_ = bitField0_;
  }

  // Copies the oneof case/value into the result. When a sub-builder exists for the active
  // arm, its built message takes precedence over the raw dataObject_ reference.
  private void buildPartialOneofs(
      com.google.cloud.datastream.v1alpha1.DiscoverConnectionProfileResponse result) {
    result.dataObjectCase_ = dataObjectCase_;
    result.dataObject_ = this.dataObject_;
    if (dataObjectCase_ == 100 && oracleRdbmsBuilder_ != null) {
      result.dataObject_ = oracleRdbmsBuilder_.build();
    }
    if (dataObjectCase_ == 101 && mysqlRdbmsBuilder_ != null) {
      result.dataObject_ = mysqlRdbmsBuilder_.build();
    }
  }

  @java.lang.Override
  public Builder clone() {
    return super.clone();
  }

  @java.lang.Override
  public Builder setField(
      com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
    return super.setField(field, value);
  }

  @java.lang.Override
  public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
    return super.clearField(field);
  }

  @java.lang.Override
  public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
    return super.clearOneof(oneof);
  }

  @java.lang.Override
  public Builder setRepeatedField(
      com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
    return super.setRepeatedField(field, index, value);
  }

  @java.lang.Override
  public Builder addRepeatedField(
      com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
    return super.addRepeatedField(field, value);
  }

  @java.lang.Override
  public Builder mergeFrom(com.google.protobuf.Message other) {
    // Fast path for the concrete type; otherwise fall back to reflective field-by-field merge.
    if (other instanceof com.google.cloud.datastream.v1alpha1.DiscoverConnectionProfileResponse) {
      return mergeFrom(
          (com.google.cloud.datastream.v1alpha1.DiscoverConnectionProfileResponse) other);
    } else {
      super.mergeFrom(other);
      return this;
    }
  }

  public Builder mergeFrom(
      com.google.cloud.datastream.v1alpha1.DiscoverConnectionProfileResponse other) {
    if (other
        == com.google.cloud.datastream.v1alpha1.DiscoverConnectionProfileResponse
            .getDefaultInstance()) return this;
    // Merge only the arm that is actually set in `other`.
    switch (other.getDataObjectCase()) {
      case ORACLE_RDBMS:
        {
          mergeOracleRdbms(other.getOracleRdbms());
          break;
        }
      case MYSQL_RDBMS:
        {
          mergeMysqlRdbms(other.getMysqlRdbms());
          break;
        }
      case DATAOBJECT_NOT_SET:
        {
          break;
        }
    }
    this.mergeUnknownFields(other.getUnknownFields());
    onChanged();
    return this;
  }

  @java.lang.Override
  public final boolean isInitialized() {
    return true;
  }

  @java.lang.Override
  public Builder mergeFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    if (extensionRegistry == null) {
      throw new java.lang.NullPointerException();
    }
    try {
      boolean done = false;
      while (!done) {
        int tag = input.readTag();
        switch (tag) {
          case 0:
            done = true;
            break;
          case 802: // field 100 (oracle_rdbms), wire type 2: (100 << 3) | 2
            {
              input.readMessage(getOracleRdbmsFieldBuilder().getBuilder(), extensionRegistry);
              dataObjectCase_ = 100;
              break;
            } // case 802
          case 810: // field 101 (mysql_rdbms), wire type 2: (101 << 3) | 2
            {
              input.readMessage(getMysqlRdbmsFieldBuilder().getBuilder(), extensionRegistry);
              dataObjectCase_ = 101;
              break;
            } // case 810
          default:
            {
              if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                done = true; // was an endgroup tag
              }
              break;
            } // default:
        } // switch (tag)
      } // while (!done)
    } catch (com.google.protobuf.InvalidProtocolBufferException e) {
      throw e.unwrapIOException();
    } finally {
      onChanged();
    } // finally
    return this;
  }

  private int dataObjectCase_ = 0;
  private java.lang.Object dataObject_;

  public DataObjectCase getDataObjectCase() {
    return DataObjectCase.forNumber(dataObjectCase_);
  }

  public Builder clearDataObject() {
    dataObjectCase_ = 0;
    dataObject_ = null;
    onChanged();
    return this;
  }

  private int bitField0_;

  private com.google.protobuf.SingleFieldBuilderV3<
          com.google.cloud.datastream.v1alpha1.OracleRdbms,
          com.google.cloud.datastream.v1alpha1.OracleRdbms.Builder,
          com.google.cloud.datastream.v1alpha1.OracleRdbmsOrBuilder>
      oracleRdbmsBuilder_;

  /**
   * Enriched Oracle RDBMS object.
   * <code>.google.cloud.datastream.v1alpha1.OracleRdbms oracle_rdbms = 100;</code>
   *
   * @return Whether the oracleRdbms field is set.
   */
  @java.lang.Override
  public boolean hasOracleRdbms() {
    return dataObjectCase_ == 100;
  }

  /**
   * Enriched Oracle RDBMS object.
   * <code>.google.cloud.datastream.v1alpha1.OracleRdbms oracle_rdbms = 100;</code>
   *
   * @return The oracleRdbms.
   */
  @java.lang.Override
  public com.google.cloud.datastream.v1alpha1.OracleRdbms getOracleRdbms() {
    if (oracleRdbmsBuilder_ == null) {
      if (dataObjectCase_ == 100) {
        return (com.google.cloud.datastream.v1alpha1.OracleRdbms) dataObject_;
      }
      return com.google.cloud.datastream.v1alpha1.OracleRdbms.getDefaultInstance();
    } else {
      if (dataObjectCase_ == 100) {
        return oracleRdbmsBuilder_.getMessage();
      }
      return com.google.cloud.datastream.v1alpha1.OracleRdbms.getDefaultInstance();
    }
  }

  /**
   * Enriched Oracle RDBMS object.
   * <code>.google.cloud.datastream.v1alpha1.OracleRdbms oracle_rdbms = 100;</code>
   */
  public Builder setOracleRdbms(com.google.cloud.datastream.v1alpha1.OracleRdbms value) {
    if (oracleRdbmsBuilder_ == null) {
      if (value == null) {
        throw new NullPointerException();
      }
      dataObject_ = value;
      onChanged();
    } else {
      oracleRdbmsBuilder_.setMessage(value);
    }
    dataObjectCase_ = 100;
    return this;
  }

  /**
   * Enriched Oracle RDBMS object.
   * <code>.google.cloud.datastream.v1alpha1.OracleRdbms oracle_rdbms = 100;</code>
   */
  public Builder setOracleRdbms(
      com.google.cloud.datastream.v1alpha1.OracleRdbms.Builder builderForValue) {
    if (oracleRdbmsBuilder_ == null) {
      dataObject_ = builderForValue.build();
      onChanged();
    } else {
      oracleRdbmsBuilder_.setMessage(builderForValue.build());
    }
    dataObjectCase_ = 100;
    return this;
  }

  /**
   * Enriched Oracle RDBMS object.
   * <code>.google.cloud.datastream.v1alpha1.OracleRdbms oracle_rdbms = 100;</code>
   */
  public Builder mergeOracleRdbms(com.google.cloud.datastream.v1alpha1.OracleRdbms value) {
    if (oracleRdbmsBuilder_ == null) {
      // Merge into the existing value only when this arm is already set and non-default;
      // otherwise the new value simply replaces whatever arm was active.
      if (dataObjectCase_ == 100
          && dataObject_
              != com.google.cloud.datastream.v1alpha1.OracleRdbms.getDefaultInstance()) {
        dataObject_ =
            com.google.cloud.datastream.v1alpha1.OracleRdbms.newBuilder(
                    (com.google.cloud.datastream.v1alpha1.OracleRdbms) dataObject_)
                .mergeFrom(value)
                .buildPartial();
      } else {
        dataObject_ = value;
      }
      onChanged();
    } else {
      if (dataObjectCase_ == 100) {
        oracleRdbmsBuilder_.mergeFrom(value);
      } else {
        oracleRdbmsBuilder_.setMessage(value);
      }
    }
    dataObjectCase_ = 100;
    return this;
  }

  /**
   * Enriched Oracle RDBMS object.
   * <code>.google.cloud.datastream.v1alpha1.OracleRdbms oracle_rdbms = 100;</code>
   */
  public Builder clearOracleRdbms() {
    if (oracleRdbmsBuilder_ == null) {
      if (dataObjectCase_ == 100) {
        dataObjectCase_ = 0;
        dataObject_ = null;
        onChanged();
      }
    } else {
      if (dataObjectCase_ == 100) {
        dataObjectCase_ = 0;
        dataObject_ = null;
      }
      oracleRdbmsBuilder_.clear();
    }
    return this;
  }

  /**
   * Enriched Oracle RDBMS object.
   * <code>.google.cloud.datastream.v1alpha1.OracleRdbms oracle_rdbms = 100;</code>
   */
  public com.google.cloud.datastream.v1alpha1.OracleRdbms.Builder getOracleRdbmsBuilder() {
    return getOracleRdbmsFieldBuilder().getBuilder();
  }

  /**
   * Enriched Oracle RDBMS object.
   * <code>.google.cloud.datastream.v1alpha1.OracleRdbms oracle_rdbms = 100;</code>
   */
  @java.lang.Override
  public com.google.cloud.datastream.v1alpha1.OracleRdbmsOrBuilder getOracleRdbmsOrBuilder() {
    if ((dataObjectCase_ == 100) && (oracleRdbmsBuilder_ != null)) {
      return oracleRdbmsBuilder_.getMessageOrBuilder();
    } else {
      if (dataObjectCase_ == 100) {
        return (com.google.cloud.datastream.v1alpha1.OracleRdbms) dataObject_;
      }
      return com.google.cloud.datastream.v1alpha1.OracleRdbms.getDefaultInstance();
    }
  }

  /**
   * Lazily creates the sub-builder for oracle_rdbms, seeding it with the current oneof
   * value (or the default instance), and switches the oneof to this arm.
   * <code>.google.cloud.datastream.v1alpha1.OracleRdbms oracle_rdbms = 100;</code>
   */
  private com.google.protobuf.SingleFieldBuilderV3<
          com.google.cloud.datastream.v1alpha1.OracleRdbms,
          com.google.cloud.datastream.v1alpha1.OracleRdbms.Builder,
          com.google.cloud.datastream.v1alpha1.OracleRdbmsOrBuilder>
      getOracleRdbmsFieldBuilder() {
    if (oracleRdbmsBuilder_ == null) {
      if (!(dataObjectCase_ == 100)) {
        dataObject_ = com.google.cloud.datastream.v1alpha1.OracleRdbms.getDefaultInstance();
      }
      oracleRdbmsBuilder_ =
          new com.google.protobuf.SingleFieldBuilderV3<
              com.google.cloud.datastream.v1alpha1.OracleRdbms,
              com.google.cloud.datastream.v1alpha1.OracleRdbms.Builder,
              com.google.cloud.datastream.v1alpha1.OracleRdbmsOrBuilder>(
              (com.google.cloud.datastream.v1alpha1.OracleRdbms) dataObject_,
              getParentForChildren(),
              isClean());
      dataObject_ = null;
    }
    dataObjectCase_ = 100;
    onChanged();
    return oracleRdbmsBuilder_;
  }

  private com.google.protobuf.SingleFieldBuilderV3<
          com.google.cloud.datastream.v1alpha1.MysqlRdbms,
          com.google.cloud.datastream.v1alpha1.MysqlRdbms.Builder,
          com.google.cloud.datastream.v1alpha1.MysqlRdbmsOrBuilder>
      mysqlRdbmsBuilder_;

  /**
   * Enriched MySQL RDBMS object.
   * <code>.google.cloud.datastream.v1alpha1.MysqlRdbms mysql_rdbms = 101;</code>
   *
   * @return Whether the mysqlRdbms field is set.
   */
  @java.lang.Override
  public boolean hasMysqlRdbms() {
    return dataObjectCase_ == 101;
  }

  /**
   * Enriched MySQL RDBMS object.
   * <code>.google.cloud.datastream.v1alpha1.MysqlRdbms mysql_rdbms = 101;</code>
   *
   * @return The mysqlRdbms.
   */
  @java.lang.Override
  public com.google.cloud.datastream.v1alpha1.MysqlRdbms getMysqlRdbms() {
    if (mysqlRdbmsBuilder_ == null) {
      if (dataObjectCase_ == 101) {
        return (com.google.cloud.datastream.v1alpha1.MysqlRdbms) dataObject_;
      }
      return com.google.cloud.datastream.v1alpha1.MysqlRdbms.getDefaultInstance();
    } else {
      if (dataObjectCase_ == 101) {
        return mysqlRdbmsBuilder_.getMessage();
      }
      return com.google.cloud.datastream.v1alpha1.MysqlRdbms.getDefaultInstance();
    }
  }

  /**
   * Enriched MySQL RDBMS object.
   * <code>.google.cloud.datastream.v1alpha1.MysqlRdbms mysql_rdbms = 101;</code>
   */
  public Builder setMysqlRdbms(com.google.cloud.datastream.v1alpha1.MysqlRdbms value) {
    if (mysqlRdbmsBuilder_ == null) {
      if (value == null) {
        throw new NullPointerException();
      }
      dataObject_ = value;
      onChanged();
    } else {
      mysqlRdbmsBuilder_.setMessage(value);
    }
    dataObjectCase_ = 101;
    return this;
  }

  /**
   * Enriched MySQL RDBMS object.
   * <code>.google.cloud.datastream.v1alpha1.MysqlRdbms mysql_rdbms = 101;</code>
   */
  public Builder setMysqlRdbms(
      com.google.cloud.datastream.v1alpha1.MysqlRdbms.Builder builderForValue) {
    if (mysqlRdbmsBuilder_ == null) {
      dataObject_ = builderForValue.build();
      onChanged();
    } else {
      mysqlRdbmsBuilder_.setMessage(builderForValue.build());
    }
    dataObjectCase_ = 101;
    return this;
  }

  /**
   * Enriched MySQL RDBMS object.
   * <code>.google.cloud.datastream.v1alpha1.MysqlRdbms mysql_rdbms = 101;</code>
   */
  public Builder mergeMysqlRdbms(com.google.cloud.datastream.v1alpha1.MysqlRdbms value) {
    if (mysqlRdbmsBuilder_ == null) {
      // Same merge-vs-replace logic as mergeOracleRdbms, for the 101 arm.
      if (dataObjectCase_ == 101
          && dataObject_
              != com.google.cloud.datastream.v1alpha1.MysqlRdbms.getDefaultInstance()) {
        dataObject_ =
            com.google.cloud.datastream.v1alpha1.MysqlRdbms.newBuilder(
                    (com.google.cloud.datastream.v1alpha1.MysqlRdbms) dataObject_)
                .mergeFrom(value)
                .buildPartial();
      } else {
        dataObject_ = value;
      }
      onChanged();
    } else {
      if (dataObjectCase_ == 101) {
        mysqlRdbmsBuilder_.mergeFrom(value);
      } else {
        mysqlRdbmsBuilder_.setMessage(value);
      }
    }
    dataObjectCase_ = 101;
    return this;
  }

  /**
   * Enriched MySQL RDBMS object.
   * <code>.google.cloud.datastream.v1alpha1.MysqlRdbms mysql_rdbms = 101;</code>
   */
  public Builder clearMysqlRdbms() {
    if (mysqlRdbmsBuilder_ == null) {
      if (dataObjectCase_ == 101) {
        dataObjectCase_ = 0;
        dataObject_ = null;
        onChanged();
      }
    } else {
      if (dataObjectCase_ == 101) {
        dataObjectCase_ = 0;
        dataObject_ = null;
      }
      mysqlRdbmsBuilder_.clear();
    }
    return this;
  }

  /**
   * Enriched MySQL RDBMS object.
   * <code>.google.cloud.datastream.v1alpha1.MysqlRdbms mysql_rdbms = 101;</code>
   */
  public com.google.cloud.datastream.v1alpha1.MysqlRdbms.Builder getMysqlRdbmsBuilder() {
    return getMysqlRdbmsFieldBuilder().getBuilder();
  }

  /**
   * Enriched MySQL RDBMS object.
   * <code>.google.cloud.datastream.v1alpha1.MysqlRdbms mysql_rdbms = 101;</code>
   */
  @java.lang.Override
  public com.google.cloud.datastream.v1alpha1.MysqlRdbmsOrBuilder getMysqlRdbmsOrBuilder() {
    if ((dataObjectCase_ == 101) && (mysqlRdbmsBuilder_ != null)) {
      return mysqlRdbmsBuilder_.getMessageOrBuilder();
    } else {
      if (dataObjectCase_ == 101) {
        return (com.google.cloud.datastream.v1alpha1.MysqlRdbms) dataObject_;
      }
      return com.google.cloud.datastream.v1alpha1.MysqlRdbms.getDefaultInstance();
    }
  }

  /**
   * Lazily creates the sub-builder for mysql_rdbms, seeding it with the current oneof
   * value (or the default instance), and switches the oneof to this arm.
   * <code>.google.cloud.datastream.v1alpha1.MysqlRdbms mysql_rdbms = 101;</code>
   */
  private com.google.protobuf.SingleFieldBuilderV3<
          com.google.cloud.datastream.v1alpha1.MysqlRdbms,
          com.google.cloud.datastream.v1alpha1.MysqlRdbms.Builder,
          com.google.cloud.datastream.v1alpha1.MysqlRdbmsOrBuilder>
      getMysqlRdbmsFieldBuilder() {
    if (mysqlRdbmsBuilder_ == null) {
      if (!(dataObjectCase_ == 101)) {
        dataObject_ = com.google.cloud.datastream.v1alpha1.MysqlRdbms.getDefaultInstance();
      }
      mysqlRdbmsBuilder_ =
          new com.google.protobuf.SingleFieldBuilderV3<
              com.google.cloud.datastream.v1alpha1.MysqlRdbms,
              com.google.cloud.datastream.v1alpha1.MysqlRdbms.Builder,
              com.google.cloud.datastream.v1alpha1.MysqlRdbmsOrBuilder>(
              (com.google.cloud.datastream.v1alpha1.MysqlRdbms) dataObject_,
              getParentForChildren(),
              isClean());
      dataObject_ = null;
    }
    dataObjectCase_ = 101;
    onChanged();
    return mysqlRdbmsBuilder_;
  }

  @java.lang.Override
  public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
    return super.setUnknownFields(unknownFields);
  }

  @java.lang.Override
  public final Builder mergeUnknownFields(
      final com.google.protobuf.UnknownFieldSet unknownFields) {
    return super.mergeUnknownFields(unknownFields);
  }

  // @@protoc_insertion_point(builder_scope:google.cloud.datastream.v1alpha1.DiscoverConnectionProfileResponse)
}
// @@protoc_insertion_point(class_scope:google.cloud.datastream.v1alpha1.DiscoverConnectionProfileResponse)
// Shared immutable empty instance; also the identity value for merges and toBuilder().
private static final com.google.cloud.datastream.v1alpha1.DiscoverConnectionProfileResponse
    DEFAULT_INSTANCE;

static {
  DEFAULT_INSTANCE = new com.google.cloud.datastream.v1alpha1.DiscoverConnectionProfileResponse();
}

public static com.google.cloud.datastream.v1alpha1.DiscoverConnectionProfileResponse
    getDefaultInstance() {
  return DEFAULT_INSTANCE;
}
// Singleton parser: delegates to Builder.mergeFrom and, on any failure, attaches the
// partially-parsed message to the thrown InvalidProtocolBufferException so callers can
// still inspect what was read.
private static final com.google.protobuf.Parser<DiscoverConnectionProfileResponse> PARSER =
    new com.google.protobuf.AbstractParser<DiscoverConnectionProfileResponse>() {
      @java.lang.Override
      public DiscoverConnectionProfileResponse parsePartialFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        Builder builder = newBuilder();
        try {
          builder.mergeFrom(input, extensionRegistry);
        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
          throw e.setUnfinishedMessage(builder.buildPartial());
        } catch (com.google.protobuf.UninitializedMessageException e) {
          throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
        } catch (java.io.IOException e) {
          throw new com.google.protobuf.InvalidProtocolBufferException(e)
              .setUnfinishedMessage(builder.buildPartial());
        }
        return builder.buildPartial();
      }
    };
// Static accessor for the shared parser instance.
public static com.google.protobuf.Parser<DiscoverConnectionProfileResponse> parser() {
  return PARSER;
}
@java.lang.Override
public com.google.protobuf.Parser<DiscoverConnectionProfileResponse> getParserForType() {
  return PARSER;
}
@java.lang.Override
public com.google.cloud.datastream.v1alpha1.DiscoverConnectionProfileResponse
    getDefaultInstanceForType() {
  return DEFAULT_INSTANCE;
}
}
|
apache/harmony | 34,921 | classlib/modules/jndi/src/test/java/org/apache/harmony/jndi/tests/javax/naming/directory/BasicAttributeTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.harmony.jndi.tests.javax.naming.directory;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.ObjectInputStream;
import java.io.ObjectOutputStream;
import java.util.Date;
import java.util.NoSuchElementException;
import java.util.Random;
import javax.naming.NamingEnumeration;
import javax.naming.NamingException;
import javax.naming.OperationNotSupportedException;
import javax.naming.directory.BasicAttribute;
import junit.framework.TestCase;
import org.apache.harmony.jndi.tests.javax.naming.util.Person;
public class BasicAttributeTest extends TestCase {

    // Shared fixtures, recreated for every test in setUp(): one attribute created with
    // ordered=true, one with ordered=false.
    private BasicAttribute orderedAttribute;

    private BasicAttribute unorderedAttribute;
    @Override
    protected void setUp() {
        // Second constructor argument is the "ordered" flag.
        orderedAttribute = new BasicAttribute("Ordered_Attribute", true);
        unorderedAttribute = new BasicAttribute("Unordered_Attribute", false);
    }
/**
* Test BasicAttribute constructor 1) use a specified ID 2) the default
* order flag is set to false 3) contain zero value.
*/
public void testConstructor_ByID() {
String ID = "attribute one";
BasicAttribute attribute = new BasicAttribute(ID);
assertEquals(ID, attribute.getID());
assertFalse(attribute.isOrdered());
assertEquals(0, attribute.size());
}
/**
* Test BasicAttribute constructor with null ID
*/
public void testConstructor_ByIDNull() {
BasicAttribute attribute = new BasicAttribute(null);
assertNull(attribute.getID());
}
/**
* Test BasicAttribute constructor 1) use a specified ID 2) use a specified
* order flag 3) contain zero value.
*/
public void testConstructor_ByIDOrderFlag() {
String ID = "attribute two";
boolean flag = false;
BasicAttribute attribute = new BasicAttribute(ID, flag);
assertEquals(ID, attribute.getID());
assertEquals(flag, attribute.isOrdered());
assertEquals(0, attribute.size());
ID = "attribute three";
flag = true;
attribute = new BasicAttribute(ID, flag);
assertEquals(ID, attribute.getID());
assertEquals(flag, attribute.isOrdered());
assertEquals(0, attribute.size());
}
/**
* Test BasicAttribute constructor 1) use a specified ID 2) the default
* order flag is set to false 3) specify a initial value
*/
public void testConstructor_ByIDInitialValue() throws NamingException {
String ID = "attribute four";
Date date = new Date();
BasicAttribute attribute = new BasicAttribute(ID, date);
assertEquals(ID, attribute.getID());
assertFalse(attribute.isOrdered());
assertEquals(date, attribute.get());
}
/**
* Test BasicAttribute constructor 1) use a specified ID 2) use a specified
* order flag 3) specify a initial value
*/
public void testConstructor_ByIDOrderFlagInitialValue()
throws NamingException {
String ID = "attribute five";
boolean flag = true;
Date date = new Date();
BasicAttribute attribute = new BasicAttribute(ID, date, flag);
assertEquals(ID, attribute.getID());
assertEquals(flag, attribute.isOrdered());
assertEquals(date, attribute.get());
}
/**
* test add a simple object through add()
*/
public void testAdd_unorder_Simple() throws NamingException {
int count = 5;
Person[] persons = new Person[count];
for (int i = 0; i < count; i++) {
persons[i] = Person.getInstance();
assertTrue(unorderedAttribute.add(persons[i]));
}
for (int i = 0; i < count; i++) {
assertSame(persons[i], unorderedAttribute.get(i));
}
assertEquals(count, unorderedAttribute.size());
}
public void testAdd_unorder_ExistingValue()
throws CloneNotSupportedException, NamingException {
Person person = Person.getInstance();
Person clonePerson = (Person) person.clone();
unorderedAttribute.add(person);
assertFalse(unorderedAttribute.add(clonePerson));
assertEquals(1, unorderedAttribute.size());
assertEquals(clonePerson, unorderedAttribute.get(0));
}
public void testAdd_unordered_ExistingValueArray() {
String[] team = { "Blue", "Yellow", "Red", };
String[] newTeam = new String[team.length];
System.arraycopy(team, 0, newTeam, 0, team.length);
unorderedAttribute.add(team);
assertFalse(unorderedAttribute.add(newTeam));
assertEquals(1, unorderedAttribute.size());
}
public void testAdd_unorder_valueNull() throws NamingException {
assertTrue(unorderedAttribute.add(null));
assertNull(unorderedAttribute.get(0));
}
public void testAdd_unorder_ExistingNull() throws NamingException {
assertTrue(unorderedAttribute.add(null));
assertFalse(unorderedAttribute.add(null));
assertEquals(1, unorderedAttribute.size());
assertNull(unorderedAttribute.get(0));
}
public void testAdd_order_Simple() throws NamingException {
int count = 5;
Person[] persons = new Person[count];
for (int i = 0; i < count; i++) {
persons[i] = Person.getInstance();
assertTrue(orderedAttribute.add(persons[i]));
}
for (int i = 0; i < count; i++) {
assertSame(persons[i], orderedAttribute.get(i));
}
assertEquals(count, orderedAttribute.size());
}
public void testAdd_order_ExistingValue() throws NamingException,
CloneNotSupportedException {
Person person = Person.getInstance();
Person clonePerson = (Person) person.clone();
assertTrue(orderedAttribute.add(person));
assertTrue(orderedAttribute.add(clonePerson));
assertEquals(2, orderedAttribute.size());
assertEquals(orderedAttribute.get(0), orderedAttribute.get(1));
}
public void testAdd_order_ValueNull() {
int count = 5;
for (int i = 0; i < count; i++) {
assertTrue(orderedAttribute.add(null));
}
assertEquals(count, orderedAttribute.size());
}
/**
* Test void add(int location, Object val)
*/
public void testAdd2_order_Simple() throws NamingException {
int count = 5;
Person[] persons = new Person[count];
for (int i = 0; i < count; i++) {
persons[i] = Person.getInstance();
orderedAttribute.add(i, persons[i]);
}
for (int i = 0; i < count; i++) {
assertEquals(persons[i], orderedAttribute.get(i));
}
}
public void testAdd2_order_ExistValue() throws NamingException {
String value0 = "string value";
String value1 = "another string value";
orderedAttribute.add(0, value0);
orderedAttribute.add(0, value1);
assertEquals(2, orderedAttribute.size());
assertEquals(value1, orderedAttribute.get(0));
assertEquals(value0, orderedAttribute.get(1));
}
public void testAdd2_order_ValueNull() throws NamingException {
orderedAttribute.add(0, null);
orderedAttribute.add(0, null);
assertEquals(2, orderedAttribute.size());
assertNull(orderedAttribute.get(0));
assertNull(orderedAttribute.get(1));
}
public void testAdd2_order_OutOfRangeLess() throws NamingException {
try {
orderedAttribute.add(-1, "Index is -1");
fail("add(-1, value) should throw IndexOutOfBoundsException.");
} catch (IndexOutOfBoundsException e) {
}
}
public void testAdd2_order_OutOfRangeOver() throws NamingException {
try {
orderedAttribute.add(orderedAttribute.size() + 1,
"Index is size() + 1");
fail("add(size() + 1, value) should throw IndexOutOfBoundsException.");
} catch (IndexOutOfBoundsException e) {
}
}
public void testAdd2_unorder_Simple() throws NamingException {
int count = 5;
Person[] persons = new Person[count];
for (int i = 0; i < count; i++) {
persons[i] = Person.getInstance();
unorderedAttribute.add(i, persons[i]);
}
for (int i = 0; i < count; i++) {
assertEquals(persons[i], unorderedAttribute.get(i));
}
}
public void testAdd2_unorder_ExistValue() throws NamingException {
String value = "string value";
unorderedAttribute.add(0, value);
try {
unorderedAttribute.add(0, value);
fail("An value already exist, throw IllegalStateException.");
} catch (IllegalStateException e) {
}
assertEquals(1, unorderedAttribute.size());
}
public void testAdd2_unorder_ExistValueArray() {
int count = 5;
Person[] persons = new Person[count];
for (int i = 0; i < count; i++) {
persons[i] = Person.getInstance();
}
Person[] newPersons = new Person[count];
System.arraycopy(persons, 0, newPersons, 0, count);
unorderedAttribute.add(0, persons);
try {
unorderedAttribute.add(0, newPersons);
fail("An value already exist, should throw IllegalStateException.");
} catch (IllegalStateException e) {
}
}
public void testAdd2_unorder_ValueNull() throws NamingException {
unorderedAttribute.add(0, null);
try {
unorderedAttribute.add(0, null);
fail("An value already exist, should throw IllegalStateException.");
} catch (IllegalStateException e) {
}
assertEquals(1, unorderedAttribute.size());
}
public void testAdd2_unorder_OutOfRangeLess() throws NamingException {
try {
unorderedAttribute.add(-1, "Index is -1");
fail("add(-1, value) should throw IndexOutOfBoundsException.");
} catch (IndexOutOfBoundsException e) {
}
}
public void testAdd2_unorder_OutOfRangeOver() throws NamingException {
try {
unorderedAttribute.add(orderedAttribute.size() + 1,
"Index is size() + 1");
fail("add(size() + 1, value) should throw IndexOutOfBoundsException.");
} catch (IndexOutOfBoundsException e) {
}
}
/**
* test clear() add of the values.
*/
public void testClear() {
int count = 10;
for (int i = 0; i < count; i++) {
unorderedAttribute.add(new Integer(i));
orderedAttribute.add(new Integer(i));
}
assertEquals(count, unorderedAttribute.size());
assertEquals(count, orderedAttribute.size());
unorderedAttribute.clear();
orderedAttribute.clear();
assertEquals(0, unorderedAttribute.size());
assertEquals(0, orderedAttribute.size());
}
    /**
     * clone() must be a shallow copy: it shares the value references and the ID/order flag,
     * but owns an independent value list, so growing the clone leaves the original untouched.
     */
    public void testClone_ordered() throws NamingException {
        int count = 5;
        Person[] persons = new Person[count];
        for (int i = 0; i < count; i++) {
            persons[i] = Person.getInstance();
            orderedAttribute.add(persons[i]);
        }
        BasicAttribute cloneAttribute = (BasicAttribute) orderedAttribute
                .clone();
        // Shallow copy: the clone holds the very same value instances (assertSame).
        for (int i = 0; i < count; i++) {
            assertSame(orderedAttribute.get(i), cloneAttribute.get(i));
        }
        assertTrue(cloneAttribute.isOrdered());
        assertEquals(orderedAttribute.getID(), cloneAttribute.getID());
        // assertNotSame(orderedAttribute.values, cloneAttribute.values);
        // Mutating the clone must not change the original's size.
        cloneAttribute.add("new object");
        assertEquals(orderedAttribute.size() + 1, cloneAttribute.size());
    }
    /**
     * Same shallow-copy contract as testClone_ordered, for the unordered fixture.
     */
    public void testClone_unordered() throws NamingException {
        int count = 5;
        Person[] persons = new Person[count];
        for (int i = 0; i < count; i++) {
            persons[i] = Person.getInstance();
            unorderedAttribute.add(persons[i]);
        }
        BasicAttribute cloneAttribute = (BasicAttribute) unorderedAttribute
                .clone();
        // Shallow copy: the clone holds the very same value instances (assertSame).
        for (int i = 0; i < count; i++) {
            assertSame(unorderedAttribute.get(i), cloneAttribute.get(i));
        }
        assertFalse(cloneAttribute.isOrdered());
        assertEquals(unorderedAttribute.getID(), cloneAttribute.getID());
        // assertNotSame(unorderedAttribute.values, cloneAttribute.values);
        // Mutating the clone must not change the original's size.
        cloneAttribute.add("new object");
        assertEquals(unorderedAttribute.size() + 1, cloneAttribute.size());
    }
/**
* test contains
*/
public void testContains_unordered() {
int count = 5;
Person[] persons = new Person[count];
for (int i = 0; i < count; i++) {
persons[i] = Person.getInstance();
unorderedAttribute.add(persons[i]);
}
for (int i = 0; i < count; i++) {
assertTrue(unorderedAttribute.contains(persons[i]));
}
Person person = Person.getInstance();
assertFalse(unorderedAttribute.contains(person));
}
    /** contains(null) is true once null has been added as a value. */
    public void testContains_unordered_null() {
        unorderedAttribute.add(null);
        assertTrue(unorderedAttribute.contains(null));
    }
public void testContains_unordered_array() {
int count = 5;
Person[] persons = new Person[count];
for (int i = 0; i < count; i++) {
persons[i] = Person.getInstance();
}
Person[] newPersons = new Person[count];
System.arraycopy(persons, 0, newPersons, 0, count);
unorderedAttribute.add(persons);
assertTrue(unorderedAttribute.contains(newPersons));
}
public void testContains_unordered_IntArray() {
int count = 5;
int[] numbers = new int[count];
for (int i = 0; i < count; i++) {
numbers[i] = i * 100;
}
int[] newNumbers = new int[count];
System.arraycopy(numbers, 0, newNumbers, 0, count);
unorderedAttribute.add(numbers);
assertTrue(unorderedAttribute.contains(newNumbers));
}
    /**
     * Nested arrays are NOT matched element-wise through the inner level: an equal-looking
     * copy is not found. NOTE(review): presumably the inner arrays are compared with
     * Object.equals (reference identity) rather than recursively — confirm against the
     * implementation; the original "TO DO" suggests this was never pinned down.
     */
    public void testContains_unordered_ArrayOfArray() {
        Person person0 = Person.getInstance();
        Person person1 = Person.getInstance();
        Object[][] arrays = { { "Blue", "Yellow", "Red" },
                { person0, person1, },
                { new Integer(100), new Integer(200), new Integer(300), }, };
        Object[][] newArrays = { { "Blue", "Yellow", "Red" },
                { person0, person1, },
                { new Integer(100), new Integer(200), new Integer(300), }, };
        unorderedAttribute.add(arrays);
        assertFalse(unorderedAttribute.contains(newArrays));
        // TO DO: behavior of array of array
    }
public void testContains_unordered_IntArray2() {
// TO DO: int array and integer array
int[] numbers = { 1, 2, 3, };
Integer[] integers = { new Integer(1), new Integer(2), new Integer(3), };
orderedAttribute.add(numbers);
assertFalse(orderedAttribute.contains(integers));
}
public void testContains_unordered_arraynull() {
// TO DO: int array and integer array
String[] strs = { "Blue", "Yellow", null, "Red", };
String[] newStrs = { "Blue", "Yellow", null, "Red", };
orderedAttribute.add(strs);
assertTrue(orderedAttribute.contains(newStrs));
}
public void testContains_unordered_IntShortArray() {
int[] ints = { 1, 2, 3, 4, };
short[] shorts = { 1, 2, 3, 4, };
orderedAttribute.add(ints);
assertFalse(orderedAttribute.contains(shorts));
// TO DO: how about int and short array
}
public void testContains_ordered() {
String value = "same value";
orderedAttribute.add(value);
orderedAttribute.add(value);
assertTrue(orderedAttribute.contains(value));
assertFalse(orderedAttribute.contains(value + "another value"));
}
public void testContains_ordered_null() {
orderedAttribute.add(null);
orderedAttribute.add(null);
assertTrue(orderedAttribute.contains(null));
}
public void testContains_ordered_array() {
int count = 5;
Person[] persons = new Person[count];
for (int i = 0; i < count; i++) {
persons[i] = Person.getInstance();
}
Person[] newPersons = new Person[count];
System.arraycopy(persons, 0, newPersons, 0, count);
orderedAttribute.add(persons);
assertTrue(orderedAttribute.contains(newPersons));
}
// get() with no index returns the value at index 0.
public void testGet_unordered() throws NamingException {
int count = 5;
Person[] persons = new Person[count];
for (int i = 0; i < count; i++) {
persons[i] = Person.getInstance();
unorderedAttribute.add(persons[i]);
}
assertEquals(unorderedAttribute.get(0), unorderedAttribute.get());
}
// get() on an attribute with no values throws NoSuchElementException.
public void testGet_unordered_noValue() throws NamingException {
try {
unorderedAttribute.get();
fail("No value, throw NoSuchElementException.");
// return -> throw.
} catch (NoSuchElementException e) {
}
}
// get() returns null when the single stored value is null.
public void testGet_unordered_ValueNull() throws NamingException {
unorderedAttribute.add(null);
assertNull(unorderedAttribute.get());
}
// Same get() semantics hold for an ordered attribute.
public void testGet_ordered() throws NamingException {
int count = 5;
Person[] persons = new Person[count];
for (int i = 0; i < count; i++) {
persons[i] = Person.getInstance();
orderedAttribute.add(persons[i]);
}
assertEquals(orderedAttribute.get(0), orderedAttribute.get());
}
public void testGet_ordered_noValue() throws NamingException {
try {
orderedAttribute.get();
fail("No value, throw NoSuchElementException.");
// return -> throw.
} catch (NoSuchElementException e) {
}
}
public void testGet_ordered_ValueNull() throws NamingException {
orderedAttribute.add(null);
assertNull(orderedAttribute.get());
}
// get(int) with a negative index throws IndexOutOfBoundsException.
// NOTE(review): "undered" in the method name is a typo for "unordered";
// left as-is because JUnit 3 discovers tests by name.
public void testGet2_undered_tooSmall() throws NamingException {
Person person = Person.getInstance();
unorderedAttribute.add(person);
try {
unorderedAttribute.get(-1);
fail("get(-1), throw IndexOutOfBoundsException.");
} catch (IndexOutOfBoundsException e) {
}
}
// get(size()) — one past the last valid index — also throws.
public void testGet2_undered_tooLarge() throws NamingException {
Person person = Person.getInstance();
unorderedAttribute.add(person);
try {
unorderedAttribute.get(unorderedAttribute.size());
fail("get(size()), throw IndexOutOfBoundsException.");
} catch (IndexOutOfBoundsException e) {
}
}
// getAll() on an ordered attribute enumerates values in insertion order.
public void testGetAll_ordered() throws NamingException {
int count = 5;
Person[] persons = new Person[count];
for (int i = 0; i < count; i++) {
persons[i] = Person.getInstance();
orderedAttribute.add(persons[i]);
}
NamingEnumeration<?> enumeration = orderedAttribute.getAll();
int i = 0;
while (enumeration.hasMore()) {
assertEquals(persons[i++], enumeration.next());
}
}
/**
 * getAll() on an ordered attribute with no values yields an empty
 * enumeration.
 */
public void testGetAll_ordered_noValue() throws NamingException {
NamingEnumeration<?> enumeration = orderedAttribute.getAll();
int count = 0;
while (enumeration.hasMore()) {
// Consume the element. The original loop body never called next(), so
// an unexpectedly non-empty enumeration would have spun forever
// instead of failing the assertion below.
enumeration.next();
count++;
}
assertEquals(0, count);
}
// getAll() on an unordered attribute still reflects insertion order here.
public void testGetAll_unordered() throws NamingException {
int count = 5;
Person[] persons = new Person[count];
for (int i = 0; i < count; i++) {
persons[i] = Person.getInstance();
unorderedAttribute.add(persons[i]);
}
NamingEnumeration<?> enumeration = unorderedAttribute.getAll();
int i = 0;
while (enumeration.hasMore()) {
assertEquals(persons[i++], enumeration.next());
}
}
/**
 * getAll() on an unordered attribute with no values yields an empty
 * enumeration.
 */
public void testGetAll_unordered_noValue() throws NamingException {
NamingEnumeration<?> enumeration = unorderedAttribute.getAll();
int count = 0;
while (enumeration.hasMore()) {
// Consume the element. The original loop body never called next(), so
// an unexpectedly non-empty enumeration would have looped forever
// instead of failing the assertion below.
enumeration.next();
count++;
}
assertEquals(0, count);
}
// BasicAttribute does not carry schema information: both definition
// accessors must throw OperationNotSupportedException.
public void testGetAttributeDefinition() throws NamingException {
try {
orderedAttribute.getAttributeDefinition();
fail("Should throw OperationNotSupportedException");
} catch (OperationNotSupportedException e) {
}
}
public void testGetAttributeSyntaxDefinition() throws NamingException {
try {
orderedAttribute.getAttributeSyntaxDefinition();
fail("Should throw OperationNotSupportedException");
} catch (OperationNotSupportedException e) {
}
}
// getID() returns the ID passed to the constructor, including null.
public void testGetID() {
String ID = "attribute ID";
BasicAttribute attribute = new BasicAttribute(ID);
assertEquals(ID, attribute.getID());
}
public void testGetID_null() {
BasicAttribute attribute = new BasicAttribute(null);
assertNull(attribute.getID());
}
// isOrdered() reflects the constructor flag; default is unordered.
public void testIsOrdered() {
String ID = "ordered";
BasicAttribute attribute = new BasicAttribute(ID, true);
assertTrue(attribute.isOrdered());
}
public void testIsOrdered_false() {
String ID = "unordered";
BasicAttribute attribute = new BasicAttribute(ID);
assertFalse(attribute.isOrdered());
}
/**
 * remove(int): removes and returns the value at the index; later values
 * shift down by one.
 */
public void testRemove_simple() throws NamingException {
int count = 5;
Person[] persons = new Person[count];
for (int i = 0; i < count; i++) {
persons[i] = Person.getInstance();
unorderedAttribute.add(persons[i]);
}
assertEquals(persons[0], unorderedAttribute.remove(0));
for (int i = 0; i < count - 1; i++) {
assertSame(persons[i + 1], unorderedAttribute.get(i));
}
}
// remove(int) on an empty attribute throws IndexOutOfBoundsException.
public void testRemove_novalue() {
try {
orderedAttribute.remove(0);
fail("Should throw IndexOutOfBoundsException.");
} catch (IndexOutOfBoundsException e) {
}
}
// Negative index throws IndexOutOfBoundsException.
public void testRemove_tooSmall() {
orderedAttribute.add("value one");
try {
orderedAttribute.remove(-1);
fail("Should throw IndexOutOfBoundsException.");
} catch (IndexOutOfBoundsException e) {
}
}
// Index == size() (one past the end) throws IndexOutOfBoundsException.
public void testRemove_tooLarge() {
orderedAttribute.add("value one");
try {
orderedAttribute.remove(orderedAttribute.size());
fail("Should throw IndexOutOfBoundsException.");
} catch (IndexOutOfBoundsException e) {
}
}
/**
 * remove(Object): returns true when a matching value was removed.
 */
public void testRemove2_simple() {
int count = 5;
Person[] persons = new Person[count];
for (int i = 0; i < count; i++) {
persons[i] = Person.getInstance();
unorderedAttribute.add(persons[i]);
}
for (int i = 0; i < count; i++) {
assertTrue(unorderedAttribute.remove(persons[i]));
}
}
// With duplicates, remove(Object) removes only the FIRST occurrence:
// the second copy of person shifts to index 1 and remains.
public void testRemove2_DuplicateValue() throws NamingException {
Person person = Person.getInstance();
orderedAttribute.add(0, person);
orderedAttribute.add(1, "signal");
orderedAttribute.add(2, person);
assertTrue(orderedAttribute.remove(person));
assertEquals(2, orderedAttribute.size());
assertEquals(person, orderedAttribute.get(1));
}
// remove(Object) returns false when no value matches.
public void testRemove2_NotMatch() {
Person person = Person.getInstance();
unorderedAttribute.add(person);
Person person2 = Person.getInstance();
assertFalse(unorderedAttribute.remove(person2));
}
// remove(Object) on an empty attribute returns false (no exception).
public void testRemove2_NoValue() {
assertFalse(unorderedAttribute.remove("Novalue"));
}
// Array values are matched element-wise for removal, same as contains().
public void testRemove2_array() {
int count = 5;
Person[] persons = new Person[count];
for (int i = 0; i < count; i++) {
persons[i] = Person.getInstance();
}
Person[] newPersons = new Person[count];
System.arraycopy(persons, 0, newPersons, 0, count);
orderedAttribute.add(persons);
assertTrue(orderedAttribute.remove(newPersons));
}
// set(int, Object) replaces the value and returns the old value.
public void testSet_ordered_Simple() throws NamingException {
Person person = Person.getInstance();
orderedAttribute.add(person);
Person person2 = Person.getInstance();
assertEquals(person, orderedAttribute.set(0, person2));
assertEquals(person2, orderedAttribute.get(0));
}
// A null replacement value is permitted on an ordered attribute.
public void testSet_ordered_NewValueNull() throws NamingException {
Person person = Person.getInstance();
orderedAttribute.add(person);
assertEquals(person, orderedAttribute.set(0, null));
assertNull(orderedAttribute.get(0));
}
// Replacing a stored null returns null as the old value.
public void testSet_ordered_OldValueNull() throws NamingException {
orderedAttribute.add(null);
Person person = Person.getInstance();
assertNull(orderedAttribute.set(0, person));
assertEquals(person, orderedAttribute.get(0));
}
/**
 * set() with a negative index on an ordered attribute throws
 * IndexOutOfBoundsException.
 */
public void testSet_ordered_IndexTooSmall() {
orderedAttribute.add("value");
try {
// Fixed copy-paste bug: the original called remove(-1), so set() with
// a negative index was never actually exercised by this test.
orderedAttribute.set(-1, "new value");
fail("Should throw IndexOutOfBoundsException.");
} catch (IndexOutOfBoundsException e) {
}
}
/**
 * set() with index == size() (one past the end) on an ordered attribute
 * throws IndexOutOfBoundsException.
 */
public void testSet_ordered_IndexTooLarge() {
orderedAttribute.add("value");
try {
// Fixed copy-paste bug: the original called remove(size()), so set()
// with an out-of-range index was never actually exercised here.
orderedAttribute.set(orderedAttribute.size(), "new value");
fail("Should throw IndexOutOfBoundsException.");
} catch (IndexOutOfBoundsException e) {
}
}
// Setting a value that is already present is allowed on an ORDERED
// attribute (duplicates are legal there).
public void testSet_order_ExistValue() throws NamingException {
Person person = Person.getInstance();
orderedAttribute.add(person);
assertEquals(person, orderedAttribute.set(0, person));
assertEquals(person, orderedAttribute.get(0));
}
// On an UNORDERED attribute the same operation must throw
// IllegalStateException: duplicate values are not permitted.
public void testSet_unorder_ExistValue() {
Person person = Person.getInstance();
unorderedAttribute.add(person);
try {
unorderedAttribute.set(0, person);
fail("Should throw IllegalStateException.");
} catch (IllegalStateException e) {
}
}
// Duplicate detection uses element-wise array equality, so an equal copy
// of a stored array also triggers IllegalStateException.
public void testSet_unorder_ExistValueArray() {
int count = 5;
Person[] persons = new Person[count];
for (int i = 0; i < count; i++) {
persons[i] = Person.getInstance();
}
Person[] newPersons = new Person[count];
System.arraycopy(persons, 0, newPersons, 0, count);
unorderedAttribute.add(persons);
try {
unorderedAttribute.set(0, newPersons);
fail("Should throw IllegalStateException.");
} catch (IllegalStateException e) {
}
}
// size() tracks adds and is reset to 0 by clear().
public void testSize() {
assertEquals(0, orderedAttribute.size());
int count = 5;
for (int i = 0; i < count; i++) {
orderedAttribute.add("value" + i);
}
assertEquals(count, orderedAttribute.size());
orderedAttribute.clear();
assertEquals(0, orderedAttribute.size());
}
/**
 * equals(): attributes with the same ID and equal values are symmetric
 * equals; comparison against null is false (not an NPE).
 */
public void testEquals() throws CloneNotSupportedException {
String ID = "equals";
Person person = Person.getInstance();
Person personClone = (Person) person.clone();
BasicAttribute attribute0 = new BasicAttribute(ID);
attribute0.add(person);
BasicAttribute attribute1 = new BasicAttribute(ID);
attribute1.add(personClone);
assertTrue(attribute0.equals(attribute1));
assertTrue(attribute1.equals(attribute0));
assertFalse(attribute0.equals(null));
}
// Array values participate in equals() via element-wise comparison.
public void testEquals_Array() throws CloneNotSupportedException {
int count = 5;
Person[] persons = new Person[count];
for (int i = 0; i < count; i++) {
persons[i] = Person.getInstance();
}
Person[] newPersons = new Person[count];
System.arraycopy(persons, 0, newPersons, 0, count);
String id = "Array Attribute";
BasicAttribute attribute0 = new BasicAttribute(id, persons, true);
BasicAttribute attribute1 = new BasicAttribute(id, newPersons, true);
assertTrue(attribute0.equals(attribute1));
assertTrue(attribute1.equals(attribute0));
assertFalse(attribute0.equals(null));
}
/**
 * Attributes with different IDs are never equal.
 */
public void testNotEquals_ByID() {
String ID = "equals";
String ID2 = "not equals";
BasicAttribute attribute0 = new BasicAttribute(ID);
BasicAttribute attribute1 = new BasicAttribute(ID2);
assertFalse(attribute0.equals(attribute1));
}
/**
 * An ordered and an unordered attribute are never equal, even with the
 * same ID and value.
 */
public void testNotEquals_ByOrderFlag() {
String ID = "not equals";
Person person = Person.getInstance();
BasicAttribute attribute0 = new BasicAttribute(ID, person, false);
BasicAttribute attribute1 = new BasicAttribute(ID, person, true);
assertFalse(attribute0.equals(attribute1));
}
/**
 * Different values make attributes unequal.
 */
public void testNotEquals_ByValue() {
String ID = "not equals";
Person person0 = Person.getInstance();
Person person1 = Person.getInstance();
BasicAttribute attribute0 = new BasicAttribute(ID, person0);
BasicAttribute attribute1 = new BasicAttribute(ID, person1);
assertFalse(attribute0.equals(attribute1));
}
// equals() on attributes constructed with a null ID throws NPE — the
// implementation dereferences the ID without a null check.
public void testEquals_IDNull() {
String strObj = "attribute with null id";
BasicAttribute attribute0 = new BasicAttribute(null, strObj);
BasicAttribute attribute1 = new BasicAttribute(null, strObj);
try {
attribute0.equals(attribute1);
fail("Should throw NullPointerException.");
} catch (NullPointerException e) {
}
}
// Two attributes whose single value is null compare equal.
public void testEquals_ObjNull() {
String id = "no-value";
BasicAttribute attribute0 = new BasicAttribute(id, null);
BasicAttribute attribute1 = new BasicAttribute(id, null);
assertTrue(attribute0.equals(attribute1));
}
// For UNORDERED attributes, equals() ignores value order: the same values
// added in reverse order still compare equal.
public void testEquals_diff_ordered() {
int count = 5;
Person[] persons = new Person[count];
for (int i = 0; i < count; i++) {
persons[i] = Person.getInstance();
}
String id = "un-Ordered";
BasicAttribute unordered0 = new BasicAttribute(id);
BasicAttribute unordered1 = new BasicAttribute(id);
for (int i = 0; i < count; i++) {
unordered0.add(persons[i]);
}
for (int i = count - 1; i > -1; i--) {
unordered1.add(persons[i]);
}
assertEquals(unordered0.size(), unordered1.size());
assertTrue(unordered0.equals(unordered1));
}
/**
 * 1. Check ordered.equals(unordered) 2. Check unordered.equals(ordered)
 * 3. The values have the same order. Mixed ordering flags are unequal in
 * both directions regardless of value order.
 */
public void testEquals_Ordered_Unordered_1() {
int count = 5;
Person[] persons = new Person[count];
for (int i = 0; i < count; i++) {
persons[i] = Person.getInstance();
}
for (int i = 0; i < count; i++) {
orderedAttribute.add(persons[i]);
unorderedAttribute.add(persons[i]);
}
assertFalse(orderedAttribute.equals(unorderedAttribute));
assertFalse(unorderedAttribute.equals(orderedAttribute));
}
/**
 * Same as above but with the values in different orders — still unequal
 * in both directions.
 */
public void testEquals_Ordered_Unordered_2() {
int count = 5;
Person[] persons = new Person[count];
for (int i = 0; i < count; i++) {
persons[i] = Person.getInstance();
}
for (int i = 0; i < count; i++) {
orderedAttribute.add(persons[i]);
}
for (int i = count - 1; i > -1; i--) {
unorderedAttribute.add(persons[i]);
}
assertFalse(unorderedAttribute.equals(orderedAttribute));
assertFalse(orderedAttribute.equals(unorderedAttribute));
}
// hashCode() == ID.hashCode() + sum of each value's hashCode().
public void testHashCode_simple() throws NamingException {
int count = 5;
for (int i = 0; i < count; i++) {
orderedAttribute.add("Value: " + i);
}
int hashCode = orderedAttribute.getID().hashCode();
for (int i = 0; i < count; i++) {
hashCode += orderedAttribute.get(i).hashCode();
}
assertEquals(hashCode, orderedAttribute.hashCode());
}
// With no values the hash is just the ID's hash.
public void testHashCode_noValue() {
assertEquals(unorderedAttribute.getID().hashCode(), unorderedAttribute
.hashCode());
}
// An array value contributes the sum of its non-null elements' hashes;
// null elements contribute nothing.
public void testHashCode_arrayValue() {
String[] strs = { "Blue", "Yellow", null, "Red", };
String id = "Array Attribute";
BasicAttribute attribute = new BasicAttribute(id, strs);
int hashCode = id.hashCode();
for (String element : strs) {
if (element != null) {
hashCode += element.hashCode();
}
}
assertEquals(hashCode, attribute.hashCode());
}
// int[] elements contribute their boxed hash, which for Integer equals
// the int value itself.
public void testHashCode_intArrayValue() {
int[] numbers = new int[10];
for (int i = 0; i < numbers.length; i++) {
numbers[i] = i * 10;
}
String id = "int-Array";
BasicAttribute attribute = new BasicAttribute(id, numbers);
Person person = Person.getInstance();
attribute.add(person);
int hashCode = id.hashCode() + person.hashCode();
for (int element : numbers) {
hashCode += element;
}
assertEquals(hashCode, attribute.hashCode());
}
// double[] elements contribute Double.hashCode() of each element.
public void testHashCode_DoubleArray() {
Random random = new Random(100);
double[] doubles = new double[10];
for (int i = 0; i < doubles.length; i++) {
doubles[i] = random.nextDouble() * 1000;
}
String id = "double-Array";
BasicAttribute attribute = new BasicAttribute(id, doubles);
int hashCode = id.hashCode();
for (double element : doubles) {
hashCode += new Double(element).hashCode();
}
assertEquals(hashCode, attribute.hashCode());
}
// A null ID makes hashCode() throw NPE (no null guard in the impl).
public void testHashCode_IDnull() {
BasicAttribute attribute = new BasicAttribute(null, "ID==NULL");
try {
attribute.hashCode();
fail("Should throw NullPointerException.");
} catch (NullPointerException e) {
}
}
// Null values contribute nothing: hash equals the ID's hash.
public void testHashCode_ObjNull() {
String id = "nulls";
BasicAttribute attribute = new BasicAttribute(id, true);
for (int i = 0; i < 5; i++) {
attribute.add(null);
}
assertEquals(id.hashCode(), attribute.hashCode());
}
// toString() tests only assert non-null: the exact format is
// implementation-defined (see the commented-out expected strings below).
public void testToString_simple() {
// TO DO: explore behavior
int count = 5;
for (int i = 0; i < count; i++) {
orderedAttribute.add("Value: " + i);
}
assertNotNull(orderedAttribute.toString());
}
public void testToString_noValue() {
// TO DO: explore behavior
/*
 * assertEquals( "Attribute ID: Unordered_Attribute\nAttribute values:
 * This Attribute does not have any values.\n",
 * unorderedAttribute.toString());
 */
assertNotNull(unorderedAttribute.toString());
}
public void testToString_ArrayValue() {
// TO DO: explore behavior
String[] strs = { "Blue", "Yellow", null, "Red", };
String id = "Array Attribute";
BasicAttribute attribute = new BasicAttribute(id, strs);
/*
 * assertEquals( "Attribute ID: " + id + "\nAttribute values: " +
 * strs.toString() + "\n", attribute.toString());
 */
assertNotNull(attribute.toString());
}
public void testToString_intValue() {
// TO DO: explore behavior
int[] numbers = new int[10];
for (int i = 0; i < numbers.length; i++) {
numbers[i] = i * 10;
}
String id = "int-Array";
BasicAttribute attribute = new BasicAttribute(id, numbers);
/*
 * assertEquals( "Attribute ID: " + id + "\nAttribute values: " +
 * numbers.toString() + "\n", attribute.toString());
 */
assertNotNull(attribute.toString());
}
public void testToString_doubleValue() {
// TO DO: explore behavior
Random random = new Random(1000);
double[] doubles = new double[10];
for (int i = 0; i < doubles.length; i++) {
doubles[i] = random.nextDouble() * 1000;
}
String id = "double-Array";
BasicAttribute attribute = new BasicAttribute(id, doubles);
/*
 * assertEquals( "Attribute ID: " + id + "\nAttribute values: " +
 * doubles.toString() + "\n", attribute.toString());
 */
assertNotNull(attribute.toString());
}
// Null values must not make toString() fail.
public void testToString_nullValue() {
// TO DO: explore behavior
String id = "nulls";
BasicAttribute attribute = new BasicAttribute(id, true);
for (int i = 0; i < 5; i++) {
attribute.add(null);
}
assertNotNull(attribute.toString());
}
// A null ID must not make toString() fail (unlike hashCode/equals).
public void testToString_IDNull() {
// TO DO: explore behavior
BasicAttribute attribute = new BasicAttribute(null, "ID==NULL");
assertNotNull(attribute.toString());
}
// Round-trip serialization: a populated attribute written to a byte array
// and read back must equal the original.
public void testSerializable_Simple() throws ClassNotFoundException,
IOException {
int count = 5;
Person[] persons = new Person[count];
for (int i = 0; i < count; i++) {
persons[i] = Person.getInstance();
unorderedAttribute.add(persons[i]);
}
// write to byte array
ByteArrayOutputStream baos = new ByteArrayOutputStream();
ObjectOutputStream oos = new ObjectOutputStream(baos);
oos.writeObject(unorderedAttribute);
byte[] buffer = baos.toByteArray();
// read from byte array
ByteArrayInputStream bais = new ByteArrayInputStream(buffer);
ObjectInputStream ois = new ObjectInputStream(bais);
BasicAttribute attribute2 = (BasicAttribute) ois.readObject();
assertEquals(unorderedAttribute, attribute2);
}
// Cross-version compatibility: deserialize a golden .ser fixture (written
// by a reference implementation) and compare with a freshly built
// attribute containing the same ID and values.
public void testSerializable_compatibility() throws ClassNotFoundException,
IOException {
ObjectInputStream ois = new ObjectInputStream(
getClass()
.getClassLoader()
.getResourceAsStream(
"/serialization/javax/naming/directory/BasicAttribute.ser"));
BasicAttribute attribute2 = (BasicAttribute) ois.readObject();
BasicAttribute attribute = new BasicAttribute("serializeBasicAttribute");
int count = 10;
for (int i = 0; i < count; i++) {
attribute.add("Int value: " + i * 10);
}
assertEquals(attribute, attribute2);
// TO DO: cause an EOFException
}
}
|
google/closure-compiler | 35,921 | test/com/google/javascript/jscomp/PeepholeMinimizeConditionsTest.java | /*
* Copyright 2004 The Closure Compiler Authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.javascript.jscomp;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.junit.runners.JUnit4;
/**
* Tests for {@link PeepholeMinimizeConditions} in isolation. Tests for the interaction of multiple
* peephole passes are in PeepholeIntegrationTest.
*/
@RunWith(JUnit4.class)
public final class PeepholeMinimizeConditionsTest extends CompilerTestCase {
// Whether PeepholeMinimizeConditions runs in "late" mode; reset to true
// before each test and flipped to false by tests of early-phase behavior.
private boolean late = true;
public PeepholeMinimizeConditionsTest() {
super(DEFAULT_EXTERNS);
}
@Override
@Before
public void setUp() throws Exception {
super.setUp();
late = true;
disableTypeCheck();
}
@Override
protected CompilerPass getProcessor(final Compiler compiler) {
// Single-pass peephole run (no retraversal) so each test observes
// exactly one application of PeepholeMinimizeConditions.
PeepholeOptimizationsPass peepholePass =
new PeepholeOptimizationsPass(compiler, getName(), new PeepholeMinimizeConditions(late));
peepholePass.setRetraverseOnChange(false);
return peepholePass;
}
// Asserts the pass leaves the input unchanged.
private void foldSame(String js) {
testSame(js);
}
// Asserts the pass rewrites js into expected.
private void fold(String js, String expected) {
test(js, expected);
}
/** Check that removing blocks with 1 child works */
@Test
public void testFoldOneChildBlocks() {
// Early-phase behavior: some folds differ from late mode.
late = false;
fold("function f(){if(x)a();x=3}", "function f(){x&&a();x=3}");
fold("function f(){if(x)a?.();x=3}", "function f(){x&&a?.();x=3}");
fold("function f(){if(x){a()}x=3}", "function f(){x&&a();x=3}");
fold("function f(){if(x){a?.()}x=3}", "function f(){x&&a?.();x=3}");
fold("function f(){if(x){return 3}}", "function f(){if(x)return 3}");
fold("function f(){if(x){a()}}", "function f(){x&&a()}");
fold("function f(){if(x){throw 1}}", "function f(){if(x)throw 1;}");
// Try it out with functions
fold("function f(){if(x){foo()}}", "function f(){x&&foo()}");
fold("function f(){if(x){foo()}else{bar()}}", "function f(){x?foo():bar()}");
// Try it out with properties and methods
fold("function f(){if(x){a.b=1}}", "function f(){if(x)a.b=1}");
fold("function f(){if(x){a.b*=1}}", "function f(){x&&(a.b*=1)}");
fold("function f(){if(x){a.b+=1}}", "function f(){x&&(a.b+=1)}");
fold("function f(){if(x){++a.b}}", "function f(){x&&++a.b}");
fold("function f(){if(x){a.foo()}}", "function f(){x&&a.foo()}");
fold("function f(){if(x){a?.foo()}}", "function f(){x&&a?.foo()}");
// Try it out with throw/catch/finally [which should not change]
foldSame("function f(){try{foo()}catch(e){bar(e)}finally{baz()}}");
// Try it out with switch statements
foldSame("function f(){switch(x){case 1:break}}");
// Do while loops stay in a block if that's where they started
foldSame("function f(){if(e1){do foo();while(e2)}else foo2()}");
// Test an obscure case with do and while
fold("if(x){do{foo()}while(y)}else bar()", "if(x){do foo();while(y)}else bar()");
// Play with nested IFs
fold("function f(){if(x){if(y)foo()}}", "function f(){x && (y && foo())}");
fold("function f(){if(x){if(y)foo();else bar()}}", "function f(){x&&(y?foo():bar())}");
fold("function f(){if(x){if(y)foo()}else bar()}", "function f(){x?y&&foo():bar()}");
fold(
"function f(){if(x){if(y)foo();else bar()}else{baz()}}",
"function f(){x?y?foo():bar():baz()}");
fold("if(e1){while(e2){if(e3){foo()}}}else{bar()}", "if(e1)while(e2)e3&&foo();else bar()");
fold("if(e1){with(e2){if(e3){foo()}}}else{bar()}", "if(e1)with(e2)e3&&foo();else bar()");
fold("if(a||b){if(c||d){var x;}}", "if(a||b)if(c||d)var x");
fold("if(x){ if(y){var x;}else{var z;} }", "if(x)if(y)var x;else var z");
// NOTE - technically we can remove the blocks since both the parent
// and child have elses. But we don't since it causes ambiguities in
// some cases where not all descendent ifs having elses
fold("if(x){ if(y){var x;}else{var z;} }else{var w}", "if(x)if(y)var x;else var z;else var w");
fold("if (x) {var x;}else { if (y) { var y;} }", "if(x)var x;else if(y)var y");
// Here's some of the ambiguous cases
fold(
"if(a){if(b){f1();f2();}else if(c){f3();}}else {if(d){f4();}}",
"if(a)if(b){f1();f2()}else c&&f3();else d&&f4()");
fold("function f(){foo()}", "function f(){foo()}");
fold("switch(x){case y: foo()}", "switch(x){case y:foo()}");
fold("try{foo()}catch(ex){bar()}finally{baz()}", "try{foo()}catch(ex){bar()}finally{baz()}");
}
/** Try to minimize returns: if/else return pairs collapse to a hook. */
@Test
public void testFoldReturns() {
fold("function f(){if(x)return 1;else return 2}", "function f(){return x?1:2}");
fold("function f(){if(x)return 1;return 2}", "function f(){return x?1:2}");
fold("function f(){if(x)return;return 2}", "function f(){return x?void 0:2}");
fold("function f(){if(x)return 1+x;else return 2-x}", "function f(){return x?1+x:2-x}");
fold("function f(){if(x)return 1+x;return 2-x}", "function f(){return x?1+x:2-x}");
fold(
"function f(){if(x)return y += 1;else return y += 2}",
"function f(){return x?(y+=1):(y+=2)}");
fold("function f(){if(x)return;else return 2-x}", "function f(){if(x);else return 2-x}");
fold("function f(){if(x)return;return 2-x}", "function f(){return x?void 0:2-x}");
fold("function f(){if(x)return x;else return}", "function f(){if(x)return x;{}}");
fold("function f(){if(x)return x;return}", "function f(){if(x)return x}");
// A return inside a for-in must not be merged with the trailing return.
foldSame("function f(){for(var x in y) { return x.y; } return k}");
}
// Consecutive ifs that return the same value merge into one condition.
@Test
public void testCombineIfs1() {
fold("function f() {if (x) return 1; if (y) return 1}", "function f() {if (x||y) return 1;}");
fold(
"function f() {if (x) return 1; if (y) foo(); else return 1}",
"function f() {if ((!x)&&y) foo(); else return 1;}");
}
@Test
public void testCombineIfs2() {
// combinable but not yet done
foldSame("function f() {if (x) throw 1; if (y) throw 1}");
// Can't combine, side-effect
fold("function f(){ if (x) g(); if (y) g() }", "function f(){ x&&g(); y&&g() }");
fold("function f(){ if (x) g?.(); if (y) g?.() }", "function f(){ x&&g?.(); y&&g?.() }");
// Can't combine, side-effect
fold("function f(){ if (x) y = 0; if (y) y = 0; }", "function f(){ x&&(y = 0); y&&(y = 0); }");
}
// A multi-statement second branch blocks the combine.
@Test
public void testCombineIfs3() {
foldSame("function f() {if (x) return 1; if (y) {g();f()}}");
}
/** Try to minimize assignments: if/else assigning the same LHS with the
 * same operator folds the RHS into a hook. */
@Test
public void testFoldAssignments() {
fold("function f(){if(x)y=3;else y=4;}", "function f(){y=x?3:4}");
fold("function f(){if(x)y=1+a;else y=2+a;}", "function f(){y=x?1+a:2+a}");
// and operation assignments
fold("function f(){if(x)y+=1;else y+=2;}", "function f(){y+=x?1:2}");
fold("function f(){if(x)y-=1;else y-=2;}", "function f(){y-=x?1:2}");
fold("function f(){if(x)y%=1;else y%=2;}", "function f(){y%=x?1:2}");
fold("function f(){if(x)y|=1;else y|=2;}", "function f(){y|=x?1:2}");
// Don't fold if the 2 ops don't match.
foldSame("function f(){x ? y-=1 : y+=2}");
// Don't fold if the 2 LHS don't match.
foldSame("function f(){x ? y-=1 : z-=1}");
// Don't fold if there are potential effects.
foldSame("function f(){x ? y().a=3 : y().a=4}");
}
// Trailing statements identical in both branches hoist out of the if;
// requires normalized AST (unique names).
@Test
public void testRemoveDuplicateStatements() {
enableNormalize();
// TODO(bradfordcsmith): Stop normalizing the expected output or document why it is necessary.
enableNormalizeExpectedOutput();
fold("if (a) { x = 1; x++ } else { x = 2; x++ }", "x=(a) ? 1 : 2; x++");
fold(
"""
if (a) { x = 1; x++; y += 1; z = pi; }
else { x = 2; x++; y += 1; z = pi; }
""",
"x=(a) ? 1 : 2; x++; y += 1; z = pi;");
fold(
"""
function z() {
if (a) { foo(); return !0 } else { goo(); return !0 }
}
""",
"function z() {(a) ? foo() : goo(); return !0}");
fold(
"""
function z() {if (a) { foo(); x = true; return true
} else { goo(); x = true; return true }}
""",
"function z() {(a) ? foo() : goo(); x = true; return true}");
fold(
"""
function z() {
if (a) { bar(); foo(); return true }
else { bar(); goo(); return true }
}
""",
"""
function z() {
if (a) { bar(); foo(); }
else { bar(); goo(); }
return true;
}
""");
}
@Test
public void testFoldReturnsIntegration2() {
late = true;
disableNormalize();
// if-then-else duplicate statement removal handles this case:
testSame("function test(a) {if (a) {const a = Math.random();if(a) {return a;}} return a; }");
}
@Test
public void testDontRemoveDuplicateStatementsWithoutNormalization() {
// In the following test case, we can't remove the duplicate "alert(x);" lines since each "x"
// refers to a different variable.
// We only try removing duplicate statements if the AST is normalized and names are unique.
testSame("if (Math.random() < 0.5) { const x = 3; alert(x); } else { const x = 5; alert(x); }");
}
// if(!x) rewrites to x|| form; negated assignments need parentheses.
@Test
public void testNotCond() {
fold("function f(){if(!x)foo()}", "function f(){x||foo()}");
fold("function f(){if(!x)b=1}", "function f(){x||(b=1)}");
fold("if(!x)z=1;else if(y)z=2", "if(x){y&&(z=2);}else{z=1;}");
fold("if(x)y&&(z=2);else z=1;", "x ? y&&(z=2) : z=1");
fold("function f(){if(!(x=1))a.b=1}", "function f(){(x=1)||(a.b=1)}");
}
// && form is chosen when it doesn't add parentheses.
@Test
public void testAndParenthesesCount() {
fold("function f(){if(x||y)a.foo()}", "function f(){(x||y)&&a.foo()}");
fold("function f(){if(x.a)x.a=0}", "function f(){x.a&&(x.a=0)}");
fold("function f(){if(x?.a)x.a=0}", "function f(){x?.a&&(x.a=0)}");
foldSame("function f(){if(x()||y()){x()||y()}}");
}
@Test
public void testFoldLogicalOpStringCompare() {
// side-effects
// There is two way to parse two &&'s and both are correct.
fold("if (foo() && false) z()", "(foo(), 0) && z()");
}
// !(a op b) folds into the inverse comparison only where NaN-safe.
@Test
public void testFoldNot() {
fold("while(!(x==y)){a=b;}", "while(x!=y){a=b;}");
fold("while(!(x!=y)){a=b;}", "while(x==y){a=b;}");
fold("while(!(x===y)){a=b;}", "while(x!==y){a=b;}");
fold("while(!(x!==y)){a=b;}", "while(x===y){a=b;}");
// Because !(x<NaN) != x>=NaN don't fold < and > cases.
foldSame("while(!(x>y)){a=b;}");
foldSame("while(!(x>=y)){a=b;}");
foldSame("while(!(x<y)){a=b;}");
foldSame("while(!(x<=y)){a=b;}");
foldSame("while(!(x<=NaN)){a=b;}");
// NOT forces a boolean context
fold("x = !(y() && true)", "x = !y()");
// This will be further optimized by PeepholeFoldConstants.
fold("x = !true", "x = !1");
}
// Hooks/logical ops with boolean-literal arms simplify in a condition
// context (the left operand of &&).
@Test
public void testMinimizeExprCondition() {
fold("(x ? true : false) && y()", "x&&y()");
fold("(x ? false : true) && y()", "(!x)&&y()");
fold("(x ? true : y) && y()", "(x || y)&&y()");
fold("(x ? y : false) && y()", "(x && y)&&y()");
fold("(x && true) && y()", "x && y()");
fold("(x && false) && y()", "0&&y()");
fold("(x || true) && y()", "1&&y()");
fold("(x || false) && y()", "x&&y()");
}
@Test
public void testMinimizeWhileCondition() {
// This test uses constant folding logic, so is only here for completeness.
fold("while(!!true) foo()", "while(1) foo()");
// These test tryMinimizeCondition
fold("while(!!x) foo()", "while(x) foo()");
fold("while(!(!x&&!y)) foo()", "while(x||y) foo()");
fold("while(x||!!y) foo()", "while(x||y) foo()");
fold("while(!(!!x&&y)) foo()", "while(!x||!y) foo()");
fold("while(!(!x&&y)) foo()", "while(x||!y) foo()");
fold("while(!(x||!y)) foo()", "while(!x&&y) foo()");
fold("while(!(x||y)) foo()", "while(!x&&!y) foo()");
fold("while(!(!x||y-z)) foo()", "while(x&&!(y-z)) foo()");
fold("while(!(!(x/y)||z+w)) foo()", "while(x/y&&!(z+w)) foo()");
foldSame("while(!(x+y||z)) foo()");
foldSame("while(!(x&&y*z)) foo()");
fold("while(!(!!x&&y)) foo()", "while(!x||!y) foo()");
fold("while(x&&!0) foo()", "while(x) foo()");
fold("while(x||!1) foo()", "while(x) foo()");
fold("while(!((x,y)&&z)) foo()", "while((x,!y)||!z) foo()");
}
  /** A leading {@code !} over a conjunction/disjunction folds into the if-to-&&/|| rewrite. */
  @Test
  public void testMinimizeDemorganRemoveLeadingNot() {
    fold("if(!(!a||!b)&&c) foo()", "((a&&b)&&c)&&foo()");
    fold("if(!(x&&y)) foo()", "x&&y||foo()");
    fold("if(!(x||y)) foo()", "(x||y)||foo()");
  }
  @Test
  public void testMinimizeDemorgan1() {
    fold("if(!a&&!b)foo()", "(a||b)||foo()");
  }
  @Test
  public void testMinimizeDemorgan2() {
    // Make sure trees with cloned functions are marked as changed
    fold("(!(a&&!((function(){})())))||foo()", "!a||(function(){})()||foo()");
  }
  @Test
  public void testMinimizeDemorgan2b() {
    // Make sure unchanged trees with functions are not marked as changed
    foldSame("!a||(function(){})()||foo()");
  }
  @Test
  public void testMinimizeDemorgan3() {
    fold("if((!a||!b)&&(c||d)) foo()", "(a&&b||!c&&!d)||foo()");
  }
  @Test
  public void testMinimizeDemorgan5() {
    fold("if((!a||!b)&&c) foo()", "(a&&b||!c)||foo()");
  }
  @Test
  public void testMinimizeDemorgan11() {
    fold(
        "if (x && (y===2 || !f()) && (y===3 || !h())) foo()",
        "(!x || y!==2 && f() || y!==3 && h()) || foo()");
  }
  @Test
  public void testMinimizeDemorgan20a() {
    fold(
        "if (0===c && (2===a || 1===a)) f(); else g()",
        "if (0!==c || 2!==a && 1!==a) g(); else f()");
  }
  @Test
  public void testMinimizeDemorgan20b() {
    fold("if (0!==c || 2!==a && 1!==a) g(); else f()", "(0!==c || 2!==a && 1!==a) ? g() : f()");
  }
  @Test
  public void testPreserveIf() {
    foldSame("if(!a&&!b)for(;f(););");
  }
  /** No if/else branch swap when it would create a dangling-else ambiguity. */
  @Test
  public void testNoSwapWithDanglingElse() {
    foldSame("if(!x) {for(;;)foo(); for(;;)bar()} else if(y) for(;;) f()");
    foldSame("if(!a&&!b) {for(;;)foo(); for(;;)bar()} else if(y) for(;;) f()");
  }
  /** Hook expressions ({@code c ? a : b}) collapse to {@code ||}/swapped branches when safe. */
  @Test
  public void testMinimizeHook() {
    fold("x ? x : y", "x || y");
    // We assume GETPROPs don't have side effects.
    fold("x.y ? x.y : x.z", "x.y || x.z");
    fold("x?.y ? x?.y : x.z", "x?.y || x.z");
    fold("x?.y ? x?.y : x?.z", "x?.y || x?.z");
    // This can be folded if x() does not have side effects.
    foldSame("x() ? x() : y()");
    foldSame("x?.() ? x?.() : y()");
    fold("!x ? foo() : bar()", "x ? bar() : foo()");
    fold("while(!(x ? y : z)) foo();", "while(x ? !y : !z) foo();");
    fold("(x ? !y : !z) ? foo() : bar()", "(x ? y : z) ? bar() : foo()");
  }
  /** A negation over a comma expression moves to the last operand. */
  @Test
  public void testMinimizeComma() {
    fold("while(!(inc(), test())) foo();", "while(inc(), !test()) foo();");
    fold("(inc(), !test()) ? foo() : bar()", "(inc(), test()) ? bar() : foo()");
  }
  @Test
  public void testMinimizeExprResult() {
    fold("!x||!y", "x&&y");
    fold("if(!(x&&!y)) foo()", "(!x||y)&&foo()");
    fold("if(!x||y) foo()", "(!x||y)&&foo()");
    fold("(!x||y)&&foo()", "x&&!y||!foo()");
  }
  @Test
  public void testMinimizeDemorgan21() {
    fold("if (0===c && (2===a || 1===a)) f()", "(0!==c || 2!==a && 1!==a) || f()");
  }
  @Test
  public void testMinimizeAndOr1() {
    fold("if ((!a || !b) && (d || e)) f()", "(a&&b || !d&&!e) || f()");
  }
  @Test
  public void testMinimizeForCondition() {
    // This test uses constant folding logic, so is only here for completeness.
    // These could be simplified to "for(;;) ..."
    fold("for(;!!true;) foo()", "for(;1;) foo()");
    // Verify function deletion tracking.
    fold("if(!!true||function(){}) {}", "if(1) {}");
    // Don't bother with FOR inits as these are normalized out.
    fold("for(!!true;;) foo()", "for(!0;;) foo()");
    // These test tryMinimizeCondition
    fold("for(;!!x;) foo()", "for(;x;) foo()");
    foldSame("for(a in b) foo()");
    foldSame("for(a in {}) foo()");
    foldSame("for(a in []) foo()");
    fold("for(a in !!true) foo()", "for(a in !0) foo()");
    foldSame("for(a of b) foo()");
    foldSame("for(a of {}) foo()");
    foldSame("for(a of []) foo()");
    fold("for(a of !!true) foo()", "for(a of !0) foo()");
  }
  @Test
  public void testMinimizeCondition_example1() {
    // Based on a real failing code sample.
    fold("if(!!(f() > 20)) {foo();foo()}", "if(f() > 20){foo();foo()}");
  }
  /** With late optimizations enabled, a loop-exiting {@code if} folds into the loop condition. */
  @Test
  public void testFoldLoopBreakLate() {
    late = true;
    fold("for(;;) if (a) break", "for(;!a;);");
    foldSame("for(;;) if (a) { f(); break }");
    fold("for(;;) if (a) break; else f()", "for(;!a;) { { f(); } }");
    fold("for(;a;) if (b) break", "for(;a && !b;);");
    fold("for(;a;) { if (b) break; if (c) break; }", "for(;(a && !b);) if (c) break;");
    fold("for(;(a && !b);) if (c) break;", "for(;(a && !b) && !c;);");
    fold("for(;;) { if (foo) { break; var x; } } x;", "var x; for(;!foo;) {} x;");
    // 'while' is normalized to 'for'
    enableNormalize();
    fold("while(true) if (a) break", "for(;1&&!a;);");
    disableNormalize();
  }
  /** With late optimizations disabled, the same inputs are left untouched. */
  @Test
  public void testFoldLoopBreakEarly() {
    late = false;
    foldSame("for(;;) if (a) break");
    foldSame("for(;;) if (a) { f(); break }");
    foldSame("for(;;) if (a) break; else f()");
    foldSame("for(;a;) if (b) break");
    foldSame("for(;a;) { if (b) break; if (c) break; }");
    foldSame("while(1) if (a) break");
    enableNormalize();
    foldSame("for (; 1; ) if (a) break");
  }
  /** if/else assigning the same var collapses into one conditional declaration. */
  @Test
  public void testFoldConditionalVarDeclaration() {
    fold("if(x) var y=1;else y=2", "var y=x?1:2");
    fold("if(x) y=1;else var y=2", "var y=x?1:2");
    foldSame("if(x) var y = 1; z = 2");
    foldSame("if(x||y) y = 1; var z = 2");
    foldSame("if(x) { var y = 1; print(y)} else y = 2 ");
    foldSame("if(x) var y = 1; else {y = 2; print(y)}");
  }
  @Test
  public void testFoldIfWithLowerOperatorsInside() {
    fold("if (x + (y=5)) z && (w,z);", "x + (y=5) && (z && (w,z))");
    fold("if (!(x+(y=5))) z && (w,z);", "x + (y=5) || z && (w,z)");
    fold(
        "if (x + (y=5)) if (z && (w,z)) for(;;) foo();",
        "if (x + (y=5) && (z && (w,z))) for(;;) foo();");
  }
  /**
   * A {@code return} in a loop that duplicates the function's trailing return becomes {@code
   * break}. (Note: "Subsitute" is a longstanding typo in the method name, kept as-is.)
   */
  @Test
  public void testSubsituteReturn() {
    late = false;
    enableNormalize();
    // TODO(bradfordcsmith): Stop normalizing the expected output or document why it is necessary.
    enableNormalizeExpectedOutput();
    fold("function f() { while(x) { return }}", "function f() { while(x) { break }}");
    foldSame("function f() { while(x) { return 5 } }");
    foldSame("function f() { a: { return 5 } }");
    fold(
        "function f() { while(x) { return 5} return 5}",
        "function f() { while(x) { break } return 5}");
    fold(
        "function f() { while(x) { return x} return x}",
        "function f() { while(x) { break } return x}");
    fold(
        "function f() { while(x) { if (y) { return }}}",
        "function f() { while(x) { if (y) { break }}}");
    fold(
        "function f() { while(x) { if (y) { return }} return}",
        "function f() { while(x) { if (y) { break }}}");
    fold(
        "function f() { while(x) { if (y) { return 5 }} return 5}",
        "function f() { while(x) { if (y) { break }} return 5}");
    // It doesn't matter if x is changed between them. We are still returning
    // x at whatever value x currently holds. The whole x = 1 is skipped.
    fold(
        "function f() { while(x) { if (y) { return x } x = 1} return x}",
        "function f() { while(x) { if (y) { break } x = 1} return x}");
    fold(
        "function f() { while(x) { if (y) { return x } return x} return x}",
        "function f() { while(x) { if (y) {} break }return x}");
    // A break here only breaks out of the inner loop.
    foldSame("function f() { while(x) { while (y) { return } } }");
    foldSame("function f() { while(1) { return 7} return 5}");
    foldSame(
        """
        function f() {
        try { while(x) {return f()}} catch (e) { } return f()}
        """);
    foldSame(
        """
        function f() {
        try { while(x) {return f()}} finally {alert(1)} return f()}
        """);
    // Both returns have the same handler
    fold(
        """
        function f() {
        try { while(x) { return f() } return f() } catch (e) { } }
        """,
        """
        function f() {
        try { while(x) { break } return f() } catch (e) { } }
        """);
    // We can't fold this because it'll change the order of when foo is called.
    foldSame(
        """
        function f() {
        try { while(x) { return foo() } } finally { alert(1) }
        return foo()}
        """);
    // This is fine, we have no side effect in the return value.
    fold(
        """
        function f() {
        try { while(x) { return 1 } } finally { alert(1) } return 1}
        """,
        """
        function f() {
        try { while(x) { break } } finally { alert(1) } return 1}
        """);
    foldSame("function f() { try{ return a } finally { a = 2 } return a; }");
    fold(
        "function f() { switch(a){ case 1: return a; default: g();} return a;}",
        "function f() { switch(a){ case 1: break; default: g();} return a; }");
  }
  /** Mirror of testSubsituteReturn: {@code throw} duplicating a trailing throw becomes break. */
  @Test
  public void testSubsituteBreakForThrow() {
    late = false;
    enableNormalize();
    // TODO(bradfordcsmith): Stop normalizing the expected output or document why it is necessary.
    enableNormalizeExpectedOutput();
    foldSame("function f() { while(x) { throw Error }}");
    fold(
        "function f() { while(x) { throw Error } throw Error }",
        "function f() { while(x) { break } throw Error}");
    foldSame("function f() { while(x) { throw Error(1) } throw Error(2)}");
    foldSame("function f() { while(x) { throw Error(1) } return Error(2)}");
    foldSame("function f() { while(x) { throw 5 } }");
    foldSame("function f() { a: { throw 5 } }");
    fold(
        "function f() { while(x) { throw 5} throw 5}",
        "function f() { while(x) { break } throw 5}");
    fold(
        "function f() { while(x) { throw x} throw x}",
        "function f() { while(x) { break } throw x}");
    foldSame("function f() { while(x) { if (y) { throw Error }}}");
    fold(
        "function f() { while(x) { if (y) { throw Error }} throw Error}",
        "function f() { while(x) { if (y) { break }} throw Error}");
    fold(
        "function f() { while(x) { if (y) { throw 5 }} throw 5}",
        "function f() { while(x) { if (y) { break }} throw 5}");
    // It doesn't matter if x is changed between them. We are still throwing
    // x at whatever value x currently holds. The whole x = 1 is skipped.
    fold(
        "function f() { while(x) { if (y) { throw x } x = 1} throw x}",
        "function f() { while(x) { if (y) { break } x = 1} throw x}");
    fold(
        "function f() { while(x) { if (y) { throw x } throw x} throw x}",
        "function f() { while(x) { if (y) {} break }throw x}");
    // A break here only breaks out of the inner loop.
    foldSame("function f() { while(x) { while (y) { throw Error } } }");
    foldSame("function f() { while(1) { throw 7} throw 5}");
    foldSame(
        """
        function f() {
        try { while(x) {throw f()}} catch (e) { } throw f()}
        """);
    foldSame(
        """
        function f() {
        try { while(x) {throw f()}} finally {alert(1)} throw f()}
        """);
    // Both throws have the same handler
    fold(
        """
        function f() {
        try { while(x) { throw f() } throw f() } catch (e) { } }
        """,
        """
        function f() {
        try { while(x) { break } throw f() } catch (e) { } }
        """);
    // We can't fold this because it'll change the order of when foo is called.
    foldSame(
        """
        function f() {
        try { while(x) { throw foo() } } finally { alert(1) }
        throw foo()}
        """);
    // This is fine, we have no side effect in the throw value.
    fold(
        """
        function f() {
        try { while(x) { throw 1 } } finally { alert(1) } throw 1}
        """,
        """
        function f() {
        try { while(x) { break } } finally { alert(1) } throw 1}
        """);
    foldSame("function f() { try{ throw a } finally { a = 2 } throw a; }");
    fold(
        "function f() { switch(a){ case 1: throw a; default: g();} throw a;}",
        "function f() { switch(a){ case 1: break; default: g();} throw a; }");
  }
  /** A return that duplicates the function's trailing return is removed when safe. */
  @Test
  public void testRemoveDuplicateReturn() {
    late = false;
    enableNormalize();
    fold("function f() { return; }", "function f(){}");
    foldSame("function f() { return a; }");
    fold("function f() { if (x) { return a } return a; }", "function f() { if (x) {} return a; }");
    foldSame("function f() { try { if (x) { return a } } catch(e) {} return a; }");
    foldSame("function f() { try { if (x) {} } catch(e) {} return 1; }");
    // finally clauses may have side effects
    foldSame("function f() { try { if (x) { return a } } finally { a++ } return a; }");
    // but they don't matter if the result doesn't have side effects and can't
    // be affected by side effects.
    fold(
        "function f() { try { if (x) { return 1 } } finally {} return 1; }",
        "function f() { try { if (x) {} } finally {} return 1; }");
    fold(
        "function f() { switch(a){ case 1: return a; } return a; }",
        "function f() { switch(a){ case 1: } return a; }");
    fold(
        """
        function f() { switch(a){
        case 1: return a; case 2: return a; } return a; }
        """,
        """
        function f() { switch(a){
        case 1: break; case 2: } return a; }
        """);
  }
@Test
public void testRemoveDuplicateThrow() {
late = false;
enableNormalize();
foldSame("function f() { throw a; }");
fold("function f() { if (x) { throw a } throw a; }", "function f() { if (x) {} throw a; }");
foldSame("function f() { try { if (x) {throw a} } catch(e) {} throw a; }");
foldSame("function f() { try { if (x) {throw 1} } catch(e) {f()} throw 1; }");
foldSame("function f() { try { if (x) {throw 1} } catch(e) {f()} throw 1; }");
foldSame("function f() { try { if (x) {throw 1} } catch(e) {throw 1}}");
fold(
"function f() { try { if (x) {throw 1} } catch(e) {throw 1} throw 1; }",
"function f() { try { if (x) {throw 1} } catch(e) {} throw 1; }");
// finally clauses may have side effects
foldSame("function f() { try { if (x) { throw a } } finally { a++ } throw a; }");
// but they don't matter if the result doesn't have side effects and can't
// be affect by side-effects.
fold(
"function f() { try { if (x) { throw 1 } } finally {} throw 1; }",
"function f() { try { if (x) {} } finally {} throw 1; }");
fold(
"function f() { switch(a){ case 1: throw a; } throw a; }",
"function f() { switch(a){ case 1: } throw a; }");
fold(
"""
function f() { switch(a){
case 1: throw a; case 2: throw a; } throw a; }
""",
"function f() { switch(a){ case 1: break; case 2: } throw a; }");
}
  /** Nested {@code if}s with no else collapse into a single conjunction. */
  @Test
  public void testNestedIfCombine() {
    fold("if(x)if(y){while(1){}}", "if(x&&y){while(1){}}");
    fold("if(x||z)if(y){while(1){}}", "if((x||z)&&y){while(1){}}");
    fold("if(x)if(y||z){while(1){}}", "if((x)&&(y||z)){while(1){}}");
    foldSame("if(x||z)if(y||z){while(1){}}");
    fold("if(x)if(y){if(z){while(1){}}}", "if(x&&(y&&z)){while(1){}}");
  }
  // See: http://blickly.github.io/closure-compiler-issues/#291
  @Test
  public void testIssue291() {
    fold("if (true) { f.onchange(); }", "if (1) f.onchange();");
    foldSame("if (f) { f.onchange(); }");
    foldSame("if (f) { f.bar(); } else { f.onchange(); }");
    fold("if (f) { f.bonchange(); }", "f && f.bonchange();");
    foldSame("if (f) { f['x'](); }");
    // optional versions
    fold("if (true) { f?.onchange(); }", "if (1) f?.onchange();");
    foldSame("if (f) { f?.onchange(); }");
    foldSame("if (f) { f?.bar(); } else { f?.onchange(); }");
    fold("if (f) { f?.bonchange(); }", "f && f?.bonchange();");
    foldSame("if (f) { f?.['x'](); }");
  }
  /** Side-effect-free object literals in condition position reduce to a truthy constant. */
  @Test
  public void testObjectLiteral() {
    test("({})", "1");
    test("({a:1})", "1");
    testSame("({a:foo()})");
    testSame("({'a':foo()})");
  }
  /** Same as testObjectLiteral, for array literals. */
  @Test
  public void testArrayLiteral() {
    test("([])", "1");
    test("([1])", "1");
    test("([a])", "1");
    testSame("([foo()])");
  }
  /** "else" is dropped when the "if" branch always exits. ("Cause" looks like a typo for "Clause".) */
  @Test
  public void testRemoveElseCause() {
    test(
        """
        function f() {
        if(x) return 1;
        else if(x) return 2;
        else if(x) return 3 }
        """,
        """
        function f() {
        if(x) return 1;
        { if(x) return 2;
        { if(x) return 3 } } }
        """);
  }
  @Test
  public void testRemoveElseCause1() {
    test("function f() { if (x) throw 1; else f() }", "function f() { if (x) throw 1; { f() } }");
  }
  @Test
  public void testRemoveElseCause2() {
    test("function f() { if (x) return 1; else f() }", "function f() { if (x) return 1; { f() } }");
    test("function f() { if (x) return; else f() }", "function f() { if (x) {} else { f() } }");
    // This case is handled by minimize exit points.
    testSame("function f() { if (x) return; f() }");
  }
  @Test
  public void testRemoveElseCause3() {
    testSame("function f() { a:{if (x) break a; else f() } }");
    testSame("function f() { if (x) { a:{ break a } } else f() }");
    testSame("function f() { if (x) a:{ break a } else f() }");
  }
  @Test
  public void testRemoveElseCause4() {
    testSame("function f() { if (x) { if (y) { return 1; } } else f() }");
  }
  /** if/else pairs that assign through side-effecting lvalues still collapse to hooks. */
  @Test
  public void testIssue925() {
    test(
        """
        if (x[--y] === 1) {
        x[y] = 0;
        } else {
        x[y] = 1;
        }
        """,
        "(x[--y] === 1) ? x[y] = 0 : x[y] = 1;");
    test(
        """
        if (x[--y]) {
        a = 0;
        } else {
        a = 1;
        }
        """,
        "a = (x[--y]) ? 0 : 1;");
    test(
        """
        if (x?.[--y]) {
        a = 0;
        } else {
        a = 1;
        }
        """,
        "a = (x?.[--y]) ? 0 : 1;");
    test("if (x++) { x += 2 } else { x += 3 }", "x++ ? x += 2 : x += 3");
    test("if (x++) { x = x + 2 } else { x = x + 3 }", "x = x++ ? x + 2 : x + 3");
  }
  /** With type checking on, ==/!= against null or 0 are left unchanged by default. */
  @Test
  public void testCoercionSubstitution_disabled() {
    enableTypeCheck();
    testSame("var x = {}; if (x != null) throw 'a';");
    testSame("var x = {}; var y = x != null;");
    testSame("var x = 1; if (x != 0) throw 'a';");
    testSame("var x = 1; var y = x != 0;");
  }
  @Test
  public void testCoercionSubstitution_booleanResult0() {
    enableTypeCheck();
    testSame("var x = {}; var y = x != null;");
  }
  @Test
  public void testCoercionSubstitution_booleanResult1() {
    enableTypeCheck();
    testSame("var x = {}; var y = x == null;");
    testSame("var x = {}; var y = x !== null;");
    testSame("var x = undefined; var y = x !== null;");
    testSame("var x = {}; var y = x === null;");
    testSame("var x = undefined; var y = x === null;");
    testSame("var x = 1; var y = x != 0;");
    testSame("var x = 1; var y = x == 0;");
    testSame("var x = 1; var y = x !== 0;");
    testSame("var x = 1; var y = x === 0;");
  }
@Test
public void testCoercionSubstitution_if() {
enableTypeCheck();
test("var x = {};\nif (x != null) throw 'a';\n", "var x={}; if (x!=null) throw 'a'");
testSame("var x = {};\nif (x == null) throw 'a';\n");
testSame("var x = {};\nif (x !== null) throw 'a';\n");
testSame("var x = {};\nif (x === null) throw 'a';\n");
testSame("var x = {};\nif (null != x) throw 'a';\n");
testSame("var x = {};\nif (null == x) throw 'a';\n");
testSame("var x = {};\nif (null !== x) throw 'a';\n");
testSame("var x = {};\nif (null === x) throw 'a';\n");
testSame("var x = 1;\nif (x != 0) throw 'a';\n");
testSame("var x = 1;\nif (x != 0) throw 'a';\n");
testSame("var x = 1;\nif (x == 0) throw 'a';\n");
testSame("var x = 1;\nif (x !== 0) throw 'a';\n");
testSame("var x = 1;\nif (x === 0) throw 'a';\n");
testSame("var x = 1;\nif (0 != x) throw 'a';\n");
testSame("var x = 1;\nif (0 == x) throw 'a';\n");
testSame("var x = 1;\nif (0 !== x) throw 'a';\n");
testSame("var x = 1;\nif (0 === x) throw 'a';\n");
testSame("var x = NaN;\nif (0 === x) throw 'a';\n");
testSame("var x = NaN;\nif (x === 0) throw 'a';\n");
}
  @Test
  public void testCoercionSubstitution_expression() {
    enableTypeCheck();
    testSame("var x = {}; x != null && alert('b');");
    testSame("var x = 1; x != 0 && alert('b');");
  }
  @Test
  public void testCoercionSubstitution_hook() {
    enableTypeCheck();
    testSame(
        """
        var x = {};
        var y = x != null ? 1 : 2;
        """);
    testSame(
        """
        var x = 1;
        var y = x != 0 ? 1 : 2;
        """);
  }
  /** {@code !(x != null)} still simplifies to {@code x == null} under type checking. */
  @Test
  public void testCoercionSubstitution_not() {
    enableTypeCheck();
    test(
        "var x = {};\nvar y = !(x != null) ? 1 : 2;\n",
        "var x = {};\nvar y = (x == null) ? 1 : 2;\n");
    test("var x = 1;\nvar y = !(x != 0) ? 1 : 2;\n", "var x = 1;\nvar y = x == 0 ? 1 : 2;\n");
  }
  @Test
  public void testCoercionSubstitution_while() {
    enableTypeCheck();
    testSame("var x = {}; while (x != null) throw 'a';");
    testSame("var x = 1; while (x != 0) throw 'a';");
  }
  @Test
  public void testCoercionSubstitution_unknownType() {
    enableTypeCheck();
    testSame("var x = /** @type {?} */ ({});\nif (x != null) throw 'a';\n");
    testSame("var x = /** @type {?} */ (1);\nif (x != 0) throw 'a';\n");
  }
  @Test
  public void testCoercionSubstitution_allType() {
    enableTypeCheck();
    testSame("var x = /** @type {*} */ ({});\nif (x != null) throw 'a';\n");
    testSame("var x = /** @type {*} */ (1);\nif (x != 0) throw 'a';\n");
  }
  @Test
  public void testCoercionSubstitution_primitivesVsNull() {
    enableTypeCheck();
    testSame("var x = 0;\nif (x != null) throw 'a';\n");
    testSame("var x = '';\nif (x != null) throw 'a';\n");
    testSame("var x = false;\nif (x != null) throw 'a';\n");
  }
  @Test
  public void testCoercionSubstitution_nonNumberVsZero() {
    enableTypeCheck();
    testSame("var x = {};\nif (x != 0) throw 'a';\n");
    testSame("var x = '';\nif (x != 0) throw 'a';\n");
    testSame("var x = false;\nif (x != 0) throw 'a';\n");
  }
  @Test
  public void testCoercionSubstitution_boxedNumberVsZero() {
    enableTypeCheck();
    testSame("var x = new Number(0);\nif (x != 0) throw 'a';\n");
  }
  @Test
  public void testCoercionSubstitution_boxedPrimitives() {
    enableTypeCheck();
    testSame("var x = new Number(); if (x != null) throw 'a';");
    testSame("var x = new String(); if (x != null) throw 'a';");
    testSame("var x = new Boolean();\nif (x != null) throw 'a';");
  }
  /** {@code new.target} is a valid hook condition; the if/else collapses normally. */
  @Test
  public void testMinimizeIfWithNewTargetCondition() {
    // Related to https://github.com/google/closure-compiler/issues/3097
    test(
        """
        function x() {
        if (new.target) {
        return 1;
        } else {
        return 2;
        }
        }
        """,
        """
        function x() {
        return new.target ? 1 : 2;
        }
        """);
  }
}
|
google/closure-compiler | 36,131 | test/com/google/javascript/jscomp/RenameVarsTest.java | /*
* Copyright 2005 The Closure Compiler Authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.javascript.jscomp;
import static com.google.common.base.Preconditions.checkArgument;
import static com.google.common.truth.Truth.assertThat;
import static com.google.common.truth.Truth.assertWithMessage;
import static com.google.javascript.jscomp.deps.ModuleLoader.LOAD_WARNING;
import static com.google.javascript.rhino.testing.NodeSubject.assertNode;
import com.google.common.base.Joiner;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.ImmutableSet;
import com.google.javascript.jscomp.deps.ModuleLoader.ResolutionMode;
import com.google.javascript.rhino.Node;
import com.google.javascript.rhino.testing.NodeSubject;
import java.util.ArrayList;
import java.util.HashSet;
import java.util.List;
import org.jspecify.annotations.Nullable;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.junit.runners.JUnit4;
/** Tests for {@link RenameVars}. */
@RunWith(JUnit4.class)
public final class RenameVarsTest extends CompilerTestCase {
  private static final String DEFAULT_PREFIX = "";
  // Required prefix for generated names ("" = none); exercised by testRenameWithPrefix*.
  private String prefix = DEFAULT_PREFIX;
  // Rename map carried over from a "previous" compile, for the stable-rename tests.
  private VariableMap previouslyUsedMap = new VariableMap(ImmutableMap.<String, String>of());
  // The pass under test; kept so tests can read back the map it produced.
  private RenameVars renameVars;
  private boolean withClosurePass = false;
  private boolean localRenamingOnly = false;
  private boolean useGoogleCodingConvention = true;
  private boolean generatePseudoNames = false;
  private boolean preferStableNames = false;
  private boolean withNormalize = false;
  // NameGenerator to use, or null for a default.
  private @Nullable DefaultNameGenerator nameGenerator = null;
@Override
protected CodingConvention getCodingConvention() {
if (useGoogleCodingConvention) {
return new GoogleCodingConvention();
} else {
return CodingConventions.getDefault();
}
}
@Override
protected CompilerPass getProcessor(Compiler compiler) {
CompilerPass pass;
if (withClosurePass) {
pass = new ClosurePassAndRenameVars(compiler);
} else if (nameGenerator != null) {
pass =
renameVars =
new RenameVars(
compiler,
prefix,
localRenamingOnly,
generatePseudoNames,
preferStableNames,
previouslyUsedMap,
ImmutableSet.of(),
null,
nameGenerator);
} else {
pass =
renameVars =
new RenameVars(
compiler,
prefix,
localRenamingOnly,
generatePseudoNames,
preferStableNames,
previouslyUsedMap,
ImmutableSet.of(),
null,
new DefaultNameGenerator());
}
if (withNormalize) {
// Don't use the standard CompilerTestCase normalization options
// as renaming is a post denormalization operation, but we do still
// want to run the normal normalizations on the input in some cases.
pass = new NormalizePassWrapper(compiler, pass);
}
return pass;
}
  @Override
  @Before
  public void setUp() throws Exception {
    super.setUp();
    disableValidateAstChangeMarking();
    // Reset every per-test knob so state can't leak between tests.
    previouslyUsedMap = new VariableMap(ImmutableMap.<String, String>of());
    prefix = DEFAULT_PREFIX;
    withClosurePass = false;
    withNormalize = false;
    localRenamingOnly = false;
    generatePseudoNames = false;
    preferStableNames = false;
    nameGenerator = null;
  }
  /** Basic global/param renaming, plus a sanity check of source info on the rewritten AST. */
  @Test
  public void testRenameSimple() {
    test(
        """
        function foo(v1, v2) {
          return v1;
        }
        foo();
        """,
        """
        function a(b, c) {
          return b;
        }
        a();
        """);
    // Do a sanity check on the source info
    final Node lastScript =
        getLastCompiler()
            .getRoot()
            .getLastChild() // first child is externs tree, last / second is sources
            .getLastChild();
    final String lastScriptName = lastScript.getSourceFileName();
    // function foo(v1, v2) { ... }
    final NodeSubject fnSubject =
        assertNode(lastScript.getFirstChild())
            .isFunction()
            .hasSourceFileName(lastScriptName)
            .hasLineno(1)
            .hasCharno(0)
            .hasLength(37);
    // function foo(v1, v2) {
    //          ^^^
    fnSubject
        .hasFirstChildThat()
        .isName("a")
        .hasSourceFileName(lastScriptName)
        .hasLineno(1)
        .hasCharno(9)
        .hasLength(3);
    // function foo(v1, v2) {
    //             ^^^^^^^^
    NodeSubject paramListSubject =
        fnSubject
            .hasSecondChildThat()
            .isParamList()
            .hasSourceFileName(lastScriptName)
            .hasLineno(1)
            .hasCharno(12)
            .hasLength(8)
            .hasXChildren(2);
    // function foo(v1, v2) {
    //              ^^
    paramListSubject
        .hasFirstChildThat()
        .isName("b")
        .hasSourceFileName(lastScriptName)
        .hasLineno(1)
        .hasCharno(13)
        .hasLength(2);
    // function foo(v1, v2) {
    //                  ^^
    paramListSubject
        .hasSecondChildThat()
        .isName("c")
        .hasSourceFileName(lastScriptName)
        .hasLineno(1)
        .hasCharno(17)
        .hasLength(2);
    // { ... }
    NodeSubject fnBodySubject =
        fnSubject
            .hasLastChildThat()
            .isBlock()
            .hasSourceFileName(lastScriptName)
            .hasLineno(1)
            .hasCharno(21)
            .hasLength(16);
    // return v1;
    NodeSubject returnSubject =
        fnBodySubject
            .hasOneChildThat()
            .isReturn()
            .hasSourceFileName(lastScriptName)
            .hasLineno(2)
            .hasCharno(2)
            .hasLength(10);
    // return v1;
    //        ^^
    returnSubject.hasOneChildThat().isName("b").hasLineno(2).hasCharno(9).hasLength(2);
    // foo();
    assertNode(lastScript)
        .hasLastChildThat()
        .isExprResult() // foo(); (statement)
        .hasSourceFileName(lastScriptName)
        .hasLineno(4)
        .hasCharno(0)
        .hasLength(6)
        .hasOneChildThat() // foo() (expression)
        .isCall()
        .hasSourceFileName(lastScriptName)
        .hasLineno(4)
        .hasCharno(0)
        .hasLength(5)
        .hasOneChildThat() // foo (function name)
        .isName("a")
        .hasLineno(4)
        .hasCharno(0)
        .hasLength(3);
  }
  @Test
  public void testRenameGlobals() {
    test(
        "var Foo; var Bar, y; function x() { Bar++; }", //
        "var a; var b, c; function d() { b++; }");
  }
  /** Locals in sibling scopes may reuse the same short names. */
  @Test
  public void testRenameLocals() {
    test(
        "(function (v1, v2) {}); (function (v3, v4) {});",
        "(function ( a, b) {}); (function ( a, b) {});");
    test(
        "function f1(v1, v2) {}; function f2(v3, v4) {};",
        "function c( a, b) {}; function d( a, b) {};");
  }
  @Test
  public void testRenameLocals_let() {
    test(
        "(function () { let var1 = 0; let another = 1; });",
        "(function () { let a = 0; let b = 1; });");
  }
  @Test
  public void testRenameLocals_const() {
    test(
        "(function () { const var1 = 0; const another = 1; });",
        "(function () { const a = 0; const b = 1; });");
  }
  @Test
  public void testRenameParamsWithLeadingUnderscores() {
    test("(function (_v1, _v2) {});", "(function (a, b) {});");
  }
  /** With preferStableNames, already-minimal names are kept as-is. */
  @Test
  public void testRenameLocalsToSame() {
    preferStableNames = true;
    testSame("(function(a) {})");
    testSame("(function(a, b) {})");
    testSame("(function(a, b, c) {})");
    testSame("(function() { var a; })");
    testSame("(function() { var a, b; })");
    testSame("(function() { var a, b, c; })");
  }
  /** Redeclared globals get the same renamed identifier for every declaration. */
  @Test
  public void testRenameRedeclaredGlobals() {
    test(
        """
        function f1(v1, v2) {f1()};
        /** @suppress {duplicate} */
        function f1(v3, v4) {f1()};
        """,
        """
        function a(b, c) {a()};
        /** @suppress {duplicate} */
        function a(b, c) {a()};
        """);
    localRenamingOnly = true;
    test(
        """
        function f1(v1, v2) {f1()};
        /** @suppress {duplicate} */
        function f1(v3, v4) {f1()};
        """,
        """
        function f1(a, b) {f1()};
        /** @suppress {duplicate} */
        function f1(a, b) {f1()};
        """);
  }
  @Test
  public void testRecursiveFunctions1() {
    test(
        "var walk = function walk(node, aFunction) { walk(node, aFunction); };",
        "var a = function a( b, c) { a( b, c); };");
    localRenamingOnly = true;
    test(
        "var walk = function walk(node, aFunction) { walk(node, aFunction); };",
        "var walk = function walk( a, b) { walk( a, b); };");
  }
  @Test
  public void testRenameLocalsClashingWithGlobals() {
    test(
        "function a(v1, v2) {return v1;} a();", //
        "function a( b, c) {return b;} a();");
  }
  @Test
  public void testRenameNested() {
    test(
        "function f1(v1, v2) { (function(v3, v4) {}) }",
        "function a( b, c) { (function( d, e) {}) }");
    test(
        "function f1(v1, v2) { function f2(v3, v4) {} }",
        "function a( b, c) { function d( e, f) {} }");
  }
  @Test
  public void testBleedingRecursiveFunctions1() {
    // On IE, bleeding functions will interfere with each other if
    // they are in the same scope. In the below example, we want to be
    // sure that a and b get separate names.
    test(
        """
        var x = function a(x) { return x ? 1 : a(1); };
        var y = function b(x) { return x ? 2 : b(2); };
        """,
        """
        var c = function b(a) { return a ? 1 : b(1); };
        var e = function d(a) { return a ? 2 : d(2); };
        """);
  }
  @Test
  public void testBleedingRecursiveFunctions2() {
    test(
        """
        function f() {
          var x = function a(x) { return x ? 1 : a(1); };
          var y = function b(x) { return x ? 2 : b(2); };
        }
        """,
        """
        function d() {
          var e = function a(b) { return b ? 1 : a(1); };
          var f = function c(a) { return a ? 2 : c(2); };
        }
        """);
  }
  @Test
  public void testBleedingRecursiveFunctions3() {
    test(
        """
        function f() {
          var x = function a(x) { return x ? 1 : a(1); };
          var y = function b(x) { return x ? 2 : b(2); };
          var z = function c(x) { return x ? y : c(2); };
        }
        """,
        """
        function f() {
          var g = function a(c) { return c ? 1 : a(1); };
          var d = function b(a) { return a ? 2 : b(2); };
          var h = function e(b) { return b ? d : e(2); };
        }
        """);
  }
  /** A named function expression's name "bleeds" into blocks too; it must stay distinct. */
  @Test
  public void testBleedingFunctionInBlocks() {
    test(
        """
        if (true) {
          var x = function a(x) {return x;}
        }
        """,
        """
        if (true) {
          var c = function b(a) {return a;}
        }
        """);
  }
  /** Names declared in externs are never renamed. */
  @Test
  public void testRenameWithExterns1() {
    String externs = "var foo;";
    test(
        externs(externs), //
        srcs(" var bar; foo(bar);"),
        expected("var a; foo( a);"));
  }
  /** Generated names must not collide with extern names. */
  @Test
  public void testRenameWithExterns2() {
    String externs = "var a;";
    test(
        externs(externs), //
        srcs(" var b = 5"),
        expected("var b = 5"));
  }
  /** Underscore-prefixed globals are treated as exported under the Google convention. */
  @Test
  public void testDoNotRenameExportedName() {
    testSame("_foo()");
  }
  @Test
  public void testDoNotRenameArguments() {
    testSame("function a() { arguments; }");
  }
  @Test
  public void testRenameWithNameOverlap() {
    testSame("var a = 1; var b = 2; b + b;");
  }
  /** The configured prefix is applied to renamed globals. */
  @Test
  public void testRenameWithPrefix1() {
    prefix = "PRE_";
    test(
        "function Foo(v1, v2) {return v1} Foo();", //
        "function PRE_( a, b) {return a} PRE_();");
    prefix = DEFAULT_PREFIX;
  }
  @Test
  public void testRenameWithPrefix2() {
    prefix = "PRE_";
    test(
        "function Foo(v1, v2) {var v3 = v1 + v2; return v3;} Foo();",
        "function PRE_( a, b) {var c = a + b; return c;} PRE_();");
    prefix = DEFAULT_PREFIX;
  }
  /** A single-letter prefix forces later globals onto two-letter names. */
  @Test
  public void testRenameWithPrefix3() {
    prefix = "a";
    test(
        """
        function Foo() {return 1;}
        function Bar() {
          var a,b,c,d,e,f,g,h,i,j,k,l,m,n,o,p,q,r,s,t,u,v,w,x,y,z,
        A,B,C,D,E,F,G,H,I,J,K,L,M,N,O,P,Q,R,S,T,U,V,W,X,Y,Z,aa,ab;
          Foo();
        } Bar();
        """,
        """
        function a() {return 1;}
        function aa() {
          var b,c,d,e,f,g,h,i,j,k,l,m,n,o,p,q,r,s,t,u,v,w,x,y,z,A,
        B,C,D,E,F,G,H,I,J,K,L,M,N,O,P,Q,R,S,T,U,V,W,X,Y,Z,$,ba,ca;
          a();
        } aa();
        """);
    prefix = DEFAULT_PREFIX;
  }
  /** Short names are handed out in order of occurrence. */
  @Test
  public void testNamingBasedOnOrderOfOccurrence() {
    test(
        "var q,p,m,n,l,k; try { } catch(r) {try {} catch(s) {}}; var t = q + q;",
        "var a,b,c,d,e,f; try { } catch(g) {try {} catch(h) {}}; var i = a + a;");
    test(
        """
        (function(A,B,C,D,E,F,G,H,I,J,K,L,M,N,O,P,Q,R,S,T,U,V,W,X,Y,Z,
        a,b,c,d,e,f,g,h,i,j,k,l,m,n,o,p,q,r,s,t,u,v,w,x,y,z,$){});
        var a4,a3,a2,a1,b4,b3,b2,b1,ab,ac,ad,fg;function foo(){};
        """,
        """
        (function(a,b,c,d,e,f,g,h,i,j,k,l,m,n,o,p,q,r,s,t,u,v,w,x,y,z,
        A,B,C,D,E,F,G,H,I,J,K,L,M,N,O,P,Q,R,S,T,U,V,W,X,Y,Z,$){});
        var aa,ba,ca,da,ea,fa,ga,ha,ia,ja,ka,la;function ma(){};
        """);
  }
  /** Catch-scoped names can be reused across disjoint lifetimes. */
  @Test
  public void testTryCatchLifeTime() {
    test(
        "var q,p,m,n,l,k; (function(r) {}); try { } catch(s) {}; var t = q + q;",
        "var a,c,d,e,f,g; (function(b) {}); try { } catch(b) {}; var h = a + a;");
    test(
        "try {try {} catch(p) {}} catch(s) {};", //
        "try {try {} catch(a) {}} catch(a) {};");
    test(
        """
        try {
          try {
          } catch(p) {
            try {
            } catch(r) {}
          }
        } catch(s) {
          try {
          } catch(q) {}
        };
        """,
        """
        try {
          try {
          } catch(a) {
            try {
            } catch(b) {}
          }
        } catch(a) {
          try {
          } catch(b) {}
        };
        """);
  }
  /** Stable renaming: the produced VariableMap can seed a later compile ("L n" = nth local). */
  @Test
  public void testStableRenameSimple() {
    VariableMap expectedVariableMap = makeVariableMap("Foo", "a", "L 0", "b", "L 1", "c");
    testRenameMap(
        "function Foo(v1, v2) {return v1;} Foo();",
        "function a( b, c) {return b;} a();",
        expectedVariableMap);
    expectedVariableMap = makeVariableMap("Foo", "a", "L 0", "b", "L 1", "c", "L 2", "d");
    testRenameMapUsingOldMap(
        "function Foo(v1, v2, v3) {return v1;} Foo();",
        "function a( b, c, d) {return b;} a();",
        expectedVariableMap);
  }
  @Test
  public void testStableRenameGlobals() {
    VariableMap expectedVariableMap = makeVariableMap("Foo", "a", "Bar", "b", "y", "c", "x", "d");
    testRenameMap(
        "var Foo; var Bar, y; function x() { Bar++; }",
        "var a; var b, c; function d() { b++; }",
        expectedVariableMap);
    expectedVariableMap =
        makeVariableMap("Foo", "a", "Bar", "b", "y", "c", "x", "d", "Baz", "f", "L 0", "e");
    testRenameMapUsingOldMap(
        "var Foo, Baz; var Bar, y; function x(R) { return R + Bar++; }",
        "var a, f; var b, c; function d(e) { return e + b++; }",
        expectedVariableMap);
  }
  @Test
  public void testStableRenameWithPointlesslyAnonymousFunctions() {
    VariableMap expectedVariableMap = makeVariableMap("L 0", "a", "L 1", "b");
    testRenameMap(
        "(function (v1, v2) {}); (function (v3, v4) {});",
        "(function ( a, b) {}); (function ( a, b) {});",
        expectedVariableMap);
    expectedVariableMap = makeVariableMap("L 0", "a", "L 1", "b", "L 2", "c");
    testRenameMapUsingOldMap(
        "(function (v0, v1, v2) {}); (function (v3, v4) {});",
        "(function ( a, b, c) {}); (function ( a, b) {});",
        expectedVariableMap);
  }
  /** Reusing the previous compile's map keeps prior assignments stable. */
  @Test
  public void testStableRenameLocalsClashingWithGlobals() {
    test("function a(v1, v2) {return v1;} a();", "function a(b, c) {return b;} a();");
    previouslyUsedMap = renameVars.getVariableMap();
    test(
        "function bar(){return;}function a(v1, v2) {return v1;} a();",
        "function d(){return;}function a( b, c) {return b;} a();");
  }
  @Test
  public void testStableRenameNested() {
    VariableMap expectedVariableMap =
        makeVariableMap("f1", "a", "L 0", "b", "L 1", "c", "L 2", "d", "L 3", "e");
    testRenameMap(
        "function f1(v1, v2) { (function(v3, v4) {}) }",
        "function a( b, c) { (function( d, e) {}) }",
        expectedVariableMap);
    expectedVariableMap =
        makeVariableMap("f1", "a", "L 0", "b", "L 1", "c", "L 2", "d", "L 3", "e", "L 4", "f");
    testRenameMapUsingOldMap(
        "function f1(v1, v2) { (function(v3, v4, v5) {}) }",
        "function a( b, c) { (function( d, e, f) {}) }",
        expectedVariableMap);
  }
@Test
public void testStableRenameWithExterns1() {
String externs = "var foo;";
test(
externs(externs), //
srcs(" var bar; foo(bar);"),
expected("var a; foo( a);"));
previouslyUsedMap = renameVars.getVariableMap();
test(
externs(externs), //
srcs(" var bar, baz; foo(bar, baz);"),
expected("var a, b; foo( a, b);"));
}
@Test
public void testStableRenameWithExterns2() {
String externs = "var a;";
test(
externs(externs), //
srcs(" var b = 5"),
expected("var b = 5"));
previouslyUsedMap = renameVars.getVariableMap();
test(
externs(externs), //
srcs(" var b = 5, catty = 9;"), //
expected("var b = 5, c = 9;"));
}
@Test
public void testStableRenameWithNameOverlap() {
testSame("var a = 1; var b = 2; b + b;");
previouslyUsedMap = renameVars.getVariableMap();
testSame("var a = 1; var c, b = 2; b + b;");
}
@Test
public void testStableRenameWithAnonymousFunctions() {
VariableMap expectedVariableMap = makeVariableMap("L 0", "a", "foo", "b");
testRenameMap(
"function foo(bar){return bar;}foo(function(h){return h;});",
"function b( a){return a;} b(function(a){return a;});",
expectedVariableMap);
expectedVariableMap = makeVariableMap("foo", "b", "L 0", "a", "L 1", "c");
testRenameMapUsingOldMap(
"function foo(bar) {return bar;}foo(function(g,h) {return g+h;});",
"function b(a){return a}b(function(a,c){return a+c;})",
expectedVariableMap);
}
@Test
public void testStableRenameSimpleExternsChanges() {
VariableMap expectedVariableMap = makeVariableMap("Foo", "a", "L 0", "b", "L 1", "c");
testRenameMap(
"function Foo(v1, v2) {return v1;} Foo();",
"function a( b, c) {return b;} a();",
expectedVariableMap);
expectedVariableMap = makeVariableMap("L 0", "b", "L 1", "c", "L 2", "a");
String externs = "var Foo;";
testRenameMapUsingOldMap(
externs,
"function Foo(v1, v2, v0) {return v1;} Foo();",
"function Foo( b, c , a) {return b;} Foo();",
expectedVariableMap);
}
@Test
public void testStableRenameSimpleLocalNameExterned() {
test(
"function Foo(v1, v2) {return v1;} Foo();", //
"function a( b, c) {return b;} a();");
previouslyUsedMap = renameVars.getVariableMap();
String externs = "var b;";
test(
externs(externs),
srcs("function Foo(v1, v2) {return v1;} Foo(b);"),
expected("function a(d, c) {return d;} a(b);"));
}
@Test
public void testStableRenameSimpleGlobalNameExterned() {
test(
"function Foo(v1, v2) {return v1;} Foo();", //
"function a( b, c) {return b;} a();");
previouslyUsedMap = renameVars.getVariableMap();
String externs = "var Foo;";
test(
externs(externs),
srcs("function Foo(v1, v2, v0) {return v1;} Foo();"),
expected("function Foo(b, c, a) {return b;} Foo();"));
}
@Test
public void testStableRenameWithPrefix1AndUnstableLocalNames() {
prefix = "PRE_";
test(
"function Foo(v1, v2) {return v1} Foo();", //
"function PRE_( a, b) {return a} PRE_();");
previouslyUsedMap = renameVars.getVariableMap();
prefix = "PRE_";
test(
"function Foo(v0, v1, v2) {return v1} Foo();", //
"function PRE_( a, b, c) {return b} PRE_();");
}
@Test
public void testStableRenameWithPrefix2() {
prefix = "a";
test(
"""
function Foo() {return 1;}
function Bar() {
var a,b,c,d,e,f,g,h,i,j,k,l,m,n,o,p,q,r,s,t,u,v,w,x,y,z,
A,B,C,D,E,F,G,H,I,J,K,L,M,N,O,P,Q,R,S,T,U,V,W,X,Y,Z,aa,ab;
Foo();
} Bar();
""",
"""
function a() {return 1;}
function aa() {
var b,c,d,e,f,g,h,i,j,k,l,m,n,o,p,q,r,s,t,u,v,w,x,y,z,A,
B,C,D,E,F,G,H,I,J,K,L,M,N,O,P,Q,R,S,T,U,V,W,X,Y,Z,$,ba,ca;
a();
} aa();
""");
previouslyUsedMap = renameVars.getVariableMap();
prefix = "a";
test(
"""
function Foo() {return 1;}
function Baz() {return 1;}
function Bar() {
var a,b,c,d,e,f,g,h,i,j,k,l,m,n,o,p,q,r,s,t,u,v,w,x,y,z,
A,B,C,D,E,F,G,H,I,J,K,L,M,N,O,P,Q,R,S,T,U,V,W,X,Y,Z,aa,ab;
Foo();
} Bar();
""",
"""
function a() {return 1;}
function ab() {return 1;}
function aa() {
var b,c,d,e,f,g,h,i,j,k,l,m,n,o,p,q,r,s,t,u,v,w,x,y,z,A,
B,C,D,E,F,G,H,I,J,K,L,M,N,O,P,Q,R,S,T,U,V,W,X,Y,Z,$,ba,ca;
a();
} aa();
""");
}
@Test
public void testContrivedExampleWhereConsistentRenamingIsWorse() {
previouslyUsedMap = makeVariableMap("Foo", "LongString", "L 0", "b", "L 1", "c");
test(
"function Foo(v1, v2) {return v1;} Foo();",
"function LongString( b, c) {return b;} LongString();");
previouslyUsedMap = renameVars.getVariableMap();
VariableMap expectedVariableMap = makeVariableMap("Foo", "LongString", "L 0", "b", "L 1", "c");
assertVariableMapsEqual(expectedVariableMap, previouslyUsedMap);
}
@Test
public void testPrevUsedMapWithDuplicates() {
  // Two distinct original names ("Foo" and "Bar") mapped onto the same new
  // name ("z") is an invalid rename map; constructing it must fail with
  // IllegalArgumentException. The original body also called testSame("")
  // on the should-never-reach path, which added nothing — removed.
  try {
    makeVariableMap("Foo", "z", "Bar", "z");
    throw new AssertionError("expected IllegalArgumentException for duplicate new names");
  } catch (IllegalArgumentException expected) {
    // Expected: duplicate target names are rejected at construction time.
  }
}
@Test
public void testExportSimpleSymbolReservesName() {
test(
"var goog, x; goog.exportSymbol('a', x);", //
"var a, b; a.exportSymbol('a', b);");
withClosurePass = true;
test(
"var goog, x; goog.exportSymbol('a', x);", //
"var b, c; b.exportSymbol('a', c);");
}
@Test
public void testExportComplexSymbolReservesName() {
test(
"var goog, x; goog.exportSymbol('a.b', x);", //
"var a, b; a.exportSymbol('a.b', b);");
withClosurePass = true;
test(
"var goog, x; goog.exportSymbol('a.b', x);", //
"var b, c; b.exportSymbol('a.b', c);");
}
@Test
public void testExportToNonStringDoesntExplode() {
withClosurePass = true;
test(
"var goog, a, b; goog.exportSymbol(a, b);", //
"var a, b, c; a.exportSymbol(b, c);");
}
@Test
public void testDollarSignSuperExport1() {
useGoogleCodingConvention = false;
// See http://blickly.github.io/closure-compiler-issues/#32
test(
"var x = function($super,duper,$fantastic){}",
"var c = function($super, a, b){}");
localRenamingOnly = false;
test("var $super = 1", "var a = 1");
useGoogleCodingConvention = true;
test(
"var x = function($super,duper,$fantastic){}", //
"var c = function($super, a, b){}");
}
@Test
public void testDollarSignSuperExport2() {
withNormalize = true;
useGoogleCodingConvention = false;
// See http://blickly.github.io/closure-compiler-issues/#32
test(
"var x = function($super,duper,$fantastic){}; var y = function($super,duper){};",
"var c = function($super, a, b){}; var d = function($super, a){};");
localRenamingOnly = false;
test("var $super = 1", "var a = 1");
useGoogleCodingConvention = true;
test(
"var x = function($super,duper,$fantastic){}; var y = function($super,duper){};",
"var c = function($super, a, b){}; var d = function($super, a){};");
}
@Test
public void testBias() {
nameGenerator =
new DefaultNameGenerator(new HashSet<String>(), "", ImmutableSet.<Character>of());
nameGenerator.favors("AAAAAAAAHH");
test("var x, y", "var A, H");
}
@Test
public void testPseudoNames() {
generatePseudoNames = false;
// See http://blickly.github.io/closure-compiler-issues/#32
test(
"var foo = function(a, b, c){}", //
"var d = function(a, b, c){}");
generatePseudoNames = true;
test(
"var foo = function( a, b, c){}", //
"var $foo$$ = function($a$$, $b$$, $c$$){}");
test(
"var a = function( a, b, c){}", //
"var $a$$ = function($a$$, $b$$, $c$$){}");
}
@Test
public void testArrowFunctions() {
test(
"foo => {return foo + 3;}", //
" a => {return a + 3;}");
test(
"(foo, bar) => {return foo + bar + 3;}", //
"( a, b) => {return a + b + 3;}");
}
@Test
public void testClasses() {
test(
"class fooBar {}", //
"class a {}");
test(
"""
class fooBar {
constructor(foo, bar) {
this.foo = foo;
this.bar = bar;
}
}
var x = new fooBar(2, 3);
""",
"""
class a {
constructor(b, c) {
this.foo = b;
this.bar = c;
}
}
var d = new a(2, 3);
""");
test(
"""
class fooBar {
constructor(foo, bar) {
this.foo = foo;
this.bar = bar;
}
func(x) {
return this.foo + x;
}
}
var x = new fooBar(2,3);
var abcd = x.func(5);
""",
"""
class b {
constructor(a, c) {
this.foo = a;
this.bar = c;
}
func(a) {
return this.foo + a;
}
}
var d = new b(2,3);
var e = d.func(5);
""");
}
@Test
public void testLetConst() {
test(
"let xyz;", //
"let a;");
test(
"const xyz = 1;", //
"const a = 1");
test(
"""
let zyx = 1; {
const xyz = 1;
let zyx = 2;
zyx = 3;
}
let xyz = 'potato';
zyx = 4;
""",
"""
let a = 1; {
const c = 1;
let b = 2;
b = 3;
}
let d = 'potato';
a = 4;
""");
}
@Test
public void testGenerators() {
test(
"""
function* gen() {
var xyz = 3;
yield xyz + 4;
}
gen().next()
""",
"""
function* a() {
var b = 3;
yield b + 4;
}
a().next()
""");
}
@Test
public void testForOf() {
test(
"for (var item of items) {}", //
"for (var a of items) {}");
}
@Test
public void testTemplateStrings() {
test(
"""
var name = 'Foo';
`My name is ${name}`;
""",
"""
var a = 'Foo';
`My name is ${a}`;
""");
}
@Test
public void testArrayDestructuring() {
test(
"var [x, y, z] = [1, 2, 3];", //
"var [a, b, c] = [1, 2, 3];");
}
@Test
public void testObjectDestructuring() {
// TODO(sdh): Teach RenameVars to take advantage of shorthand properties by
// building up a Map from var name strings to property name multisets. We
// should be able to treat this similar to the "previous names" map, where
// we preferentially pick names with the most lined-up properties, provided
// the property names are short (should be easy enough to do the math).
// Note, the same property name could get different var names in different
// scopes, so we probably need to do the comparison per scope.
// Also, this is only relevant if language_out >= ES6.
test(
"""
var obj = {p: 5, h: false};
var {p, h} = obj;
""",
"""
var a = {p: 5, h: false};
var {p: b, h: c} = a;
""");
test(
"""
var obj = {p: 5, h: false};
var {p: x, h: y} = obj;
""",
"""
var a = {p: 5, h: false};
var {p: b, h: c} = a;
""");
}
@Test
public void testDefaultFunction() {
test(
"""
function f(x, y=12) {
return x * y;
}
""",
"""
function c(a, b=12) {
return a * b;
}
""");
}
@Test
public void testRestFunction() {
test(
"""
function f(x, ...y) {
return x * y[0];
}
""",
"""
function c(a, ...b) {
return a * b[0];
}
""");
}
@Test
public void testObjectLiterals() {
test(
"""
var objSuper = {
f: 'potato'
};
var obj = {
__proto__: objSuper,
g: false,
x() {
return super.f;
}
};
obj.x();
""",
"""
var a = {
f: 'potato'
};
var b = {
__proto__: a,
g: false,
x() {
return super.f;
}
};
b.x();
""");
}
@Test
public void testImport1() {
ignoreWarnings(LOAD_WARNING);
test("import name from './other.js'; use(name);", "import a from './other.js'; use(a);");
test(
"import * as name from './other.js'; use(name);",
"import * as a from './other.js'; use(a);");
test(
"import {default as name} from './other.js'; use(name);",
"import {default as a} from './other.js'; use(a);");
}
@Test
public void testImport2() {
ignoreWarnings(LOAD_WARNING);
withNormalize = true;
test(
"import {name} from './other.js'; use(name);",
"import {name as a} from './other.js'; use(a);");
}
/**
 * Renames {@code input} while seeding the pass with the rename map produced by
 * the previous compile, then checks both the output and the resulting map.
 * Delegates to the externs-taking overload (with empty externs) so the seeding
 * logic lives in exactly one place.
 */
private void testRenameMapUsingOldMap(String input, String expected, VariableMap expectedMap) {
  testRenameMapUsingOldMap("", input, expected, expectedMap);
}
/**
 * Renames {@code input} (with the given externs) while seeding the pass with
 * the rename map produced by the previous compile, then checks both the
 * renamed output and the resulting rename map.
 */
private void testRenameMapUsingOldMap(
    String externs, String input, String expected, VariableMap expectedMap) {
  // Capture the map from the prior test() run before compiling again, so the
  // pass reuses previously-assigned names where possible.
  previouslyUsedMap = renameVars.getVariableMap();
  testRenameMap(externs, input, expected, expectedMap);
}
/** Convenience overload of {@link #testRenameMap(String, String, String, VariableMap)} with no externs. */
private void testRenameMap(String input, String expected, VariableMap expectedRenameMap) {
  testRenameMap("", input, expected, expectedRenameMap);
}
/**
 * Compiles {@code input} with the given externs, asserts the renamed output
 * equals {@code expected}, and asserts the rename map produced by the pass
 * equals {@code expectedRenameMap}.
 */
private void testRenameMap(
    String externs, String input, String expected, VariableMap expectedRenameMap) {
  test(externs(externs), srcs(input), expected(expected));
  // renameVars is populated as a side effect of the test() compile above.
  VariableMap renameMap = renameVars.getVariableMap();
  assertVariableMapsEqual(expectedRenameMap, renameMap);
}
@Test
public void testPreferStableNames() {
  preferStableNames = true;
  // Locals in scopes with too many local variables (>1000) should
  // not receive temporary names (eg, 'L 123'). These locals will
  // appear in the name maps with the same name as in the code (eg,
  // 'a0' in this case).
  test(createManyVarFunction(1000), null);
  // Exactly at the threshold: temporary 'L n' keys are still used, so the
  // original name 'a0' does not appear as a key in the map.
  assertThat(renameVars.getVariableMap().lookupNewName("a0")).isNull();
  assertThat(renameVars.getVariableMap().lookupNewName("L 0")).isEqualTo("b");
  // One past the threshold: the map is keyed by the stable original names.
  test(createManyVarFunction(1001), null);
  assertThat(renameVars.getVariableMap().lookupNewName("a0")).isEqualTo("b");
  assertThat(renameVars.getVariableMap().lookupNewName("L 0")).isNull();
  // With {@code preferStableNames} off locals should
  // unconditionally receive temporary names.
  preferStableNames = false;
  test(createManyVarFunction(1000), null);
  assertThat(renameVars.getVariableMap().lookupNewName("a0")).isNull();
  assertThat(renameVars.getVariableMap().lookupNewName("L 0")).isEqualTo("b");
  test(createManyVarFunction(1001), null);
  assertThat(renameVars.getVariableMap().lookupNewName("a0")).isNull();
  assertThat(renameVars.getVariableMap().lookupNewName("L 0")).isEqualTo("b");
}
/**
 * Builds the source of a function declaring {@code numVars} locals:
 * {@code "function foo() { var a0,a1,...,a<n-1>; }"}.
 *
 * <p>Appends directly into a StringBuilder instead of materializing an
 * intermediate ArrayList and joining it with Guava's Joiner, as the
 * original did — same output, one pass, no extra allocation.
 */
private static String createManyVarFunction(int numVars) {
  StringBuilder sb = new StringBuilder("function foo() { var ");
  for (int i = 0; i < numVars; i++) {
    if (i > 0) {
      sb.append(',');
    }
    sb.append('a').append(i);
  }
  return sb.append("; }").toString();
}
/**
 * Builds a {@link VariableMap} from alternating (originalName, newName)
 * pairs; the argument count must therefore be even.
 */
private VariableMap makeVariableMap(String... keyValPairs) {
  checkArgument(keyValPairs.length % 2 == 0);
  ImmutableMap.Builder<String, String> pairs = ImmutableMap.builder();
  int i = 0;
  while (i < keyValPairs.length) {
    pairs.put(keyValPairs[i], keyValPairs[i + 1]);
    i += 2;
  }
  // buildOrThrow rejects duplicate original-name keys outright.
  return new VariableMap(pairs.buildOrThrow());
}
/**
 * Asserts the two rename maps contain exactly the same
 * original-name&nbsp;&rarr;&nbsp;new-name entries. Comparing via Truth on the
 * underlying maps yields a readable diff of missing/unexpected entries.
 */
private static void assertVariableMapsEqual(VariableMap a, VariableMap b) {
  ImmutableMap<String, String> ma = a.getOriginalNameToNewNameMap();
  ImmutableMap<String, String> mb = b.getOriginalNameToNewNameMap();
  assertWithMessage("VariableMaps not equal").that(mb).isEqualTo(ma);
}
/**
 * Compiler pass that runs the Closure provides/requires processing and then
 * variable renaming, feeding the names exported by the Closure pass into
 * RenameVars as reserved names.
 *
 * <p>Deliberately a non-static inner class: it reads the enclosing test's
 * {@code prefix} and {@code previouslyUsedMap} fields and writes the
 * {@code renameVars} field for later assertions.
 */
private class ClosurePassAndRenameVars implements CompilerPass {
  private final Compiler compiler;
  private ClosurePassAndRenameVars(Compiler compiler) {
    this.compiler = compiler;
  }
  @Override
  public void process(Node externs, Node root) {
    // Module metadata must be gathered before the provides/requires pass.
    new GatherModuleMetadata(
            compiler, /* processCommonJsModules= */ false, ResolutionMode.BROWSER)
        .process(externs, root);
    ProcessClosureProvidesAndRequires closurePass =
        new ProcessClosureProvidesAndRequires(compiler, true);
    closurePass.process(externs, root);
    // Exported names from the Closure pass are handed to RenameVars so
    // exported symbols keep stable names; the instance is stashed in the
    // enclosing test so assertions can inspect the resulting VariableMap.
    renameVars =
        new RenameVars(
            compiler,
            prefix,
            false,
            false,
            false,
            previouslyUsedMap,
            ImmutableSet.<Character>of(),
            closurePass.getExportedVariableNames(),
            new DefaultNameGenerator());
    renameVars.process(externs, root);
  }
}
/**
 * Decorator pass that runs AST normalization before delegating to a wrapped
 * pass, for tests that require normalized input (e.g. {@code withNormalize}).
 */
private static class NormalizePassWrapper implements CompilerPass {
  private final Compiler compiler;
  private final CompilerPass wrappedPass;
  private NormalizePassWrapper(Compiler compiler, CompilerPass wrappedPass) {
    this.compiler = compiler;
    this.wrappedPass = wrappedPass;
  }
  @Override
  public void process(Node externs, Node root) {
    // Normalize first so the wrapped pass sees a canonical AST.
    Normalize normalize = Normalize.createNormalizeForOptimizations(compiler);
    normalize.process(externs, root);
    wrappedPass.process(externs, root);
  }
}
}
|
googleapis/google-cloud-java | 37,158 | java-shopping-merchant-accounts/grpc-google-shopping-merchant-accounts-v1beta/src/main/java/com/google/shopping/merchant/accounts/v1beta/UserServiceGrpc.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.shopping.merchant.accounts.v1beta;
import static io.grpc.MethodDescriptor.generateFullMethodName;
/**
*
*
* <pre>
* Service to support user API.
* </pre>
*/
@javax.annotation.Generated(
value = "by gRPC proto compiler",
comments = "Source: google/shopping/merchant/accounts/v1beta/user.proto")
@io.grpc.stub.annotations.GrpcGenerated
public final class UserServiceGrpc {
private UserServiceGrpc() {}
public static final java.lang.String SERVICE_NAME =
"google.shopping.merchant.accounts.v1beta.UserService";
// Static method descriptors that strictly reflect the proto.
// Lazily-initialized descriptor for the unary GetUser RPC; volatile so a
// descriptor published by one thread is visible to all others.
private static volatile io.grpc.MethodDescriptor<
        com.google.shopping.merchant.accounts.v1beta.GetUserRequest,
        com.google.shopping.merchant.accounts.v1beta.User>
    getGetUserMethod;

@io.grpc.stub.annotations.RpcMethod(
    fullMethodName = SERVICE_NAME + '/' + "GetUser",
    requestType = com.google.shopping.merchant.accounts.v1beta.GetUserRequest.class,
    responseType = com.google.shopping.merchant.accounts.v1beta.User.class,
    methodType = io.grpc.MethodDescriptor.MethodType.UNARY)
public static io.grpc.MethodDescriptor<
        com.google.shopping.merchant.accounts.v1beta.GetUserRequest,
        com.google.shopping.merchant.accounts.v1beta.User>
    getGetUserMethod() {
  // Generated double-checked-locking accessor: read the volatile field once
  // into a local, and only take the class lock on first use. The same local
  // name as the field is the generator's convention, not a typo.
  io.grpc.MethodDescriptor<
          com.google.shopping.merchant.accounts.v1beta.GetUserRequest,
          com.google.shopping.merchant.accounts.v1beta.User>
      getGetUserMethod;
  if ((getGetUserMethod = UserServiceGrpc.getGetUserMethod) == null) {
    synchronized (UserServiceGrpc.class) {
      if ((getGetUserMethod = UserServiceGrpc.getGetUserMethod) == null) {
        UserServiceGrpc.getGetUserMethod =
            getGetUserMethod =
                io.grpc.MethodDescriptor
                    .<com.google.shopping.merchant.accounts.v1beta.GetUserRequest,
                        com.google.shopping.merchant.accounts.v1beta.User>
                        newBuilder()
                    .setType(io.grpc.MethodDescriptor.MethodType.UNARY)
                    .setFullMethodName(generateFullMethodName(SERVICE_NAME, "GetUser"))
                    .setSampledToLocalTracing(true)
                    .setRequestMarshaller(
                        io.grpc.protobuf.ProtoUtils.marshaller(
                            com.google.shopping.merchant.accounts.v1beta.GetUserRequest
                                .getDefaultInstance()))
                    .setResponseMarshaller(
                        io.grpc.protobuf.ProtoUtils.marshaller(
                            com.google.shopping.merchant.accounts.v1beta.User
                                .getDefaultInstance()))
                    .setSchemaDescriptor(new UserServiceMethodDescriptorSupplier("GetUser"))
                    .build();
      }
    }
  }
  return getGetUserMethod;
}
private static volatile io.grpc.MethodDescriptor<
com.google.shopping.merchant.accounts.v1beta.CreateUserRequest,
com.google.shopping.merchant.accounts.v1beta.User>
getCreateUserMethod;
@io.grpc.stub.annotations.RpcMethod(
fullMethodName = SERVICE_NAME + '/' + "CreateUser",
requestType = com.google.shopping.merchant.accounts.v1beta.CreateUserRequest.class,
responseType = com.google.shopping.merchant.accounts.v1beta.User.class,
methodType = io.grpc.MethodDescriptor.MethodType.UNARY)
public static io.grpc.MethodDescriptor<
com.google.shopping.merchant.accounts.v1beta.CreateUserRequest,
com.google.shopping.merchant.accounts.v1beta.User>
getCreateUserMethod() {
io.grpc.MethodDescriptor<
com.google.shopping.merchant.accounts.v1beta.CreateUserRequest,
com.google.shopping.merchant.accounts.v1beta.User>
getCreateUserMethod;
if ((getCreateUserMethod = UserServiceGrpc.getCreateUserMethod) == null) {
synchronized (UserServiceGrpc.class) {
if ((getCreateUserMethod = UserServiceGrpc.getCreateUserMethod) == null) {
UserServiceGrpc.getCreateUserMethod =
getCreateUserMethod =
io.grpc.MethodDescriptor
.<com.google.shopping.merchant.accounts.v1beta.CreateUserRequest,
com.google.shopping.merchant.accounts.v1beta.User>
newBuilder()
.setType(io.grpc.MethodDescriptor.MethodType.UNARY)
.setFullMethodName(generateFullMethodName(SERVICE_NAME, "CreateUser"))
.setSampledToLocalTracing(true)
.setRequestMarshaller(
io.grpc.protobuf.ProtoUtils.marshaller(
com.google.shopping.merchant.accounts.v1beta.CreateUserRequest
.getDefaultInstance()))
.setResponseMarshaller(
io.grpc.protobuf.ProtoUtils.marshaller(
com.google.shopping.merchant.accounts.v1beta.User
.getDefaultInstance()))
.setSchemaDescriptor(new UserServiceMethodDescriptorSupplier("CreateUser"))
.build();
}
}
}
return getCreateUserMethod;
}
private static volatile io.grpc.MethodDescriptor<
com.google.shopping.merchant.accounts.v1beta.DeleteUserRequest, com.google.protobuf.Empty>
getDeleteUserMethod;
@io.grpc.stub.annotations.RpcMethod(
fullMethodName = SERVICE_NAME + '/' + "DeleteUser",
requestType = com.google.shopping.merchant.accounts.v1beta.DeleteUserRequest.class,
responseType = com.google.protobuf.Empty.class,
methodType = io.grpc.MethodDescriptor.MethodType.UNARY)
public static io.grpc.MethodDescriptor<
com.google.shopping.merchant.accounts.v1beta.DeleteUserRequest, com.google.protobuf.Empty>
getDeleteUserMethod() {
io.grpc.MethodDescriptor<
com.google.shopping.merchant.accounts.v1beta.DeleteUserRequest,
com.google.protobuf.Empty>
getDeleteUserMethod;
if ((getDeleteUserMethod = UserServiceGrpc.getDeleteUserMethod) == null) {
synchronized (UserServiceGrpc.class) {
if ((getDeleteUserMethod = UserServiceGrpc.getDeleteUserMethod) == null) {
UserServiceGrpc.getDeleteUserMethod =
getDeleteUserMethod =
io.grpc.MethodDescriptor
.<com.google.shopping.merchant.accounts.v1beta.DeleteUserRequest,
com.google.protobuf.Empty>
newBuilder()
.setType(io.grpc.MethodDescriptor.MethodType.UNARY)
.setFullMethodName(generateFullMethodName(SERVICE_NAME, "DeleteUser"))
.setSampledToLocalTracing(true)
.setRequestMarshaller(
io.grpc.protobuf.ProtoUtils.marshaller(
com.google.shopping.merchant.accounts.v1beta.DeleteUserRequest
.getDefaultInstance()))
.setResponseMarshaller(
io.grpc.protobuf.ProtoUtils.marshaller(
com.google.protobuf.Empty.getDefaultInstance()))
.setSchemaDescriptor(new UserServiceMethodDescriptorSupplier("DeleteUser"))
.build();
}
}
}
return getDeleteUserMethod;
}
private static volatile io.grpc.MethodDescriptor<
com.google.shopping.merchant.accounts.v1beta.UpdateUserRequest,
com.google.shopping.merchant.accounts.v1beta.User>
getUpdateUserMethod;
@io.grpc.stub.annotations.RpcMethod(
fullMethodName = SERVICE_NAME + '/' + "UpdateUser",
requestType = com.google.shopping.merchant.accounts.v1beta.UpdateUserRequest.class,
responseType = com.google.shopping.merchant.accounts.v1beta.User.class,
methodType = io.grpc.MethodDescriptor.MethodType.UNARY)
public static io.grpc.MethodDescriptor<
com.google.shopping.merchant.accounts.v1beta.UpdateUserRequest,
com.google.shopping.merchant.accounts.v1beta.User>
getUpdateUserMethod() {
io.grpc.MethodDescriptor<
com.google.shopping.merchant.accounts.v1beta.UpdateUserRequest,
com.google.shopping.merchant.accounts.v1beta.User>
getUpdateUserMethod;
if ((getUpdateUserMethod = UserServiceGrpc.getUpdateUserMethod) == null) {
synchronized (UserServiceGrpc.class) {
if ((getUpdateUserMethod = UserServiceGrpc.getUpdateUserMethod) == null) {
UserServiceGrpc.getUpdateUserMethod =
getUpdateUserMethod =
io.grpc.MethodDescriptor
.<com.google.shopping.merchant.accounts.v1beta.UpdateUserRequest,
com.google.shopping.merchant.accounts.v1beta.User>
newBuilder()
.setType(io.grpc.MethodDescriptor.MethodType.UNARY)
.setFullMethodName(generateFullMethodName(SERVICE_NAME, "UpdateUser"))
.setSampledToLocalTracing(true)
.setRequestMarshaller(
io.grpc.protobuf.ProtoUtils.marshaller(
com.google.shopping.merchant.accounts.v1beta.UpdateUserRequest
.getDefaultInstance()))
.setResponseMarshaller(
io.grpc.protobuf.ProtoUtils.marshaller(
com.google.shopping.merchant.accounts.v1beta.User
.getDefaultInstance()))
.setSchemaDescriptor(new UserServiceMethodDescriptorSupplier("UpdateUser"))
.build();
}
}
}
return getUpdateUserMethod;
}
private static volatile io.grpc.MethodDescriptor<
com.google.shopping.merchant.accounts.v1beta.ListUsersRequest,
com.google.shopping.merchant.accounts.v1beta.ListUsersResponse>
getListUsersMethod;
@io.grpc.stub.annotations.RpcMethod(
fullMethodName = SERVICE_NAME + '/' + "ListUsers",
requestType = com.google.shopping.merchant.accounts.v1beta.ListUsersRequest.class,
responseType = com.google.shopping.merchant.accounts.v1beta.ListUsersResponse.class,
methodType = io.grpc.MethodDescriptor.MethodType.UNARY)
public static io.grpc.MethodDescriptor<
com.google.shopping.merchant.accounts.v1beta.ListUsersRequest,
com.google.shopping.merchant.accounts.v1beta.ListUsersResponse>
getListUsersMethod() {
io.grpc.MethodDescriptor<
com.google.shopping.merchant.accounts.v1beta.ListUsersRequest,
com.google.shopping.merchant.accounts.v1beta.ListUsersResponse>
getListUsersMethod;
if ((getListUsersMethod = UserServiceGrpc.getListUsersMethod) == null) {
synchronized (UserServiceGrpc.class) {
if ((getListUsersMethod = UserServiceGrpc.getListUsersMethod) == null) {
UserServiceGrpc.getListUsersMethod =
getListUsersMethod =
io.grpc.MethodDescriptor
.<com.google.shopping.merchant.accounts.v1beta.ListUsersRequest,
com.google.shopping.merchant.accounts.v1beta.ListUsersResponse>
newBuilder()
.setType(io.grpc.MethodDescriptor.MethodType.UNARY)
.setFullMethodName(generateFullMethodName(SERVICE_NAME, "ListUsers"))
.setSampledToLocalTracing(true)
.setRequestMarshaller(
io.grpc.protobuf.ProtoUtils.marshaller(
com.google.shopping.merchant.accounts.v1beta.ListUsersRequest
.getDefaultInstance()))
.setResponseMarshaller(
io.grpc.protobuf.ProtoUtils.marshaller(
com.google.shopping.merchant.accounts.v1beta.ListUsersResponse
.getDefaultInstance()))
.setSchemaDescriptor(new UserServiceMethodDescriptorSupplier("ListUsers"))
.build();
}
}
}
return getListUsersMethod;
}
/** Creates a new async stub that supports all call types for the service */
public static UserServiceStub newStub(io.grpc.Channel channel) {
  // StubFactory is a functional interface; a constructor reference replaces
  // the anonymous-class boilerplate the code generator emits. The private
  // UserServiceStub constructor is accessible here because both classes are
  // nested in the same top-level class.
  return UserServiceStub.newStub(UserServiceStub::new, channel);
}
/** Creates a new blocking-style stub that supports all types of calls on the service */
public static UserServiceBlockingV2Stub newBlockingV2Stub(io.grpc.Channel channel) {
io.grpc.stub.AbstractStub.StubFactory<UserServiceBlockingV2Stub> factory =
new io.grpc.stub.AbstractStub.StubFactory<UserServiceBlockingV2Stub>() {
@java.lang.Override
public UserServiceBlockingV2Stub newStub(
io.grpc.Channel channel, io.grpc.CallOptions callOptions) {
return new UserServiceBlockingV2Stub(channel, callOptions);
}
};
return UserServiceBlockingV2Stub.newStub(factory, channel);
}
/**
* Creates a new blocking-style stub that supports unary and streaming output calls on the service
*/
public static UserServiceBlockingStub newBlockingStub(io.grpc.Channel channel) {
io.grpc.stub.AbstractStub.StubFactory<UserServiceBlockingStub> factory =
new io.grpc.stub.AbstractStub.StubFactory<UserServiceBlockingStub>() {
@java.lang.Override
public UserServiceBlockingStub newStub(
io.grpc.Channel channel, io.grpc.CallOptions callOptions) {
return new UserServiceBlockingStub(channel, callOptions);
}
};
return UserServiceBlockingStub.newStub(factory, channel);
}
/** Creates a new ListenableFuture-style stub that supports unary calls on the service */
public static UserServiceFutureStub newFutureStub(io.grpc.Channel channel) {
io.grpc.stub.AbstractStub.StubFactory<UserServiceFutureStub> factory =
new io.grpc.stub.AbstractStub.StubFactory<UserServiceFutureStub>() {
@java.lang.Override
public UserServiceFutureStub newStub(
io.grpc.Channel channel, io.grpc.CallOptions callOptions) {
return new UserServiceFutureStub(channel, callOptions);
}
};
return UserServiceFutureStub.newStub(factory, channel);
}
/**
*
*
* <pre>
* Service to support user API.
* </pre>
*/
public interface AsyncService {
/**
*
*
* <pre>
* Retrieves a Merchant Center account user.
* </pre>
*/
default void getUser(
com.google.shopping.merchant.accounts.v1beta.GetUserRequest request,
io.grpc.stub.StreamObserver<com.google.shopping.merchant.accounts.v1beta.User>
responseObserver) {
io.grpc.stub.ServerCalls.asyncUnimplementedUnaryCall(getGetUserMethod(), responseObserver);
}
/**
*
*
* <pre>
* Creates a Merchant Center account user. Executing this method requires
* admin access.
* </pre>
*/
default void createUser(
com.google.shopping.merchant.accounts.v1beta.CreateUserRequest request,
io.grpc.stub.StreamObserver<com.google.shopping.merchant.accounts.v1beta.User>
responseObserver) {
io.grpc.stub.ServerCalls.asyncUnimplementedUnaryCall(getCreateUserMethod(), responseObserver);
}
/**
*
*
* <pre>
* Deletes a Merchant Center account user. Executing this method requires
* admin access.
* </pre>
*/
default void deleteUser(
com.google.shopping.merchant.accounts.v1beta.DeleteUserRequest request,
io.grpc.stub.StreamObserver<com.google.protobuf.Empty> responseObserver) {
io.grpc.stub.ServerCalls.asyncUnimplementedUnaryCall(getDeleteUserMethod(), responseObserver);
}
/**
*
*
* <pre>
* Updates a Merchant Center account user. Executing this method requires
* admin access.
* </pre>
*/
default void updateUser(
com.google.shopping.merchant.accounts.v1beta.UpdateUserRequest request,
io.grpc.stub.StreamObserver<com.google.shopping.merchant.accounts.v1beta.User>
responseObserver) {
io.grpc.stub.ServerCalls.asyncUnimplementedUnaryCall(getUpdateUserMethod(), responseObserver);
}
/**
*
*
* <pre>
* Lists all users of a Merchant Center account.
* </pre>
*/
default void listUsers(
com.google.shopping.merchant.accounts.v1beta.ListUsersRequest request,
io.grpc.stub.StreamObserver<com.google.shopping.merchant.accounts.v1beta.ListUsersResponse>
responseObserver) {
io.grpc.stub.ServerCalls.asyncUnimplementedUnaryCall(getListUsersMethod(), responseObserver);
}
}
/**
 * Base class for the server implementation of the service UserService.
 *
 * <pre>
 * Service to support user API.
 * </pre>
 *
 * <p>All RPC handlers inherit UNIMPLEMENTED-returning defaults from
 * {@link AsyncService}; override the methods your server supports.
 */
public abstract static class UserServiceImplBase
    implements io.grpc.BindableService, AsyncService {
  @java.lang.Override
  public final io.grpc.ServerServiceDefinition bindService() {
    return UserServiceGrpc.bindService(this);
  }
}
  /**
   * A stub to allow clients to do asynchronous rpc calls to service UserService.
   *
   * <pre>
   * Service to support user API.
   * </pre>
   */
  public static final class UserServiceStub
      extends io.grpc.stub.AbstractAsyncStub<UserServiceStub> {
    private UserServiceStub(io.grpc.Channel channel, io.grpc.CallOptions callOptions) {
      super(channel, callOptions);
    }
    @java.lang.Override
    protected UserServiceStub build(io.grpc.Channel channel, io.grpc.CallOptions callOptions) {
      // Invoked by AbstractStub when call options are customized (e.g. withDeadline);
      // returns a fresh stub carrying the new options.
      return new UserServiceStub(channel, callOptions);
    }
    /**
     *
     *
     * <pre>
     * Retrieves a Merchant Center account user.
     * </pre>
     */
    public void getUser(
        com.google.shopping.merchant.accounts.v1beta.GetUserRequest request,
        io.grpc.stub.StreamObserver<com.google.shopping.merchant.accounts.v1beta.User>
            responseObserver) {
      io.grpc.stub.ClientCalls.asyncUnaryCall(
          getChannel().newCall(getGetUserMethod(), getCallOptions()), request, responseObserver);
    }
    /**
     *
     *
     * <pre>
     * Creates a Merchant Center account user. Executing this method requires
     * admin access.
     * </pre>
     */
    public void createUser(
        com.google.shopping.merchant.accounts.v1beta.CreateUserRequest request,
        io.grpc.stub.StreamObserver<com.google.shopping.merchant.accounts.v1beta.User>
            responseObserver) {
      io.grpc.stub.ClientCalls.asyncUnaryCall(
          getChannel().newCall(getCreateUserMethod(), getCallOptions()), request, responseObserver);
    }
    /**
     *
     *
     * <pre>
     * Deletes a Merchant Center account user. Executing this method requires
     * admin access.
     * </pre>
     */
    public void deleteUser(
        com.google.shopping.merchant.accounts.v1beta.DeleteUserRequest request,
        io.grpc.stub.StreamObserver<com.google.protobuf.Empty> responseObserver) {
      io.grpc.stub.ClientCalls.asyncUnaryCall(
          getChannel().newCall(getDeleteUserMethod(), getCallOptions()), request, responseObserver);
    }
    /**
     *
     *
     * <pre>
     * Updates a Merchant Center account user. Executing this method requires
     * admin access.
     * </pre>
     */
    public void updateUser(
        com.google.shopping.merchant.accounts.v1beta.UpdateUserRequest request,
        io.grpc.stub.StreamObserver<com.google.shopping.merchant.accounts.v1beta.User>
            responseObserver) {
      io.grpc.stub.ClientCalls.asyncUnaryCall(
          getChannel().newCall(getUpdateUserMethod(), getCallOptions()), request, responseObserver);
    }
    /**
     *
     *
     * <pre>
     * Lists all users of a Merchant Center account.
     * </pre>
     */
    public void listUsers(
        com.google.shopping.merchant.accounts.v1beta.ListUsersRequest request,
        io.grpc.stub.StreamObserver<com.google.shopping.merchant.accounts.v1beta.ListUsersResponse>
            responseObserver) {
      io.grpc.stub.ClientCalls.asyncUnaryCall(
          getChannel().newCall(getListUsersMethod(), getCallOptions()), request, responseObserver);
    }
  }
  /**
   * A stub to allow clients to do synchronous rpc calls to service UserService.
   *
   * <pre>
   * Service to support user API.
   * </pre>
   */
  public static final class UserServiceBlockingV2Stub
      extends io.grpc.stub.AbstractBlockingStub<UserServiceBlockingV2Stub> {
    private UserServiceBlockingV2Stub(io.grpc.Channel channel, io.grpc.CallOptions callOptions) {
      super(channel, callOptions);
    }
    @java.lang.Override
    protected UserServiceBlockingV2Stub build(
        io.grpc.Channel channel, io.grpc.CallOptions callOptions) {
      // Invoked by AbstractStub when call options are customized; returns a fresh stub.
      return new UserServiceBlockingV2Stub(channel, callOptions);
    }
    /**
     *
     *
     * <pre>
     * Retrieves a Merchant Center account user.
     * </pre>
     */
    public com.google.shopping.merchant.accounts.v1beta.User getUser(
        com.google.shopping.merchant.accounts.v1beta.GetUserRequest request) {
      return io.grpc.stub.ClientCalls.blockingUnaryCall(
          getChannel(), getGetUserMethod(), getCallOptions(), request);
    }
    /**
     *
     *
     * <pre>
     * Creates a Merchant Center account user. Executing this method requires
     * admin access.
     * </pre>
     */
    public com.google.shopping.merchant.accounts.v1beta.User createUser(
        com.google.shopping.merchant.accounts.v1beta.CreateUserRequest request) {
      return io.grpc.stub.ClientCalls.blockingUnaryCall(
          getChannel(), getCreateUserMethod(), getCallOptions(), request);
    }
    /**
     *
     *
     * <pre>
     * Deletes a Merchant Center account user. Executing this method requires
     * admin access.
     * </pre>
     */
    public com.google.protobuf.Empty deleteUser(
        com.google.shopping.merchant.accounts.v1beta.DeleteUserRequest request) {
      return io.grpc.stub.ClientCalls.blockingUnaryCall(
          getChannel(), getDeleteUserMethod(), getCallOptions(), request);
    }
    /**
     *
     *
     * <pre>
     * Updates a Merchant Center account user. Executing this method requires
     * admin access.
     * </pre>
     */
    public com.google.shopping.merchant.accounts.v1beta.User updateUser(
        com.google.shopping.merchant.accounts.v1beta.UpdateUserRequest request) {
      return io.grpc.stub.ClientCalls.blockingUnaryCall(
          getChannel(), getUpdateUserMethod(), getCallOptions(), request);
    }
    /**
     *
     *
     * <pre>
     * Lists all users of a Merchant Center account.
     * </pre>
     */
    public com.google.shopping.merchant.accounts.v1beta.ListUsersResponse listUsers(
        com.google.shopping.merchant.accounts.v1beta.ListUsersRequest request) {
      return io.grpc.stub.ClientCalls.blockingUnaryCall(
          getChannel(), getListUsersMethod(), getCallOptions(), request);
    }
  }
  /**
   * A stub to allow clients to do limited synchronous rpc calls to service UserService.
   *
   * <pre>
   * Service to support user API.
   * </pre>
   */
  public static final class UserServiceBlockingStub
      extends io.grpc.stub.AbstractBlockingStub<UserServiceBlockingStub> {
    private UserServiceBlockingStub(io.grpc.Channel channel, io.grpc.CallOptions callOptions) {
      super(channel, callOptions);
    }
    @java.lang.Override
    protected UserServiceBlockingStub build(
        io.grpc.Channel channel, io.grpc.CallOptions callOptions) {
      // Invoked by AbstractStub when call options are customized; returns a fresh stub.
      return new UserServiceBlockingStub(channel, callOptions);
    }
    /**
     *
     *
     * <pre>
     * Retrieves a Merchant Center account user.
     * </pre>
     */
    public com.google.shopping.merchant.accounts.v1beta.User getUser(
        com.google.shopping.merchant.accounts.v1beta.GetUserRequest request) {
      return io.grpc.stub.ClientCalls.blockingUnaryCall(
          getChannel(), getGetUserMethod(), getCallOptions(), request);
    }
    /**
     *
     *
     * <pre>
     * Creates a Merchant Center account user. Executing this method requires
     * admin access.
     * </pre>
     */
    public com.google.shopping.merchant.accounts.v1beta.User createUser(
        com.google.shopping.merchant.accounts.v1beta.CreateUserRequest request) {
      return io.grpc.stub.ClientCalls.blockingUnaryCall(
          getChannel(), getCreateUserMethod(), getCallOptions(), request);
    }
    /**
     *
     *
     * <pre>
     * Deletes a Merchant Center account user. Executing this method requires
     * admin access.
     * </pre>
     */
    public com.google.protobuf.Empty deleteUser(
        com.google.shopping.merchant.accounts.v1beta.DeleteUserRequest request) {
      return io.grpc.stub.ClientCalls.blockingUnaryCall(
          getChannel(), getDeleteUserMethod(), getCallOptions(), request);
    }
    /**
     *
     *
     * <pre>
     * Updates a Merchant Center account user. Executing this method requires
     * admin access.
     * </pre>
     */
    public com.google.shopping.merchant.accounts.v1beta.User updateUser(
        com.google.shopping.merchant.accounts.v1beta.UpdateUserRequest request) {
      return io.grpc.stub.ClientCalls.blockingUnaryCall(
          getChannel(), getUpdateUserMethod(), getCallOptions(), request);
    }
    /**
     *
     *
     * <pre>
     * Lists all users of a Merchant Center account.
     * </pre>
     */
    public com.google.shopping.merchant.accounts.v1beta.ListUsersResponse listUsers(
        com.google.shopping.merchant.accounts.v1beta.ListUsersRequest request) {
      return io.grpc.stub.ClientCalls.blockingUnaryCall(
          getChannel(), getListUsersMethod(), getCallOptions(), request);
    }
  }
  /**
   * A stub to allow clients to do ListenableFuture-style rpc calls to service UserService.
   *
   * <pre>
   * Service to support user API.
   * </pre>
   */
  public static final class UserServiceFutureStub
      extends io.grpc.stub.AbstractFutureStub<UserServiceFutureStub> {
    private UserServiceFutureStub(io.grpc.Channel channel, io.grpc.CallOptions callOptions) {
      super(channel, callOptions);
    }
    @java.lang.Override
    protected UserServiceFutureStub build(
        io.grpc.Channel channel, io.grpc.CallOptions callOptions) {
      // Invoked by AbstractStub when call options are customized; returns a fresh stub.
      return new UserServiceFutureStub(channel, callOptions);
    }
    /**
     *
     *
     * <pre>
     * Retrieves a Merchant Center account user.
     * </pre>
     */
    public com.google.common.util.concurrent.ListenableFuture<
            com.google.shopping.merchant.accounts.v1beta.User>
        getUser(com.google.shopping.merchant.accounts.v1beta.GetUserRequest request) {
      return io.grpc.stub.ClientCalls.futureUnaryCall(
          getChannel().newCall(getGetUserMethod(), getCallOptions()), request);
    }
    /**
     *
     *
     * <pre>
     * Creates a Merchant Center account user. Executing this method requires
     * admin access.
     * </pre>
     */
    public com.google.common.util.concurrent.ListenableFuture<
            com.google.shopping.merchant.accounts.v1beta.User>
        createUser(com.google.shopping.merchant.accounts.v1beta.CreateUserRequest request) {
      return io.grpc.stub.ClientCalls.futureUnaryCall(
          getChannel().newCall(getCreateUserMethod(), getCallOptions()), request);
    }
    /**
     *
     *
     * <pre>
     * Deletes a Merchant Center account user. Executing this method requires
     * admin access.
     * </pre>
     */
    public com.google.common.util.concurrent.ListenableFuture<com.google.protobuf.Empty> deleteUser(
        com.google.shopping.merchant.accounts.v1beta.DeleteUserRequest request) {
      return io.grpc.stub.ClientCalls.futureUnaryCall(
          getChannel().newCall(getDeleteUserMethod(), getCallOptions()), request);
    }
    /**
     *
     *
     * <pre>
     * Updates a Merchant Center account user. Executing this method requires
     * admin access.
     * </pre>
     */
    public com.google.common.util.concurrent.ListenableFuture<
            com.google.shopping.merchant.accounts.v1beta.User>
        updateUser(com.google.shopping.merchant.accounts.v1beta.UpdateUserRequest request) {
      return io.grpc.stub.ClientCalls.futureUnaryCall(
          getChannel().newCall(getUpdateUserMethod(), getCallOptions()), request);
    }
    /**
     *
     *
     * <pre>
     * Lists all users of a Merchant Center account.
     * </pre>
     */
    public com.google.common.util.concurrent.ListenableFuture<
            com.google.shopping.merchant.accounts.v1beta.ListUsersResponse>
        listUsers(com.google.shopping.merchant.accounts.v1beta.ListUsersRequest request) {
      return io.grpc.stub.ClientCalls.futureUnaryCall(
          getChannel().newCall(getListUsersMethod(), getCallOptions()), request);
    }
  }
  // Dispatch indices used by MethodHandlers.invoke to route a request to the
  // matching AsyncService method; order mirrors the service definition.
  private static final int METHODID_GET_USER = 0;
  private static final int METHODID_CREATE_USER = 1;
  private static final int METHODID_DELETE_USER = 2;
  private static final int METHODID_UPDATE_USER = 3;
  private static final int METHODID_LIST_USERS = 4;
  // Single handler class shared by all methods of the service: each instance is bound
  // to one METHODID_* constant and casts request/response to the method's proto types.
  private static final class MethodHandlers<Req, Resp>
      implements io.grpc.stub.ServerCalls.UnaryMethod<Req, Resp>,
          io.grpc.stub.ServerCalls.ServerStreamingMethod<Req, Resp>,
          io.grpc.stub.ServerCalls.ClientStreamingMethod<Req, Resp>,
          io.grpc.stub.ServerCalls.BidiStreamingMethod<Req, Resp> {
    private final AsyncService serviceImpl;
    private final int methodId;
    MethodHandlers(AsyncService serviceImpl, int methodId) {
      this.serviceImpl = serviceImpl;
      this.methodId = methodId;
    }
    @java.lang.Override
    @java.lang.SuppressWarnings("unchecked")
    public void invoke(Req request, io.grpc.stub.StreamObserver<Resp> responseObserver) {
      // Unary dispatch: the unchecked casts are safe because bindService pairs each
      // handler instance with the method descriptor of the same request/response types.
      switch (methodId) {
        case METHODID_GET_USER:
          serviceImpl.getUser(
              (com.google.shopping.merchant.accounts.v1beta.GetUserRequest) request,
              (io.grpc.stub.StreamObserver<com.google.shopping.merchant.accounts.v1beta.User>)
                  responseObserver);
          break;
        case METHODID_CREATE_USER:
          serviceImpl.createUser(
              (com.google.shopping.merchant.accounts.v1beta.CreateUserRequest) request,
              (io.grpc.stub.StreamObserver<com.google.shopping.merchant.accounts.v1beta.User>)
                  responseObserver);
          break;
        case METHODID_DELETE_USER:
          serviceImpl.deleteUser(
              (com.google.shopping.merchant.accounts.v1beta.DeleteUserRequest) request,
              (io.grpc.stub.StreamObserver<com.google.protobuf.Empty>) responseObserver);
          break;
        case METHODID_UPDATE_USER:
          serviceImpl.updateUser(
              (com.google.shopping.merchant.accounts.v1beta.UpdateUserRequest) request,
              (io.grpc.stub.StreamObserver<com.google.shopping.merchant.accounts.v1beta.User>)
                  responseObserver);
          break;
        case METHODID_LIST_USERS:
          serviceImpl.listUsers(
              (com.google.shopping.merchant.accounts.v1beta.ListUsersRequest) request,
              (io.grpc.stub.StreamObserver<
                      com.google.shopping.merchant.accounts.v1beta.ListUsersResponse>)
                  responseObserver);
          break;
        default:
          throw new AssertionError();
      }
    }
    @java.lang.Override
    @java.lang.SuppressWarnings("unchecked")
    public io.grpc.stub.StreamObserver<Req> invoke(
        io.grpc.stub.StreamObserver<Resp> responseObserver) {
      // Streaming dispatch: this service defines no client/bidi streaming methods,
      // so reaching here is a programming error.
      switch (methodId) {
        default:
          throw new AssertionError();
      }
    }
  }
  // Builds the server-side service definition, registering each unary RPC with a
  // MethodHandlers dispatcher keyed by the corresponding METHODID_* constant.
  public static final io.grpc.ServerServiceDefinition bindService(AsyncService service) {
    return io.grpc.ServerServiceDefinition.builder(getServiceDescriptor())
        .addMethod(
            getGetUserMethod(),
            io.grpc.stub.ServerCalls.asyncUnaryCall(
                new MethodHandlers<
                    com.google.shopping.merchant.accounts.v1beta.GetUserRequest,
                    com.google.shopping.merchant.accounts.v1beta.User>(service, METHODID_GET_USER)))
        .addMethod(
            getCreateUserMethod(),
            io.grpc.stub.ServerCalls.asyncUnaryCall(
                new MethodHandlers<
                    com.google.shopping.merchant.accounts.v1beta.CreateUserRequest,
                    com.google.shopping.merchant.accounts.v1beta.User>(
                    service, METHODID_CREATE_USER)))
        .addMethod(
            getDeleteUserMethod(),
            io.grpc.stub.ServerCalls.asyncUnaryCall(
                new MethodHandlers<
                    com.google.shopping.merchant.accounts.v1beta.DeleteUserRequest,
                    com.google.protobuf.Empty>(service, METHODID_DELETE_USER)))
        .addMethod(
            getUpdateUserMethod(),
            io.grpc.stub.ServerCalls.asyncUnaryCall(
                new MethodHandlers<
                    com.google.shopping.merchant.accounts.v1beta.UpdateUserRequest,
                    com.google.shopping.merchant.accounts.v1beta.User>(
                    service, METHODID_UPDATE_USER)))
        .addMethod(
            getListUsersMethod(),
            io.grpc.stub.ServerCalls.asyncUnaryCall(
                new MethodHandlers<
                    com.google.shopping.merchant.accounts.v1beta.ListUsersRequest,
                    com.google.shopping.merchant.accounts.v1beta.ListUsersResponse>(
                    service, METHODID_LIST_USERS)))
        .build();
  }
  // Supplies the proto file/service descriptors for this service (used for schema
  // metadata, e.g. server reflection).
  private abstract static class UserServiceBaseDescriptorSupplier
      implements io.grpc.protobuf.ProtoFileDescriptorSupplier,
          io.grpc.protobuf.ProtoServiceDescriptorSupplier {
    UserServiceBaseDescriptorSupplier() {}
    @java.lang.Override
    public com.google.protobuf.Descriptors.FileDescriptor getFileDescriptor() {
      return com.google.shopping.merchant.accounts.v1beta.UserProto.getDescriptor();
    }
    @java.lang.Override
    public com.google.protobuf.Descriptors.ServiceDescriptor getServiceDescriptor() {
      return getFileDescriptor().findServiceByName("UserService");
    }
  }
  // File-level descriptor supplier: inherits everything from the base supplier.
  private static final class UserServiceFileDescriptorSupplier
      extends UserServiceBaseDescriptorSupplier {
    UserServiceFileDescriptorSupplier() {}
  }
  // Method-level descriptor supplier: resolves one named method from the service descriptor.
  private static final class UserServiceMethodDescriptorSupplier
      extends UserServiceBaseDescriptorSupplier
      implements io.grpc.protobuf.ProtoMethodDescriptorSupplier {
    private final java.lang.String methodName;
    UserServiceMethodDescriptorSupplier(java.lang.String methodName) {
      this.methodName = methodName;
    }
    @java.lang.Override
    public com.google.protobuf.Descriptors.MethodDescriptor getMethodDescriptor() {
      return getServiceDescriptor().findMethodByName(methodName);
    }
  }
  // Lazily-built descriptor; volatile so the double-checked locking below is safe.
  private static volatile io.grpc.ServiceDescriptor serviceDescriptor;
  public static io.grpc.ServiceDescriptor getServiceDescriptor() {
    // Double-checked locking: read the volatile once, then re-check under the class lock
    // so the descriptor is built at most once.
    io.grpc.ServiceDescriptor result = serviceDescriptor;
    if (result == null) {
      synchronized (UserServiceGrpc.class) {
        result = serviceDescriptor;
        if (result == null) {
          serviceDescriptor =
              result =
                  io.grpc.ServiceDescriptor.newBuilder(SERVICE_NAME)
                      .setSchemaDescriptor(new UserServiceFileDescriptorSupplier())
                      .addMethod(getGetUserMethod())
                      .addMethod(getCreateUserMethod())
                      .addMethod(getDeleteUserMethod())
                      .addMethod(getUpdateUserMethod())
                      .addMethod(getListUsersMethod())
                      .build();
        }
      }
    }
    return result;
  }
}
// ==== New file: java-shopping-merchant-lfp/google-shopping-merchant-lfp/src/main/java/com/google/shopping/merchant/lfp/v1/LfpStoreServiceClient.java (googleapis/google-cloud-java) ====
/*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.shopping.merchant.lfp.v1;
import com.google.api.core.ApiFuture;
import com.google.api.core.ApiFutures;
import com.google.api.gax.core.BackgroundResource;
import com.google.api.gax.paging.AbstractFixedSizeCollection;
import com.google.api.gax.paging.AbstractPage;
import com.google.api.gax.paging.AbstractPagedListResponse;
import com.google.api.gax.rpc.PageContext;
import com.google.api.gax.rpc.UnaryCallable;
import com.google.common.util.concurrent.MoreExecutors;
import com.google.protobuf.Empty;
import com.google.shopping.merchant.lfp.v1.stub.LfpStoreServiceStub;
import com.google.shopping.merchant.lfp.v1.stub.LfpStoreServiceStubSettings;
import java.io.IOException;
import java.util.List;
import java.util.concurrent.TimeUnit;
import javax.annotation.Generated;
// AUTO-GENERATED DOCUMENTATION AND CLASS.
/**
* Service Description: Service for a [LFP
* partner](https://support.google.com/merchants/answer/7676652) to submit local stores for a
* merchant.
*
* <p>This class provides the ability to make remote calls to the backing service through method
* calls that map to API methods. Sample code to get started:
*
* <pre>{@code
* // This snippet has been automatically generated and should be regarded as a code template only.
* // It will require modifications to work:
* // - It may require correct/in-range values for request initialization.
* // - It may require specifying regional endpoints when creating the service client as shown in
* // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library
* try (LfpStoreServiceClient lfpStoreServiceClient = LfpStoreServiceClient.create()) {
* LfpStoreName name = LfpStoreName.of("[ACCOUNT]", "[TARGET_MERCHANT]", "[STORE_CODE]");
* LfpStore response = lfpStoreServiceClient.getLfpStore(name);
* }
* }</pre>
*
* <p>Note: close() needs to be called on the LfpStoreServiceClient object to clean up resources
* such as threads. In the example above, try-with-resources is used, which automatically calls
* close().
*
* <table>
* <caption>Methods</caption>
* <tr>
* <th>Method</th>
* <th>Description</th>
* <th>Method Variants</th>
* </tr>
* <tr>
* <td><p> GetLfpStore</td>
* <td><p> Retrieves information about a store.</td>
* <td>
* <p>Request object method variants only take one parameter, a request object, which must be constructed before the call.</p>
* <ul>
* <li><p> getLfpStore(GetLfpStoreRequest request)
* </ul>
* <p>"Flattened" method variants have converted the fields of the request object into function parameters to enable multiple ways to call the same method.</p>
* <ul>
* <li><p> getLfpStore(LfpStoreName name)
* <li><p> getLfpStore(String name)
* </ul>
* <p>Callable method variants take no parameters and return an immutable API callable object, which can be used to initiate calls to the service.</p>
* <ul>
* <li><p> getLfpStoreCallable()
* </ul>
* </td>
* </tr>
* <tr>
* <td><p> InsertLfpStore</td>
* <td><p> Inserts a store for the target merchant. If the store with the same store code already exists, it will be replaced.</td>
* <td>
* <p>Request object method variants only take one parameter, a request object, which must be constructed before the call.</p>
* <ul>
* <li><p> insertLfpStore(InsertLfpStoreRequest request)
* </ul>
* <p>"Flattened" method variants have converted the fields of the request object into function parameters to enable multiple ways to call the same method.</p>
* <ul>
* <li><p> insertLfpStore(AccountName parent, LfpStore lfpStore)
* <li><p> insertLfpStore(String parent, LfpStore lfpStore)
* </ul>
* <p>Callable method variants take no parameters and return an immutable API callable object, which can be used to initiate calls to the service.</p>
* <ul>
* <li><p> insertLfpStoreCallable()
* </ul>
* </td>
* </tr>
* <tr>
* <td><p> DeleteLfpStore</td>
* <td><p> Deletes a store for a target merchant.</td>
* <td>
* <p>Request object method variants only take one parameter, a request object, which must be constructed before the call.</p>
* <ul>
* <li><p> deleteLfpStore(DeleteLfpStoreRequest request)
* </ul>
* <p>"Flattened" method variants have converted the fields of the request object into function parameters to enable multiple ways to call the same method.</p>
* <ul>
* <li><p> deleteLfpStore(LfpStoreName name)
* <li><p> deleteLfpStore(String name)
* </ul>
* <p>Callable method variants take no parameters and return an immutable API callable object, which can be used to initiate calls to the service.</p>
* <ul>
* <li><p> deleteLfpStoreCallable()
* </ul>
* </td>
* </tr>
* <tr>
* <td><p> ListLfpStores</td>
* <td><p> Lists the stores of the target merchant, specified by the filter in `ListLfpStoresRequest`.</td>
* <td>
* <p>Request object method variants only take one parameter, a request object, which must be constructed before the call.</p>
* <ul>
* <li><p> listLfpStores(ListLfpStoresRequest request)
* </ul>
* <p>"Flattened" method variants have converted the fields of the request object into function parameters to enable multiple ways to call the same method.</p>
* <ul>
* <li><p> listLfpStores(AccountName parent)
* <li><p> listLfpStores(String parent)
* </ul>
* <p>Callable method variants take no parameters and return an immutable API callable object, which can be used to initiate calls to the service.</p>
* <ul>
* <li><p> listLfpStoresPagedCallable()
* <li><p> listLfpStoresCallable()
* </ul>
* </td>
* </tr>
* </table>
*
* <p>See the individual methods for example code.
*
* <p>Many parameters require resource names to be formatted in a particular way. To assist with
* these names, this class includes a format method for each type of name, and additionally a parse
* method to extract the individual identifiers contained within names that are returned.
*
* <p>This class can be customized by passing in a custom instance of LfpStoreServiceSettings to
* create(). For example:
*
* <p>To customize credentials:
*
* <pre>{@code
* // This snippet has been automatically generated and should be regarded as a code template only.
* // It will require modifications to work:
* // - It may require correct/in-range values for request initialization.
* // - It may require specifying regional endpoints when creating the service client as shown in
* // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library
* LfpStoreServiceSettings lfpStoreServiceSettings =
* LfpStoreServiceSettings.newBuilder()
* .setCredentialsProvider(FixedCredentialsProvider.create(myCredentials))
* .build();
* LfpStoreServiceClient lfpStoreServiceClient =
* LfpStoreServiceClient.create(lfpStoreServiceSettings);
* }</pre>
*
* <p>To customize the endpoint:
*
* <pre>{@code
* // This snippet has been automatically generated and should be regarded as a code template only.
* // It will require modifications to work:
* // - It may require correct/in-range values for request initialization.
* // - It may require specifying regional endpoints when creating the service client as shown in
* // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library
* LfpStoreServiceSettings lfpStoreServiceSettings =
* LfpStoreServiceSettings.newBuilder().setEndpoint(myEndpoint).build();
* LfpStoreServiceClient lfpStoreServiceClient =
* LfpStoreServiceClient.create(lfpStoreServiceSettings);
* }</pre>
*
* <p>To use REST (HTTP1.1/JSON) transport (instead of gRPC) for sending and receiving requests over
* the wire:
*
* <pre>{@code
* // This snippet has been automatically generated and should be regarded as a code template only.
* // It will require modifications to work:
* // - It may require correct/in-range values for request initialization.
* // - It may require specifying regional endpoints when creating the service client as shown in
* // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library
* LfpStoreServiceSettings lfpStoreServiceSettings =
* LfpStoreServiceSettings.newHttpJsonBuilder().build();
* LfpStoreServiceClient lfpStoreServiceClient =
* LfpStoreServiceClient.create(lfpStoreServiceSettings);
* }</pre>
*
* <p>Please refer to the GitHub repository's samples for more quickstart code snippets.
*/
@Generated("by gapic-generator-java")
public class LfpStoreServiceClient implements BackgroundResource {
  // Settings used to build this client (null when constructed directly from a stub)
  // and the transport-level stub that performs the actual RPCs.
  private final LfpStoreServiceSettings settings;
  private final LfpStoreServiceStub stub;
  /**
   * Constructs an instance of LfpStoreServiceClient with default settings.
   *
   * @throws IOException if the client cannot be created from the default settings
   */
  public static final LfpStoreServiceClient create() throws IOException {
    return create(LfpStoreServiceSettings.newBuilder().build());
  }
  /**
   * Constructs an instance of LfpStoreServiceClient, using the given settings. The channels are
   * created based on the settings passed in, or defaults for any settings that are not set.
   *
   * @throws IOException if the underlying transport stub cannot be created
   */
  public static final LfpStoreServiceClient create(LfpStoreServiceSettings settings)
      throws IOException {
    return new LfpStoreServiceClient(settings);
  }
  /**
   * Constructs an instance of LfpStoreServiceClient, using the given stub for making calls. This is
   * for advanced usage - prefer using create(LfpStoreServiceSettings).
   */
  public static final LfpStoreServiceClient create(LfpStoreServiceStub stub) {
    return new LfpStoreServiceClient(stub);
  }
  /**
   * Constructs an instance of LfpStoreServiceClient, using the given settings. This is protected so
   * that it is easy to make a subclass, but otherwise, the static factory methods should be
   * preferred.
   */
  protected LfpStoreServiceClient(LfpStoreServiceSettings settings) throws IOException {
    this.settings = settings;
    // The stub settings carried inside the public settings object build the transport stub.
    this.stub = ((LfpStoreServiceStubSettings) settings.getStubSettings()).createStub();
  }
  // Advanced-usage constructor: wraps a pre-built stub directly; settings are unavailable (null).
  protected LfpStoreServiceClient(LfpStoreServiceStub stub) {
    this.settings = null;
    this.stub = stub;
  }
  /** Returns the settings this client was created with, or null if built from a raw stub. */
  public final LfpStoreServiceSettings getSettings() {
    return settings;
  }
  /** Returns the underlying transport stub used by this client. */
  public LfpStoreServiceStub getStub() {
    return stub;
  }
  // AUTO-GENERATED DOCUMENTATION AND METHOD.
  /**
   * Retrieves information about a store.
   *
   * <p>Sample code:
   *
   * <pre>{@code
   * // This snippet has been automatically generated and should be regarded as a code template only.
   * // It will require modifications to work:
   * // - It may require correct/in-range values for request initialization.
   * // - It may require specifying regional endpoints when creating the service client as shown in
   * // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library
   * try (LfpStoreServiceClient lfpStoreServiceClient = LfpStoreServiceClient.create()) {
   *   LfpStoreName name = LfpStoreName.of("[ACCOUNT]", "[TARGET_MERCHANT]", "[STORE_CODE]");
   *   LfpStore response = lfpStoreServiceClient.getLfpStore(name);
   * }
   * }</pre>
   *
   * @param name Required. The name of the store to retrieve. Format:
   *     `accounts/{account}/lfpStores/{target_merchant}~{store_code}`
   * @throws com.google.api.gax.rpc.ApiException if the remote call fails
   */
  public final LfpStore getLfpStore(LfpStoreName name) {
    // Flattened overload: converts the typed resource name to its string form
    // (guarding the toString() call against a null name) and delegates.
    GetLfpStoreRequest request =
        GetLfpStoreRequest.newBuilder().setName(name == null ? null : name.toString()).build();
    return getLfpStore(request);
  }
  // AUTO-GENERATED DOCUMENTATION AND METHOD.
  /**
   * Retrieves information about a store.
   *
   * <p>Sample code:
   *
   * <pre>{@code
   * // This snippet has been automatically generated and should be regarded as a code template only.
   * // It will require modifications to work:
   * // - It may require correct/in-range values for request initialization.
   * // - It may require specifying regional endpoints when creating the service client as shown in
   * // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library
   * try (LfpStoreServiceClient lfpStoreServiceClient = LfpStoreServiceClient.create()) {
   *   String name = LfpStoreName.of("[ACCOUNT]", "[TARGET_MERCHANT]", "[STORE_CODE]").toString();
   *   LfpStore response = lfpStoreServiceClient.getLfpStore(name);
   * }
   * }</pre>
   *
   * @param name Required. The name of the store to retrieve. Format:
   *     `accounts/{account}/lfpStores/{target_merchant}~{store_code}`
   * @throws com.google.api.gax.rpc.ApiException if the remote call fails
   */
  public final LfpStore getLfpStore(String name) {
    // Flattened overload taking the resource name as a raw string; delegates to the
    // request-object variant.
    GetLfpStoreRequest request = GetLfpStoreRequest.newBuilder().setName(name).build();
    return getLfpStore(request);
  }
  // AUTO-GENERATED DOCUMENTATION AND METHOD.
  /**
   * Retrieves information about a store.
   *
   * <p>Sample code:
   *
   * <pre>{@code
   * // This snippet has been automatically generated and should be regarded as a code template only.
   * // It will require modifications to work:
   * // - It may require correct/in-range values for request initialization.
   * // - It may require specifying regional endpoints when creating the service client as shown in
   * // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library
   * try (LfpStoreServiceClient lfpStoreServiceClient = LfpStoreServiceClient.create()) {
   *   GetLfpStoreRequest request =
   *       GetLfpStoreRequest.newBuilder()
   *           .setName(LfpStoreName.of("[ACCOUNT]", "[TARGET_MERCHANT]", "[STORE_CODE]").toString())
   *           .build();
   *   LfpStore response = lfpStoreServiceClient.getLfpStore(request);
   * }
   * }</pre>
   *
   * @param request The request object containing all of the parameters for the API call.
   * @throws com.google.api.gax.rpc.ApiException if the remote call fails
   */
  public final LfpStore getLfpStore(GetLfpStoreRequest request) {
    // All overloads funnel through the callable, which applies retries/timeouts from settings.
    return getLfpStoreCallable().call(request);
  }
  // AUTO-GENERATED DOCUMENTATION AND METHOD.
  /**
   * Retrieves information about a store.
   *
   * <p>Sample code:
   *
   * <pre>{@code
   * // This snippet has been automatically generated and should be regarded as a code template only.
   * // It will require modifications to work:
   * // - It may require correct/in-range values for request initialization.
   * // - It may require specifying regional endpoints when creating the service client as shown in
   * // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library
   * try (LfpStoreServiceClient lfpStoreServiceClient = LfpStoreServiceClient.create()) {
   *   GetLfpStoreRequest request =
   *       GetLfpStoreRequest.newBuilder()
   *           .setName(LfpStoreName.of("[ACCOUNT]", "[TARGET_MERCHANT]", "[STORE_CODE]").toString())
   *           .build();
   *   ApiFuture<LfpStore> future = lfpStoreServiceClient.getLfpStoreCallable().futureCall(request);
   *   // Do something.
   *   LfpStore response = future.get();
   * }
   * }</pre>
   */
  public final UnaryCallable<GetLfpStoreRequest, LfpStore> getLfpStoreCallable() {
    // Exposes the raw callable for async (futureCall) usage.
    return stub.getLfpStoreCallable();
  }
  // AUTO-GENERATED DOCUMENTATION AND METHOD.
  /**
   * Inserts a store for the target merchant. If the store with the same store code already exists,
   * it will be replaced.
   *
   * <p>Sample code:
   *
   * <pre>{@code
   * // This snippet has been automatically generated and should be regarded as a code template only.
   * // It will require modifications to work:
   * // - It may require correct/in-range values for request initialization.
   * // - It may require specifying regional endpoints when creating the service client as shown in
   * // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library
   * try (LfpStoreServiceClient lfpStoreServiceClient = LfpStoreServiceClient.create()) {
   *   AccountName parent = AccountName.of("[ACCOUNT]");
   *   LfpStore lfpStore = LfpStore.newBuilder().build();
   *   LfpStore response = lfpStoreServiceClient.insertLfpStore(parent, lfpStore);
   * }
   * }</pre>
   *
   * @param parent Required. The LFP provider account Format: `accounts/{account}`
   * @param lfpStore Required. The store to insert.
   * @throws com.google.api.gax.rpc.ApiException if the remote call fails
   */
  public final LfpStore insertLfpStore(AccountName parent, LfpStore lfpStore) {
    // Flattened overload: converts the typed parent resource name to its string form
    // (guarding the toString() call against a null parent) and delegates.
    InsertLfpStoreRequest request =
        InsertLfpStoreRequest.newBuilder()
            .setParent(parent == null ? null : parent.toString())
            .setLfpStore(lfpStore)
            .build();
    return insertLfpStore(request);
  }
  // AUTO-GENERATED DOCUMENTATION AND METHOD.
  /**
   * Inserts a store for the target merchant. If the store with the same store code already exists,
   * it will be replaced.
   *
   * <p>Sample code:
   *
   * <pre>{@code
   * // This snippet has been automatically generated and should be regarded as a code template only.
   * // It will require modifications to work:
   * // - It may require correct/in-range values for request initialization.
   * // - It may require specifying regional endpoints when creating the service client as shown in
   * // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library
   * try (LfpStoreServiceClient lfpStoreServiceClient = LfpStoreServiceClient.create()) {
   *   String parent = AccountName.of("[ACCOUNT]").toString();
   *   LfpStore lfpStore = LfpStore.newBuilder().build();
   *   LfpStore response = lfpStoreServiceClient.insertLfpStore(parent, lfpStore);
   * }
   * }</pre>
   *
   * @param parent Required. The LFP provider account Format: `accounts/{account}`
   * @param lfpStore Required. The store to insert.
   * @throws com.google.api.gax.rpc.ApiException if the remote call fails
   */
  public final LfpStore insertLfpStore(String parent, LfpStore lfpStore) {
    // Flattened overload taking the parent resource name as a raw string; delegates
    // to the request-object variant.
    InsertLfpStoreRequest request =
        InsertLfpStoreRequest.newBuilder().setParent(parent).setLfpStore(lfpStore).build();
    return insertLfpStore(request);
  }
  // AUTO-GENERATED DOCUMENTATION AND METHOD.
  /**
   * Inserts a store for the target merchant. If the store with the same store code already exists,
   * it will be replaced.
   *
   * <p>Sample code:
   *
   * <pre>{@code
   * // This snippet has been automatically generated and should be regarded as a code template only.
   * // It will require modifications to work:
   * // - It may require correct/in-range values for request initialization.
   * // - It may require specifying regional endpoints when creating the service client as shown in
   * // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library
   * try (LfpStoreServiceClient lfpStoreServiceClient = LfpStoreServiceClient.create()) {
   *   InsertLfpStoreRequest request =
   *       InsertLfpStoreRequest.newBuilder()
   *           .setParent(AccountName.of("[ACCOUNT]").toString())
   *           .setLfpStore(LfpStore.newBuilder().build())
   *           .build();
   *   LfpStore response = lfpStoreServiceClient.insertLfpStore(request);
   * }
   * }</pre>
   *
   * @param request The request object containing all of the parameters for the API call.
   * @return the store as stored by the service
   * @throws com.google.api.gax.rpc.ApiException if the remote call fails
   */
  public final LfpStore insertLfpStore(InsertLfpStoreRequest request) {
    // Synchronous call: blocks until the RPC completes or fails.
    return insertLfpStoreCallable().call(request);
  }
  // AUTO-GENERATED DOCUMENTATION AND METHOD.
  /**
   * Inserts a store for the target merchant. If the store with the same store code already exists,
   * it will be replaced.
   *
   * <p>Sample code:
   *
   * <pre>{@code
   * // This snippet has been automatically generated and should be regarded as a code template only.
   * // It will require modifications to work:
   * // - It may require correct/in-range values for request initialization.
   * // - It may require specifying regional endpoints when creating the service client as shown in
   * // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library
   * try (LfpStoreServiceClient lfpStoreServiceClient = LfpStoreServiceClient.create()) {
   *   InsertLfpStoreRequest request =
   *       InsertLfpStoreRequest.newBuilder()
   *           .setParent(AccountName.of("[ACCOUNT]").toString())
   *           .setLfpStore(LfpStore.newBuilder().build())
   *           .build();
   *   ApiFuture<LfpStore> future =
   *       lfpStoreServiceClient.insertLfpStoreCallable().futureCall(request);
   *   // Do something.
   *   LfpStore response = future.get();
   * }
   * }</pre>
   *
   * @return the callable backed by this client's transport stub; useful for async invocation
   */
  public final UnaryCallable<InsertLfpStoreRequest, LfpStore> insertLfpStoreCallable() {
    return stub.insertLfpStoreCallable();
  }
// AUTO-GENERATED DOCUMENTATION AND METHOD.
/**
* Deletes a store for a target merchant.
*
* <p>Sample code:
*
* <pre>{@code
* // This snippet has been automatically generated and should be regarded as a code template only.
* // It will require modifications to work:
* // - It may require correct/in-range values for request initialization.
* // - It may require specifying regional endpoints when creating the service client as shown in
* // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library
* try (LfpStoreServiceClient lfpStoreServiceClient = LfpStoreServiceClient.create()) {
* LfpStoreName name = LfpStoreName.of("[ACCOUNT]", "[TARGET_MERCHANT]", "[STORE_CODE]");
* lfpStoreServiceClient.deleteLfpStore(name);
* }
* }</pre>
*
* @param name Required. The name of the store to delete for the target merchant account. Format:
* `accounts/{account}/lfpStores/{target_merchant}~{store_code}`
* @throws com.google.api.gax.rpc.ApiException if the remote call fails
*/
public final void deleteLfpStore(LfpStoreName name) {
DeleteLfpStoreRequest request =
DeleteLfpStoreRequest.newBuilder().setName(name == null ? null : name.toString()).build();
deleteLfpStore(request);
}
// AUTO-GENERATED DOCUMENTATION AND METHOD.
/**
* Deletes a store for a target merchant.
*
* <p>Sample code:
*
* <pre>{@code
* // This snippet has been automatically generated and should be regarded as a code template only.
* // It will require modifications to work:
* // - It may require correct/in-range values for request initialization.
* // - It may require specifying regional endpoints when creating the service client as shown in
* // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library
* try (LfpStoreServiceClient lfpStoreServiceClient = LfpStoreServiceClient.create()) {
* String name = LfpStoreName.of("[ACCOUNT]", "[TARGET_MERCHANT]", "[STORE_CODE]").toString();
* lfpStoreServiceClient.deleteLfpStore(name);
* }
* }</pre>
*
* @param name Required. The name of the store to delete for the target merchant account. Format:
* `accounts/{account}/lfpStores/{target_merchant}~{store_code}`
* @throws com.google.api.gax.rpc.ApiException if the remote call fails
*/
public final void deleteLfpStore(String name) {
DeleteLfpStoreRequest request = DeleteLfpStoreRequest.newBuilder().setName(name).build();
deleteLfpStore(request);
}
  // AUTO-GENERATED DOCUMENTATION AND METHOD.
  /**
   * Deletes a store for a target merchant.
   *
   * <p>Sample code:
   *
   * <pre>{@code
   * // This snippet has been automatically generated and should be regarded as a code template only.
   * // It will require modifications to work:
   * // - It may require correct/in-range values for request initialization.
   * // - It may require specifying regional endpoints when creating the service client as shown in
   * // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library
   * try (LfpStoreServiceClient lfpStoreServiceClient = LfpStoreServiceClient.create()) {
   *   DeleteLfpStoreRequest request =
   *       DeleteLfpStoreRequest.newBuilder()
   *           .setName(LfpStoreName.of("[ACCOUNT]", "[TARGET_MERCHANT]", "[STORE_CODE]").toString())
   *           .build();
   *   lfpStoreServiceClient.deleteLfpStore(request);
   * }
   * }</pre>
   *
   * @param request The request object containing all of the parameters for the API call.
   * @throws com.google.api.gax.rpc.ApiException if the remote call fails
   */
  public final void deleteLfpStore(DeleteLfpStoreRequest request) {
    // Synchronous call: blocks until the RPC completes; the Empty response is discarded.
    deleteLfpStoreCallable().call(request);
  }
  // AUTO-GENERATED DOCUMENTATION AND METHOD.
  /**
   * Deletes a store for a target merchant.
   *
   * <p>Sample code:
   *
   * <pre>{@code
   * // This snippet has been automatically generated and should be regarded as a code template only.
   * // It will require modifications to work:
   * // - It may require correct/in-range values for request initialization.
   * // - It may require specifying regional endpoints when creating the service client as shown in
   * // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library
   * try (LfpStoreServiceClient lfpStoreServiceClient = LfpStoreServiceClient.create()) {
   *   DeleteLfpStoreRequest request =
   *       DeleteLfpStoreRequest.newBuilder()
   *           .setName(LfpStoreName.of("[ACCOUNT]", "[TARGET_MERCHANT]", "[STORE_CODE]").toString())
   *           .build();
   *   ApiFuture<Empty> future = lfpStoreServiceClient.deleteLfpStoreCallable().futureCall(request);
   *   // Do something.
   *   future.get();
   * }
   * }</pre>
   *
   * @return the callable backed by this client's transport stub; useful for async invocation
   */
  public final UnaryCallable<DeleteLfpStoreRequest, Empty> deleteLfpStoreCallable() {
    return stub.deleteLfpStoreCallable();
  }
// AUTO-GENERATED DOCUMENTATION AND METHOD.
/**
* Lists the stores of the target merchant, specified by the filter in `ListLfpStoresRequest`.
*
* <p>Sample code:
*
* <pre>{@code
* // This snippet has been automatically generated and should be regarded as a code template only.
* // It will require modifications to work:
* // - It may require correct/in-range values for request initialization.
* // - It may require specifying regional endpoints when creating the service client as shown in
* // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library
* try (LfpStoreServiceClient lfpStoreServiceClient = LfpStoreServiceClient.create()) {
* AccountName parent = AccountName.of("[ACCOUNT]");
* for (LfpStore element : lfpStoreServiceClient.listLfpStores(parent).iterateAll()) {
* // doThingsWith(element);
* }
* }
* }</pre>
*
* @param parent Required. The LFP partner. Format: `accounts/{account}`
* @throws com.google.api.gax.rpc.ApiException if the remote call fails
*/
public final ListLfpStoresPagedResponse listLfpStores(AccountName parent) {
ListLfpStoresRequest request =
ListLfpStoresRequest.newBuilder()
.setParent(parent == null ? null : parent.toString())
.build();
return listLfpStores(request);
}
// AUTO-GENERATED DOCUMENTATION AND METHOD.
/**
* Lists the stores of the target merchant, specified by the filter in `ListLfpStoresRequest`.
*
* <p>Sample code:
*
* <pre>{@code
* // This snippet has been automatically generated and should be regarded as a code template only.
* // It will require modifications to work:
* // - It may require correct/in-range values for request initialization.
* // - It may require specifying regional endpoints when creating the service client as shown in
* // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library
* try (LfpStoreServiceClient lfpStoreServiceClient = LfpStoreServiceClient.create()) {
* String parent = AccountName.of("[ACCOUNT]").toString();
* for (LfpStore element : lfpStoreServiceClient.listLfpStores(parent).iterateAll()) {
* // doThingsWith(element);
* }
* }
* }</pre>
*
* @param parent Required. The LFP partner. Format: `accounts/{account}`
* @throws com.google.api.gax.rpc.ApiException if the remote call fails
*/
public final ListLfpStoresPagedResponse listLfpStores(String parent) {
ListLfpStoresRequest request = ListLfpStoresRequest.newBuilder().setParent(parent).build();
return listLfpStores(request);
}
  // AUTO-GENERATED DOCUMENTATION AND METHOD.
  /**
   * Lists the stores of the target merchant, specified by the filter in `ListLfpStoresRequest`.
   *
   * <p>Sample code:
   *
   * <pre>{@code
   * // This snippet has been automatically generated and should be regarded as a code template only.
   * // It will require modifications to work:
   * // - It may require correct/in-range values for request initialization.
   * // - It may require specifying regional endpoints when creating the service client as shown in
   * // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library
   * try (LfpStoreServiceClient lfpStoreServiceClient = LfpStoreServiceClient.create()) {
   *   ListLfpStoresRequest request =
   *       ListLfpStoresRequest.newBuilder()
   *           .setParent(AccountName.of("[ACCOUNT]").toString())
   *           .setTargetAccount(-475823745)
   *           .setPageSize(883849137)
   *           .setPageToken("pageToken873572522")
   *           .build();
   *   for (LfpStore element : lfpStoreServiceClient.listLfpStores(request).iterateAll()) {
   *     // doThingsWith(element);
   *   }
   * }
   * }</pre>
   *
   * @param request The request object containing all of the parameters for the API call.
   * @return a paged response whose {@code iterateAll()} lazily fetches subsequent pages
   * @throws com.google.api.gax.rpc.ApiException if the remote call fails
   */
  public final ListLfpStoresPagedResponse listLfpStores(ListLfpStoresRequest request) {
    // Synchronous call for the first page; further pages are fetched on iteration.
    return listLfpStoresPagedCallable().call(request);
  }
  // AUTO-GENERATED DOCUMENTATION AND METHOD.
  /**
   * Lists the stores of the target merchant, specified by the filter in `ListLfpStoresRequest`.
   *
   * <p>Sample code:
   *
   * <pre>{@code
   * // This snippet has been automatically generated and should be regarded as a code template only.
   * // It will require modifications to work:
   * // - It may require correct/in-range values for request initialization.
   * // - It may require specifying regional endpoints when creating the service client as shown in
   * // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library
   * try (LfpStoreServiceClient lfpStoreServiceClient = LfpStoreServiceClient.create()) {
   *   ListLfpStoresRequest request =
   *       ListLfpStoresRequest.newBuilder()
   *           .setParent(AccountName.of("[ACCOUNT]").toString())
   *           .setTargetAccount(-475823745)
   *           .setPageSize(883849137)
   *           .setPageToken("pageToken873572522")
   *           .build();
   *   ApiFuture<LfpStore> future =
   *       lfpStoreServiceClient.listLfpStoresPagedCallable().futureCall(request);
   *   // Do something.
   *   for (LfpStore element : future.get().iterateAll()) {
   *     // doThingsWith(element);
   *   }
   * }
   * }</pre>
   *
   * @return the paging-aware callable backed by this client's transport stub
   */
  public final UnaryCallable<ListLfpStoresRequest, ListLfpStoresPagedResponse>
      listLfpStoresPagedCallable() {
    return stub.listLfpStoresPagedCallable();
  }
  // AUTO-GENERATED DOCUMENTATION AND METHOD.
  /**
   * Lists the stores of the target merchant, specified by the filter in `ListLfpStoresRequest`.
   * Unlike the paged variant, this callable returns one raw response page per call; the caller is
   * responsible for passing `next_page_token` back in follow-up requests.
   *
   * <p>Sample code:
   *
   * <pre>{@code
   * // This snippet has been automatically generated and should be regarded as a code template only.
   * // It will require modifications to work:
   * // - It may require correct/in-range values for request initialization.
   * // - It may require specifying regional endpoints when creating the service client as shown in
   * // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library
   * try (LfpStoreServiceClient lfpStoreServiceClient = LfpStoreServiceClient.create()) {
   *   ListLfpStoresRequest request =
   *       ListLfpStoresRequest.newBuilder()
   *           .setParent(AccountName.of("[ACCOUNT]").toString())
   *           .setTargetAccount(-475823745)
   *           .setPageSize(883849137)
   *           .setPageToken("pageToken873572522")
   *           .build();
   *   while (true) {
   *     ListLfpStoresResponse response =
   *         lfpStoreServiceClient.listLfpStoresCallable().call(request);
   *     for (LfpStore element : response.getLfpStoresList()) {
   *       // doThingsWith(element);
   *     }
   *     String nextPageToken = response.getNextPageToken();
   *     if (!Strings.isNullOrEmpty(nextPageToken)) {
   *       request = request.toBuilder().setPageToken(nextPageToken).build();
   *     } else {
   *       break;
   *     }
   *   }
   * }
   * }</pre>
   *
   * @return the single-page callable backed by this client's transport stub
   */
  public final UnaryCallable<ListLfpStoresRequest, ListLfpStoresResponse> listLfpStoresCallable() {
    return stub.listLfpStoresCallable();
  }
  /** Closes this client, releasing the resources held by the underlying stub. */
  @Override
  public final void close() {
    stub.close();
  }

  /** Initiates an orderly shutdown: no new calls accepted, in-flight calls allowed to finish. */
  @Override
  public void shutdown() {
    stub.shutdown();
  }

  /** Returns whether shutdown of the underlying stub has been initiated. */
  @Override
  public boolean isShutdown() {
    return stub.isShutdown();
  }

  /** Returns whether all work has completed following a shutdown. */
  @Override
  public boolean isTerminated() {
    return stub.isTerminated();
  }

  /** Initiates a forceful shutdown of the underlying stub. */
  @Override
  public void shutdownNow() {
    stub.shutdownNow();
  }

  /**
   * Blocks until termination completes, the timeout elapses, or the thread is interrupted.
   *
   * @return true if the stub terminated within the given duration
   */
  @Override
  public boolean awaitTermination(long duration, TimeUnit unit) throws InterruptedException {
    return stub.awaitTermination(duration, unit);
  }
public static class ListLfpStoresPagedResponse
extends AbstractPagedListResponse<
ListLfpStoresRequest,
ListLfpStoresResponse,
LfpStore,
ListLfpStoresPage,
ListLfpStoresFixedSizeCollection> {
public static ApiFuture<ListLfpStoresPagedResponse> createAsync(
PageContext<ListLfpStoresRequest, ListLfpStoresResponse, LfpStore> context,
ApiFuture<ListLfpStoresResponse> futureResponse) {
ApiFuture<ListLfpStoresPage> futurePage =
ListLfpStoresPage.createEmptyPage().createPageAsync(context, futureResponse);
return ApiFutures.transform(
futurePage,
input -> new ListLfpStoresPagedResponse(input),
MoreExecutors.directExecutor());
}
private ListLfpStoresPagedResponse(ListLfpStoresPage page) {
super(page, ListLfpStoresFixedSizeCollection.createEmptyCollection());
}
}
  /** One page of {@code listLfpStores} results; knows how to fetch the next page. */
  public static class ListLfpStoresPage
      extends AbstractPage<
          ListLfpStoresRequest, ListLfpStoresResponse, LfpStore, ListLfpStoresPage> {

    private ListLfpStoresPage(
        PageContext<ListLfpStoresRequest, ListLfpStoresResponse, LfpStore> context,
        ListLfpStoresResponse response) {
      super(context, response);
    }

    // Sentinel page used as a factory seed; both context and response are intentionally null.
    private static ListLfpStoresPage createEmptyPage() {
      return new ListLfpStoresPage(null, null);
    }

    /** Creates a page instance from a raw response; called by the paging framework. */
    @Override
    protected ListLfpStoresPage createPage(
        PageContext<ListLfpStoresRequest, ListLfpStoresResponse, LfpStore> context,
        ListLfpStoresResponse response) {
      return new ListLfpStoresPage(context, response);
    }

    /** Creates a page asynchronously from a future response; delegates to the framework. */
    @Override
    public ApiFuture<ListLfpStoresPage> createPageAsync(
        PageContext<ListLfpStoresRequest, ListLfpStoresResponse, LfpStore> context,
        ApiFuture<ListLfpStoresResponse> futureResponse) {
      return super.createPageAsync(context, futureResponse);
    }
  }
  /** Groups {@code listLfpStores} results into fixed-size collections spanning pages. */
  public static class ListLfpStoresFixedSizeCollection
      extends AbstractFixedSizeCollection<
          ListLfpStoresRequest,
          ListLfpStoresResponse,
          LfpStore,
          ListLfpStoresPage,
          ListLfpStoresFixedSizeCollection> {

    private ListLfpStoresFixedSizeCollection(List<ListLfpStoresPage> pages, int collectionSize) {
      super(pages, collectionSize);
    }

    // Sentinel collection used as a factory seed; pages is intentionally null.
    private static ListLfpStoresFixedSizeCollection createEmptyCollection() {
      return new ListLfpStoresFixedSizeCollection(null, 0);
    }

    /** Creates a collection from the given pages; called by the paging framework. */
    @Override
    protected ListLfpStoresFixedSizeCollection createCollection(
        List<ListLfpStoresPage> pages, int collectionSize) {
      return new ListLfpStoresFixedSizeCollection(pages, collectionSize);
    }
  }
}
|
apache/commons-text | 37,095 | src/main/java/org/apache/commons/text/StrTokenizer.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.commons.text;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.ListIterator;
import java.util.NoSuchElementException;
import org.apache.commons.lang3.ArrayUtils;
import org.apache.commons.lang3.StringUtils;
/**
* Tokenizes a string based on delimiters (separators)
* and supporting quoting and ignored character concepts.
* <p>
* This class can split a String into many smaller strings. It aims
* to do a similar job to {@link java.util.StringTokenizer StringTokenizer},
* however it offers much more control and flexibility including implementing
* the {@code ListIterator} interface. By default, it is set up
* like {@code StringTokenizer}.
* <p>
* The input String is split into a number of <em>tokens</em>.
* Each token is separated from the next String by a <em>delimiter</em>.
* One or more delimiter characters must be specified.
* <p>
* Each token may be surrounded by quotes.
* The <em>quote</em> matcher specifies the quote character(s).
* A quote may be escaped within a quoted section by duplicating itself.
* <p>
* Between each token and the delimiter are potentially characters that need trimming.
* The <em>trimmer</em> matcher specifies these characters.
* One usage might be to trim whitespace characters.
* <p>
* At any point outside the quotes there might potentially be invalid characters.
* The <em>ignored</em> matcher specifies these characters to be removed.
* One usage might be to remove new line characters.
* <p>
* Empty tokens may be removed or returned as null.
* <pre>
* "a,b,c" - Three tokens "a","b","c" (comma delimiter)
* " a, b , c " - Three tokens "a","b","c" (default CSV processing trims whitespace)
* "a, ", b ,", c" - Three tokens "a, " , " b ", ", c" (quoted text untouched)
* </pre>
*
* <table>
* <caption>StrTokenizer properties and options</caption>
* <tr>
* <th>Property</th><th>Type</th><th>Default</th>
* </tr>
* <tr>
* <td>delim</td><td>CharSetMatcher</td><td>{ \t\n\r\f}</td>
* </tr>
* <tr>
* <td>quote</td><td>NoneMatcher</td><td>{}</td>
* </tr>
* <tr>
* <td>ignore</td><td>NoneMatcher</td><td>{}</td>
* </tr>
* <tr>
* <td>emptyTokenAsNull</td><td>boolean</td><td>false</td>
* </tr>
* <tr>
* <td>ignoreEmptyTokens</td><td>boolean</td><td>true</td>
* </tr>
* </table>
*
* @since 1.0
* @deprecated Deprecated as of 1.3, use {@link StringTokenizer} instead. This class will be removed in 2.0.
*/
@Deprecated
public class StrTokenizer implements ListIterator<String>, Cloneable {
    /**
     * Shared prototype for CSV tokenizers. Cloned by {@link #getCSVInstance()} and friends;
     * never mutated after class initialization, so cloning is safe.
     */
    // @formatter:off
    private static final StrTokenizer CSV_TOKENIZER_PROTOTYPE = new StrTokenizer()
            .setDelimiterMatcher(StrMatcher.commaMatcher())
            .setQuoteMatcher(StrMatcher.doubleQuoteMatcher())
            .setIgnoredMatcher(StrMatcher.noneMatcher())
            .setTrimmerMatcher(StrMatcher.trimMatcher())
            .setEmptyTokenAsNull(false)
            .setIgnoreEmptyTokens(false);
    // @formatter:on

    /**
     * Shared prototype for TSV tokenizers. Cloned by {@link #getTSVInstance()} and friends;
     * never mutated after class initialization, so cloning is safe.
     */
    // @formatter:off
    private static final StrTokenizer TSV_TOKENIZER_PROTOTYPE = new StrTokenizer()
            .setDelimiterMatcher(StrMatcher.tabMatcher())
            .setQuoteMatcher(StrMatcher.doubleQuoteMatcher())
            .setIgnoredMatcher(StrMatcher.noneMatcher())
            .setTrimmerMatcher(StrMatcher.trimMatcher())
            .setEmptyTokenAsNull(false)
            .setIgnoreEmptyTokens(false);
    // @formatter:on
    /**
     * Returns a clone of {@code CSV_TOKENIZER_PROTOTYPE}.
     *
     * @return a clone of {@code CSV_TOKENIZER_PROTOTYPE}.
     */
    private static StrTokenizer getCSVClone() {
        return (StrTokenizer) CSV_TOKENIZER_PROTOTYPE.clone();
    }

    /**
     * Gets a new tokenizer instance which parses Comma Separated Value strings.
     * The default for CSV processing is to trim whitespace from both ends
     * (which can be overridden with the setTrimmer method).
     * <p>
     * This overload takes no input: you must call a "reset" method to set the
     * string which you want to parse.
     * </p>
     * @return a new tokenizer instance which parses Comma Separated Value strings
     */
    public static StrTokenizer getCSVInstance() {
        return getCSVClone();
    }
/**
* Gets a new tokenizer instance which parses Comma Separated Value strings
* initializing it with the given input. The default for CSV processing
* will be trim whitespace from both ends (which can be overridden with
* the setTrimmer method).
*
* @param input the text to parse
* @return a new tokenizer instance which parses Comma Separated Value strings
*/
public static StrTokenizer getCSVInstance(final char[] input) {
final StrTokenizer tok = getCSVClone();
tok.reset(input);
return tok;
}
/**
* Gets a new tokenizer instance which parses Comma Separated Value strings
* initializing it with the given input. The default for CSV processing
* will be trim whitespace from both ends (which can be overridden with
* the setTrimmer method).
*
* @param input the text to parse
* @return a new tokenizer instance which parses Comma Separated Value strings
*/
public static StrTokenizer getCSVInstance(final String input) {
final StrTokenizer tok = getCSVClone();
tok.reset(input);
return tok;
}
    /**
     * Returns a clone of {@code TSV_TOKENIZER_PROTOTYPE}.
     *
     * @return a clone of {@code TSV_TOKENIZER_PROTOTYPE}.
     */
    private static StrTokenizer getTSVClone() {
        return (StrTokenizer) TSV_TOKENIZER_PROTOTYPE.clone();
    }

    /**
     * Gets a new tokenizer instance which parses Tab Separated Value strings.
     * The default for TSV processing is to trim whitespace from both ends
     * (which can be overridden with the setTrimmer method).
     * <p>
     * This overload takes no input: you must call a "reset" method to set the
     * string which you want to parse.
     * </p>
     * @return a new tokenizer instance which parses Tab Separated Value strings.
     */
    public static StrTokenizer getTSVInstance() {
        return getTSVClone();
    }
/**
* Gets a new tokenizer instance which parses Tab Separated Value strings.
* The default for CSV processing will be trim whitespace from both ends
* (which can be overridden with the setTrimmer method).
* @param input the string to parse
* @return a new tokenizer instance which parses Tab Separated Value strings.
*/
public static StrTokenizer getTSVInstance(final char[] input) {
final StrTokenizer tok = getTSVClone();
tok.reset(input);
return tok;
}
/**
* Gets a new tokenizer instance which parses Tab Separated Value strings.
* The default for CSV processing will be trim whitespace from both ends
* (which can be overridden with the setTrimmer method).
* @param input the string to parse
* @return a new tokenizer instance which parses Tab Separated Value strings.
*/
public static StrTokenizer getTSVInstance(final String input) {
final StrTokenizer tok = getTSVClone();
tok.reset(input);
return tok;
}
    /** The text to work on; null until a reset/constructor supplies input. */
    private char[] chars;
    /** The parsed tokens; null until tokenization is performed lazily. */
    private String[] tokens;
    /** The current iteration position within {@code tokens}. */
    private int tokenPos;
    /** The delimiter matcher; defaults to whitespace splitting. */
    private StrMatcher delimMatcher = StrMatcher.splitMatcher();
    /** The quote matcher; defaults to no quoting. */
    private StrMatcher quoteMatcher = StrMatcher.noneMatcher();
    /** The ignored matcher; defaults to ignoring nothing. */
    private StrMatcher ignoredMatcher = StrMatcher.noneMatcher();
    /** The trimmer matcher; defaults to trimming nothing. */
    private StrMatcher trimmerMatcher = StrMatcher.noneMatcher();
    /** Whether to return empty tokens as null; defaults to false. */
    private boolean emptyAsNull;
    /** Whether to ignore empty tokens; defaults to true. */
    private boolean ignoreEmptyTokens = true;
    /**
     * Constructs a tokenizer splitting on space, tab, newline and form feed
     * as per StringTokenizer, but with no text to tokenize.
     * <p>
     * This constructor is normally used with {@link #reset(String)}.
     * </p>
     */
    public StrTokenizer() {
        // No input yet; checkTokenized() handles the null-chars case.
        this.chars = null;
    }
/**
* Constructs a tokenizer splitting on space, tab, newline and form feed
* as per StringTokenizer.
*
* @param input the string which is to be parsed, not cloned
*/
public StrTokenizer(final char[] input) {
if (input == null) {
this.chars = null;
} else {
this.chars = input.clone();
}
}
    /**
     * Constructs a tokenizer splitting on the specified character.
     *
     * @param input the character data to parse (cloned defensively by the delegated constructor)
     * @param delim the field delimiter character
     */
    public StrTokenizer(final char[] input, final char delim) {
        this(input);
        setDelimiterChar(delim);
    }

    /**
     * Constructs a tokenizer splitting on the specified delimiter character
     * and handling quotes using the specified quote character.
     *
     * @param input the character data to parse (cloned defensively by the delegated constructor)
     * @param delim the field delimiter character
     * @param quote the field quoted string character
     */
    public StrTokenizer(final char[] input, final char delim, final char quote) {
        this(input, delim);
        setQuoteChar(quote);
    }

    /**
     * Constructs a tokenizer splitting on the specified string.
     *
     * @param input the character data to parse (cloned defensively by the delegated constructor)
     * @param delim the field delimiter string
     */
    public StrTokenizer(final char[] input, final String delim) {
        this(input);
        setDelimiterString(delim);
    }

    /**
     * Constructs a tokenizer splitting using the specified delimiter matcher.
     *
     * @param input the character data to parse (cloned defensively by the delegated constructor)
     * @param delim the field delimiter matcher
     */
    public StrTokenizer(final char[] input, final StrMatcher delim) {
        this(input);
        setDelimiterMatcher(delim);
    }

    /**
     * Constructs a tokenizer splitting using the specified delimiter matcher
     * and handling quotes using the specified quote matcher.
     *
     * @param input the character data to parse (cloned defensively by the delegated constructor)
     * @param delim the field delimiter matcher
     * @param quote the field quoted string matcher
     */
    public StrTokenizer(final char[] input, final StrMatcher delim, final StrMatcher quote) {
        this(input, delim);
        setQuoteMatcher(quote);
    }
/**
* Constructs a tokenizer splitting on space, tab, newline and form feed
* as per StringTokenizer.
*
* @param input the string which is to be parsed
*/
public StrTokenizer(final String input) {
if (input != null) {
chars = input.toCharArray();
} else {
chars = null;
}
}
    /**
     * Constructs a tokenizer splitting on the specified delimiter character.
     *
     * @param input the string which is to be parsed, may be null
     * @param delim the field delimiter character
     */
    public StrTokenizer(final String input, final char delim) {
        this(input);
        setDelimiterChar(delim);
    }

    /**
     * Constructs a tokenizer splitting on the specified delimiter character
     * and handling quotes using the specified quote character.
     *
     * @param input the string which is to be parsed, may be null
     * @param delim the field delimiter character
     * @param quote the field quoted string character
     */
    public StrTokenizer(final String input, final char delim, final char quote) {
        this(input, delim);
        setQuoteChar(quote);
    }

    /**
     * Constructs a tokenizer splitting on the specified delimiter string.
     *
     * @param input the string which is to be parsed, may be null
     * @param delim the field delimiter string
     */
    public StrTokenizer(final String input, final String delim) {
        this(input);
        setDelimiterString(delim);
    }

    /**
     * Constructs a tokenizer splitting using the specified delimiter matcher.
     *
     * @param input the string which is to be parsed, may be null
     * @param delim the field delimiter matcher
     */
    public StrTokenizer(final String input, final StrMatcher delim) {
        this(input);
        setDelimiterMatcher(delim);
    }

    /**
     * Constructs a tokenizer splitting using the specified delimiter matcher
     * and handling quotes using the specified quote matcher.
     *
     * @param input the string which is to be parsed, may be null
     * @param delim the field delimiter matcher
     * @param quote the field quoted string matcher
     */
    public StrTokenizer(final String input, final StrMatcher delim, final StrMatcher quote) {
        this(input, delim);
        setQuoteMatcher(quote);
    }
    /**
     * Unsupported ListIterator operation: this tokenizer's token list is read-only.
     * @param obj this parameter ignored.
     * @throws UnsupportedOperationException always
     */
    @Override
    public void add(final String obj) {
        throw new UnsupportedOperationException("add() is unsupported");
    }
/**
* Adds a token to a list, paying attention to the parameters we've set.
*
* @param list the list to add to
* @param tok the token to add
*/
private void addToken(final List<String> list, String tok) {
if (tok == null || tok.isEmpty()) {
if (isIgnoreEmptyTokens()) {
return;
}
if (isEmptyTokenAsNull()) {
tok = null;
}
}
list.add(tok);
}
/**
* Checks if tokenization has been done, and if not then do it.
*/
private void checkTokenized() {
if (tokens == null) {
if (chars == null) {
// still call tokenize as subclass may do some work
final List<String> split = tokenize(null, 0, 0);
tokens = split.toArray(ArrayUtils.EMPTY_STRING_ARRAY);
} else {
final List<String> split = tokenize(chars, 0, chars.length);
tokens = split.toArray(ArrayUtils.EMPTY_STRING_ARRAY);
}
}
}
    /**
     * Creates a new instance of this Tokenizer. The new instance is reset so
     * that it will be at the start of the token list.
     * If a {@link CloneNotSupportedException} is caught, return {@code null}.
     *
     * @return a new instance of this Tokenizer which has been reset.
     */
    @Override
    public Object clone() {
        try {
            return cloneReset();
        } catch (final CloneNotSupportedException ex) {
            // Should not happen (Cloneable is implemented); contract says return null.
            return null;
        }
    }
    /**
     * Creates a new instance of this Tokenizer. The new instance is reset so that
     * it will be at the start of the token list.
     *
     * @return a new instance of this Tokenizer which has been reset.
     * @throws CloneNotSupportedException if there is a problem cloning
     */
    Object cloneReset() throws CloneNotSupportedException {
        // this method exists to enable 100% test coverage
        final StrTokenizer cloned = (StrTokenizer) super.clone();
        if (cloned.chars != null) {
            // Deep-copy the input so the clone and original do not share the array.
            cloned.chars = cloned.chars.clone();
        }
        cloned.reset();
        return cloned;
    }
/**
* Gets the String content that the tokenizer is parsing.
*
* @return The string content being parsed
*/
public String getContent() {
if (chars == null) {
return null;
}
return new String(chars);
}
    /**
     * Gets the field delimiter matcher.
     *
     * @return The delimiter matcher in use
     */
    public StrMatcher getDelimiterMatcher() {
        return this.delimMatcher;
    }

    /**
     * Gets the ignored character matcher.
     * <p>
     * These characters are ignored when parsing the String, unless they are
     * within a quoted region.
     * The default value is not to ignore anything.
     * </p>
     *
     * @return The ignored matcher in use
     */
    public StrMatcher getIgnoredMatcher() {
        return ignoredMatcher;
    }

    /**
     * Gets the quote matcher currently in use.
     * <p>
     * The quote character is used to wrap data between the tokens.
     * This enables delimiters to be entered as data.
     * The default for this class is no quoting (noneMatcher); the CSV/TSV
     * factory instances use the double-quote matcher.
     * </p>
     *
     * @return The quote matcher in use
     */
    public StrMatcher getQuoteMatcher() {
        return quoteMatcher;
    }

    /**
     * Gets a copy of the full token list as an independent modifiable array.
     *
     * @return The tokens as a String array
     */
    public String[] getTokenArray() {
        checkTokenized();
        return tokens.clone();
    }

    /**
     * Gets a copy of the full token list as an independent modifiable list.
     *
     * @return The tokens as a modifiable List of Strings
     */
    public List<String> getTokenList() {
        checkTokenized();
        final List<String> list = new ArrayList<>(tokens.length);
        Collections.addAll(list, tokens);
        return list;
    }

    /**
     * Gets the trimmer character matcher.
     * <p>
     * These characters are trimmed off on each side of the delimiter
     * until the token or quote is found.
     * The default value is not to trim anything.
     * </p>
     *
     * @return The trimmer matcher in use
     */
    public StrMatcher getTrimmerMatcher() {
        return trimmerMatcher;
    }
/**
* Checks whether there are any more tokens.
*
* @return true if there are more tokens
*/
@Override
public boolean hasNext() {
checkTokenized();
return tokenPos < tokens.length;
}
/**
* Checks whether there are any previous tokens that can be iterated to.
*
* @return true if there are previous tokens
*/
@Override
public boolean hasPrevious() {
checkTokenized();
return tokenPos > 0;
}
/**
* Gets whether the tokenizer currently returns empty tokens as null.
* The default for this property is false.
*
* @return true if empty tokens are returned as null
*/
public boolean isEmptyTokenAsNull() {
return this.emptyAsNull;
}
/**
* Gets whether the tokenizer currently ignores empty tokens.
* The default for this property is true.
*
* @return true if empty tokens are not returned
*/
public boolean isIgnoreEmptyTokens() {
return ignoreEmptyTokens;
}
/**
* Checks if the characters at the index specified match the quote
* already matched in readNextToken().
*
* @param srcChars the character array being tokenized
* @param pos the position to check for a quote
* @param len the length of the character array being tokenized
* @param quoteStart the start position of the matched quote, 0 if no quoting
* @param quoteLen the length of the matched quote, 0 if no quoting
* @return true if a quote is matched
*/
private boolean isQuote(final char[] srcChars,
final int pos,
final int len,
final int quoteStart,
final int quoteLen) {
for (int i = 0; i < quoteLen; i++) {
if (pos + i >= len || srcChars[pos + i] != srcChars[quoteStart + i]) {
return false;
}
}
return true;
}
/**
* Gets the next token.
*
* @return The next String token
* @throws NoSuchElementException if there are no more elements
*/
@Override
public String next() {
if (hasNext()) {
return tokens[tokenPos++];
}
throw new NoSuchElementException();
}
/**
* Gets the index of the next token to return.
*
* @return The next token index
*/
@Override
public int nextIndex() {
return tokenPos;
}
/**
* Gets the next token from the String.
* Equivalent to {@link #next()} except it returns null rather than
* throwing {@link NoSuchElementException} when no tokens remain.
*
* @return The next sequential token, or null when no more tokens are found
*/
public String nextToken() {
if (hasNext()) {
return tokens[tokenPos++];
}
return null;
}
/**
* Gets the token previous to the last returned token.
*
* @return The previous token
*/
@Override
public String previous() {
if (hasPrevious()) {
return tokens[--tokenPos];
}
throw new NoSuchElementException();
}
/**
* Gets the index of the previous token.
*
* @return The previous token index
*/
@Override
public int previousIndex() {
return tokenPos - 1;
}
/**
* Gets the previous token from the String.
*
* @return The previous sequential token, or null when no more tokens are found
*/
public String previousToken() {
if (hasPrevious()) {
return tokens[--tokenPos];
}
return null;
}
    /**
     * Reads character by character through the String to get the next token.
     * <p>
     * Leading ignored/trimmed characters are skipped first, then one of three
     * cases applies: end of input (emit empty token), immediate delimiter
     * (emit empty token), or token content — possibly quoted — which is
     * delegated to readWithQuotes().
     *
     * @param srcChars the character array being tokenized
     * @param start the first character of field
     * @param len the length of the character array being tokenized
     * @param workArea a temporary work area
     * @param tokenList the list of parsed tokens
     * @return The starting position of the next field (the character
     *  immediately after the delimiter), or -1 if end of string found
     */
    private int readNextToken(final char[] srcChars,
            int start,
            final int len,
            final StrBuilder workArea,
            final List<String> tokenList) {
        // skip all leading whitespace, unless it is the
        // field delimiter or the quote character
        while (start < len) {
            // consume the longer of an ignored run or a trimmed run
            final int removeLen = Math.max(
                    getIgnoredMatcher().isMatch(srcChars, start, start, len),
                    getTrimmerMatcher().isMatch(srcChars, start, start, len));
            // stop skipping at a delimiter or quote, or when nothing matched
            if (removeLen == 0
                    || getDelimiterMatcher().isMatch(srcChars, start, start, len) > 0
                    || getQuoteMatcher().isMatch(srcChars, start, start, len) > 0) {
                break;
            }
            start += removeLen;
        }
        // handle reaching end: the final field is empty
        if (start >= len) {
            addToken(tokenList, StringUtils.EMPTY);
            return -1;
        }
        // handle empty token: delimiter found with no content before it
        final int delimLen = getDelimiterMatcher().isMatch(srcChars, start, start, len);
        if (delimLen > 0) {
            addToken(tokenList, StringUtils.EMPTY);
            return start + delimLen;
        }
        // handle found token; pass quote bounds so embedded quotes can be matched
        final int quoteLen = getQuoteMatcher().isMatch(srcChars, start, start, len);
        if (quoteLen > 0) {
            return readWithQuotes(srcChars, start + quoteLen, len, workArea, tokenList, start, quoteLen);
        }
        return readWithQuotes(srcChars, start, len, workArea, tokenList, 0, 0);
    }
    /**
     * Reads a possibly quoted string token.
     * <p>
     * Alternates between quoting and non-quoting modes: inside quotes,
     * delimiters are treated as data and a doubled quote is an escaped
     * literal quote; outside quotes, delimiters end the token and trimmed
     * characters are buffered so trailing trim can be dropped via trimStart.
     *
     * @param srcChars the character array being tokenized
     * @param start the first character of field
     * @param len the length of the character array being tokenized
     * @param workArea a temporary work area
     * @param tokenList the list of parsed tokens
     * @param quoteStart the start position of the matched quote, 0 if no quoting
     * @param quoteLen the length of the matched quote, 0 if no quoting
     * @return The starting position of the next field (the character
     *  immediately after the delimiter, or if end of string found,
     *  then the length of string
     */
    private int readWithQuotes(final char[] srcChars, final int start, final int len, final StrBuilder workArea,
                               final List<String> tokenList, final int quoteStart, final int quoteLen) {
        // Loop until we've found the end of the quoted
        // string or the end of the input
        workArea.clear();
        int pos = start;
        boolean quoting = quoteLen > 0;
        // trimStart marks the end of the confirmed (non-trailing-trim) content
        int trimStart = 0;
        while (pos < len) {
            // quoting mode can occur several times throughout a string
            // we must switch between quoting and non-quoting until we
            // encounter a non-quoted delimiter, or end of string
            if (quoting) {
                // In quoting mode
                // If we've found a quote character, see if it's
                // followed by a second quote. If so, then we need
                // to actually put the quote character into the token
                // rather than end the token.
                if (isQuote(srcChars, pos, len, quoteStart, quoteLen)) {
                    if (isQuote(srcChars, pos + quoteLen, len, quoteStart, quoteLen)) {
                        // matched pair of quotes, thus an escaped quote
                        workArea.append(srcChars, pos, quoteLen);
                        pos += quoteLen * 2;
                        trimStart = workArea.size();
                        continue;
                    }
                    // end of quoting
                    quoting = false;
                    pos += quoteLen;
                    continue;
                }
            } else {
                // Not in quoting mode
                // check for delimiter, and thus end of token
                final int delimLen = getDelimiterMatcher().isMatch(srcChars, pos, start, len);
                if (delimLen > 0) {
                    // return condition when end of token found
                    addToken(tokenList, workArea.substring(0, trimStart));
                    return pos + delimLen;
                }
                // check for quote, and thus back into quoting mode
                if (quoteLen > 0 && isQuote(srcChars, pos, len, quoteStart, quoteLen)) {
                    quoting = true;
                    pos += quoteLen;
                    continue;
                }
                // check for ignored (outside quotes), and ignore
                final int ignoredLen = getIgnoredMatcher().isMatch(srcChars, pos, start, len);
                if (ignoredLen > 0) {
                    pos += ignoredLen;
                    continue;
                }
                // check for trimmed character
                // don't yet know if its at the end, so copy to workArea
                // use trimStart to keep track of trim at the end
                final int trimmedLen = getTrimmerMatcher().isMatch(srcChars, pos, start, len);
                if (trimmedLen > 0) {
                    workArea.append(srcChars, pos, trimmedLen);
                    pos += trimmedLen;
                    continue;
                }
            }
            // copy regular character from inside quotes
            workArea.append(srcChars[pos++]);
            // a regular character confirms all buffered content as real token data
            trimStart = workArea.size();
        }
        // return condition when end of string found
        addToken(tokenList, workArea.substring(0, trimStart));
        return -1;
    }
/**
* Unsupported ListIterator operation.
*
* @throws UnsupportedOperationException always
*/
@Override
public void remove() {
throw new UnsupportedOperationException("remove() is unsupported");
}
/**
* Resets this tokenizer, forgetting all parsing and iteration already completed.
* <p>
* This method allows the same tokenizer to be reused for the same String.
*
* @return this, to enable chaining
*/
public StrTokenizer reset() {
tokenPos = 0;
tokens = null;
return this;
}
/**
* Reset this tokenizer, giving it a new input string to parse.
* In this manner you can re-use a tokenizer with the same settings
* on multiple input lines.
*
* @param input the new character array to tokenize, not cloned, null sets no text to parse
* @return this, to enable chaining
*/
public StrTokenizer reset(final char[] input) {
reset();
if (input != null) {
this.chars = input.clone();
} else {
this.chars = null;
}
return this;
}
/**
* Reset this tokenizer, giving it a new input string to parse.
* In this manner you can re-use a tokenizer with the same settings
* on multiple input lines.
*
* @param input the new string to tokenize, null sets no text to parse
* @return this, to enable chaining
*/
public StrTokenizer reset(final String input) {
reset();
if (input != null) {
this.chars = input.toCharArray();
} else {
this.chars = null;
}
return this;
}
/**
* Unsupported ListIterator operation.
* @param obj this parameter ignored.
* @throws UnsupportedOperationException always
*/
@Override
public void set(final String obj) {
throw new UnsupportedOperationException("set() is unsupported");
}
/**
* Sets the field delimiter character.
*
* @param delim the delimiter character to use
* @return this, to enable chaining
*/
public StrTokenizer setDelimiterChar(final char delim) {
return setDelimiterMatcher(StrMatcher.charMatcher(delim));
}
/**
* Sets the field delimiter matcher.
* <p>
* The delimiter is used to separate one token from another.
* </p>
*
* @param delim the delimiter matcher to use
* @return this, to enable chaining
*/
public StrTokenizer setDelimiterMatcher(final StrMatcher delim) {
if (delim == null) {
this.delimMatcher = StrMatcher.noneMatcher();
} else {
this.delimMatcher = delim;
}
return this;
}
/**
* Sets the field delimiter string.
*
* @param delim the delimiter string to use
* @return this, to enable chaining
*/
public StrTokenizer setDelimiterString(final String delim) {
return setDelimiterMatcher(StrMatcher.stringMatcher(delim));
}
/**
* Sets whether the tokenizer should return empty tokens as null.
* The default for this property is false.
*
* @param emptyAsNull whether empty tokens are returned as null
* @return this, to enable chaining
*/
public StrTokenizer setEmptyTokenAsNull(final boolean emptyAsNull) {
this.emptyAsNull = emptyAsNull;
return this;
}
/**
* Sets the character to ignore.
* <p>
* This character is ignored when parsing the String, unless it is
* within a quoted region.
* </p>
*
* @param ignored the ignored character to use
* @return this, to enable chaining
*/
public StrTokenizer setIgnoredChar(final char ignored) {
return setIgnoredMatcher(StrMatcher.charMatcher(ignored));
}
/**
* Sets the matcher for characters to ignore.
* <p>
* These characters are ignored when parsing the String, unless they are
* within a quoted region.
* </p>
*
* @param ignored the ignored matcher to use, null ignored
* @return this, to enable chaining
*/
public StrTokenizer setIgnoredMatcher(final StrMatcher ignored) {
if (ignored != null) {
this.ignoredMatcher = ignored;
}
return this;
}
/**
* Sets whether the tokenizer should ignore and not return empty tokens.
* The default for this property is true.
*
* @param ignoreEmptyTokens whether empty tokens are not returned
* @return this, to enable chaining
*/
public StrTokenizer setIgnoreEmptyTokens(final boolean ignoreEmptyTokens) {
this.ignoreEmptyTokens = ignoreEmptyTokens;
return this;
}
/**
* Sets the quote character to use.
* <p>
* The quote character is used to wrap data between the tokens.
* This enables delimiters to be entered as data.
* </p>
*
* @param quote the quote character to use
* @return this, to enable chaining
*/
public StrTokenizer setQuoteChar(final char quote) {
return setQuoteMatcher(StrMatcher.charMatcher(quote));
}
/**
* Sets the quote matcher to use.
* <p>
* The quote character is used to wrap data between the tokens.
* This enables delimiters to be entered as data.
* </p>
*
* @param quote the quote matcher to use, null ignored
* @return this, to enable chaining
*/
public StrTokenizer setQuoteMatcher(final StrMatcher quote) {
if (quote != null) {
this.quoteMatcher = quote;
}
return this;
}
/**
* Sets the matcher for characters to trim.
* <p>
* These characters are trimmed off on each side of the delimiter
* until the token or quote is found.
* </p>
*
* @param trimmer the trimmer matcher to use, null ignored
* @return this, to enable chaining
*/
public StrTokenizer setTrimmerMatcher(final StrMatcher trimmer) {
if (trimmer != null) {
this.trimmerMatcher = trimmer;
}
return this;
}
/**
* Gets the number of tokens found in the String.
*
* @return The number of matched tokens
*/
public int size() {
checkTokenized();
return tokens.length;
}
    /**
     * Internal method that performs the tokenization.
     * <p>
     * Most users of this class do not need to call this method. This method
     * will be called automatically by other (public) methods when required.
     * </p>
     * <p>
     * This method exists to allow subclasses to add code before or after the
     * tokenization. For example, a subclass could alter the character array,
     * offset or count to be parsed, or call the tokenizer multiple times on
     * multiple strings. It is also possible to filter the results.
     * </p>
     * <p>
     * {@code StrTokenizer} will always pass a zero offset and a count
     * equal to the length of the array to this method, however a subclass
     * may pass other values, or even an entirely different array.
     * </p>
     *
     * @param srcChars the character array being tokenized, may be null
     * @param offset the start position within the character array, must be valid
     * @param count the number of characters to tokenize, must be valid
     * @return The modifiable list of String tokens, unmodifiable if null array or zero count
     */
    protected List<String> tokenize(final char[] srcChars, final int offset, final int count) {
        if (srcChars == null || count == 0) {
            return Collections.emptyList();
        }
        final StrBuilder buf = new StrBuilder();
        final List<String> tokenList = new ArrayList<>();
        int pos = offset;
        // loop around the entire buffer; readNextToken returns -1 at end of input
        while (pos >= 0 && pos < count) {
            // find next token
            pos = readNextToken(srcChars, pos, count, buf, tokenList);
            // handle case where end of string is a delimiter
            // (a trailing delimiter implies one final empty field)
            if (pos >= count) {
                addToken(tokenList, StringUtils.EMPTY);
            }
        }
        return tokenList;
    }
/**
* Gets the String content that the tokenizer is parsing.
*
* @return The string content being parsed
*/
@Override
public String toString() {
if (tokens == null) {
return "StrTokenizer[not tokenized yet]";
}
return "StrTokenizer" + getTokenList();
}
}
|
google/ExoPlayer | 37,185 | library/core/src/main/java/com/google/android/exoplayer2/mediacodec/MediaCodecInfo.java | /*
* Copyright (C) 2016 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.android.exoplayer2.mediacodec;
import static com.google.android.exoplayer2.decoder.DecoderReuseEvaluation.DISCARD_REASON_AUDIO_CHANNEL_COUNT_CHANGED;
import static com.google.android.exoplayer2.decoder.DecoderReuseEvaluation.DISCARD_REASON_AUDIO_ENCODING_CHANGED;
import static com.google.android.exoplayer2.decoder.DecoderReuseEvaluation.DISCARD_REASON_AUDIO_SAMPLE_RATE_CHANGED;
import static com.google.android.exoplayer2.decoder.DecoderReuseEvaluation.DISCARD_REASON_INITIALIZATION_DATA_CHANGED;
import static com.google.android.exoplayer2.decoder.DecoderReuseEvaluation.DISCARD_REASON_MIME_TYPE_CHANGED;
import static com.google.android.exoplayer2.decoder.DecoderReuseEvaluation.DISCARD_REASON_VIDEO_COLOR_INFO_CHANGED;
import static com.google.android.exoplayer2.decoder.DecoderReuseEvaluation.DISCARD_REASON_VIDEO_RESOLUTION_CHANGED;
import static com.google.android.exoplayer2.decoder.DecoderReuseEvaluation.DISCARD_REASON_VIDEO_ROTATION_CHANGED;
import static com.google.android.exoplayer2.decoder.DecoderReuseEvaluation.DISCARD_REASON_WORKAROUND;
import static com.google.android.exoplayer2.decoder.DecoderReuseEvaluation.REUSE_RESULT_NO;
import static com.google.android.exoplayer2.decoder.DecoderReuseEvaluation.REUSE_RESULT_YES_WITHOUT_RECONFIGURATION;
import static com.google.android.exoplayer2.decoder.DecoderReuseEvaluation.REUSE_RESULT_YES_WITH_FLUSH;
import static com.google.android.exoplayer2.decoder.DecoderReuseEvaluation.REUSE_RESULT_YES_WITH_RECONFIGURATION;
import static java.lang.annotation.ElementType.TYPE_USE;
import android.graphics.Point;
import android.media.MediaCodec;
import android.media.MediaCodecInfo.AudioCapabilities;
import android.media.MediaCodecInfo.CodecCapabilities;
import android.media.MediaCodecInfo.CodecProfileLevel;
import android.media.MediaCodecInfo.VideoCapabilities;
import android.media.MediaCodecInfo.VideoCapabilities.PerformancePoint;
import android.util.Pair;
import androidx.annotation.DoNotInline;
import androidx.annotation.IntDef;
import androidx.annotation.Nullable;
import androidx.annotation.RequiresApi;
import androidx.annotation.VisibleForTesting;
import com.google.android.exoplayer2.Format;
import com.google.android.exoplayer2.decoder.DecoderReuseEvaluation;
import com.google.android.exoplayer2.decoder.DecoderReuseEvaluation.DecoderDiscardReasons;
import com.google.android.exoplayer2.decoder.DecoderReuseEvaluation.DecoderReuseResult;
import com.google.android.exoplayer2.util.Assertions;
import com.google.android.exoplayer2.util.Log;
import com.google.android.exoplayer2.util.MimeTypes;
import com.google.android.exoplayer2.util.Util;
import java.lang.annotation.Documented;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
import java.lang.annotation.Target;
import java.util.List;
/**
* Information about a {@link MediaCodec} for a given MIME type.
*
* @deprecated com.google.android.exoplayer2 is deprecated. Please migrate to androidx.media3 (which
* contains the same ExoPlayer code). See <a
* href="https://developer.android.com/guide/topics/media/media3/getting-started/migration-guide">the
* migration guide</a> for more details, including a script to help with the migration.
*/
@SuppressWarnings("InlinedApi")
@Deprecated
public final class MediaCodecInfo {
public static final String TAG = "MediaCodecInfo";
/**
* The value returned by {@link #getMaxSupportedInstances()} if the upper bound on the maximum
* number of supported instances is unknown.
*/
public static final int MAX_SUPPORTED_INSTANCES_UNKNOWN = -1;
/**
* The name of the decoder.
*
* <p>May be passed to {@link MediaCodec#createByCodecName(String)} to create an instance of the
* decoder.
*/
public final String name;
/** The MIME type handled by the codec. */
public final String mimeType;
/**
* The MIME type that the codec uses for media of type {@link #mimeType}. Equal to {@link
* #mimeType} unless the codec is known to use a non-standard MIME type alias.
*/
public final String codecMimeType;
/**
* The capabilities of the decoder, like the profiles/levels it supports, or {@code null} if not
* known.
*/
@Nullable public final CodecCapabilities capabilities;
/**
* Whether the decoder supports seamless resolution switches.
*
* @see CodecCapabilities#isFeatureSupported(String)
* @see CodecCapabilities#FEATURE_AdaptivePlayback
*/
public final boolean adaptive;
/**
* Whether the decoder supports tunneling.
*
* @see CodecCapabilities#isFeatureSupported(String)
* @see CodecCapabilities#FEATURE_TunneledPlayback
*/
public final boolean tunneling;
/**
* Whether the decoder is secure.
*
* @see CodecCapabilities#isFeatureSupported(String)
* @see CodecCapabilities#FEATURE_SecurePlayback
*/
public final boolean secure;
/**
* Whether the codec is hardware accelerated.
*
* <p>This could be an approximation as the exact information is only provided in API levels 29+.
*
* @see android.media.MediaCodecInfo#isHardwareAccelerated()
*/
public final boolean hardwareAccelerated;
/**
* Whether the codec is software only.
*
* <p>This could be an approximation as the exact information is only provided in API levels 29+.
*
* @see android.media.MediaCodecInfo#isSoftwareOnly()
*/
public final boolean softwareOnly;
/**
* Whether the codec is from the vendor.
*
* <p>This could be an approximation as the exact information is only provided in API levels 29+.
*
* @see android.media.MediaCodecInfo#isVendor()
*/
public final boolean vendor;
private final boolean isVideo;
/**
* Creates an instance.
*
* @param name The name of the {@link MediaCodec}.
* @param mimeType A MIME type supported by the {@link MediaCodec}.
* @param codecMimeType The MIME type that the codec uses for media of type {@code #mimeType}.
* Equal to {@code mimeType} unless the codec is known to use a non-standard MIME type alias.
* @param capabilities The capabilities of the {@link MediaCodec} for the specified MIME type, or
* {@code null} if not known.
* @param hardwareAccelerated Whether the {@link MediaCodec} is hardware accelerated.
* @param softwareOnly Whether the {@link MediaCodec} is software only.
* @param vendor Whether the {@link MediaCodec} is provided by the vendor.
* @param forceDisableAdaptive Whether {@link #adaptive} should be forced to {@code false}.
* @param forceSecure Whether {@link #secure} should be forced to {@code true}.
* @return The created instance.
*/
public static MediaCodecInfo newInstance(
String name,
String mimeType,
String codecMimeType,
@Nullable CodecCapabilities capabilities,
boolean hardwareAccelerated,
boolean softwareOnly,
boolean vendor,
boolean forceDisableAdaptive,
boolean forceSecure) {
return new MediaCodecInfo(
name,
mimeType,
codecMimeType,
capabilities,
hardwareAccelerated,
softwareOnly,
vendor,
/* adaptive= */ !forceDisableAdaptive
&& capabilities != null
&& isAdaptive(capabilities)
&& !needsDisableAdaptationWorkaround(name),
/* tunneling= */ capabilities != null && isTunneling(capabilities),
/* secure= */ forceSecure || (capabilities != null && isSecure(capabilities)));
}
@VisibleForTesting
/* package */ MediaCodecInfo(
String name,
String mimeType,
String codecMimeType,
@Nullable CodecCapabilities capabilities,
boolean hardwareAccelerated,
boolean softwareOnly,
boolean vendor,
boolean adaptive,
boolean tunneling,
boolean secure) {
this.name = Assertions.checkNotNull(name);
this.mimeType = mimeType;
this.codecMimeType = codecMimeType;
this.capabilities = capabilities;
this.hardwareAccelerated = hardwareAccelerated;
this.softwareOnly = softwareOnly;
this.vendor = vendor;
this.adaptive = adaptive;
this.tunneling = tunneling;
this.secure = secure;
isVideo = MimeTypes.isVideo(mimeType);
}
@Override
public String toString() {
return name;
}
/**
* The profile levels supported by the decoder.
*
* @return The profile levels supported by the decoder.
*/
public CodecProfileLevel[] getProfileLevels() {
return capabilities == null || capabilities.profileLevels == null
? new CodecProfileLevel[0]
: capabilities.profileLevels;
}
/**
* Returns an upper bound on the maximum number of supported instances, or {@link
* #MAX_SUPPORTED_INSTANCES_UNKNOWN} if unknown. Applications should not expect to operate more
* instances than the returned maximum.
*
* @see CodecCapabilities#getMaxSupportedInstances()
*/
public int getMaxSupportedInstances() {
if (Util.SDK_INT < 23 || capabilities == null) {
return MAX_SUPPORTED_INSTANCES_UNKNOWN;
}
return getMaxSupportedInstancesV23(capabilities);
}
  /**
   * Returns whether the decoder may support decoding the given {@code format} both functionally and
   * performantly.
   *
   * <p>Checks MIME type, then codec profile/level, then (for video) size and frame rate, or (for
   * audio) sample rate and channel count where the platform exposes that information.
   *
   * @param format The input media format.
   * @return Whether the decoder may support decoding the given {@code format}.
   * @throws MediaCodecUtil.DecoderQueryException Thrown if an error occurs while querying decoders.
   */
  public boolean isFormatSupported(Format format) throws MediaCodecUtil.DecoderQueryException {
    if (!isSampleMimeTypeSupported(format)) {
      return false;
    }
    if (!isCodecProfileAndLevelSupported(format, /* checkPerformanceCapabilities= */ true)) {
      return false;
    }
    if (isVideo) {
      // Resolution unknown: assume supported rather than rejecting.
      if (format.width <= 0 || format.height <= 0) {
        return true;
      }
      if (Util.SDK_INT >= 21) {
        return isVideoSizeAndRateSupportedV21(format.width, format.height, format.frameRate);
      } else {
        // Pre-21 there is no size/rate query; approximate with the H.264 max frame size.
        boolean isFormatSupported =
            format.width * format.height <= MediaCodecUtil.maxH264DecodableFrameSize();
        if (!isFormatSupported) {
          logNoSupport("legacyFrameSize, " + format.width + "x" + format.height);
        }
        return isFormatSupported;
      }
    } else { // Audio
      // Pre-21 audio capabilities cannot be queried; assume supported.
      return Util.SDK_INT < 21
          || ((format.sampleRate == Format.NO_VALUE
                  || isAudioSampleRateSupportedV21(format.sampleRate))
              && (format.channelCount == Format.NO_VALUE
                  || isAudioChannelCountSupportedV21(format.channelCount)));
    }
  }
/**
* Returns whether the decoder may functionally support decoding the given {@code format}.
*
* @param format The input media format.
* @return Whether the decoder may functionally support decoding the given {@code format}.
*/
public boolean isFormatFunctionallySupported(Format format) {
return isSampleMimeTypeSupported(format)
&& isCodecProfileAndLevelSupported(format, /* checkPerformanceCapabilities= */ false);
}
private boolean isSampleMimeTypeSupported(Format format) {
return mimeType.equals(format.sampleMimeType)
|| mimeType.equals(MediaCodecUtil.getAlternativeCodecMimeType(format));
}
  /**
   * Whether the codec advertises support for the format's profile (and, when {@code
   * checkPerformanceCapabilities} is set, its level).
   *
   * <p>Dolby Vision formats decoded by H.264/H.265 codecs are mapped to the corresponding base
   * layer profile first. Audio formats other than xHE-AAC are assumed supported because devices
   * commonly underreport audio capabilities.
   */
  private boolean isCodecProfileAndLevelSupported(
      Format format, boolean checkPerformanceCapabilities) {
    Pair<Integer, Integer> codecProfileAndLevel = MediaCodecUtil.getCodecProfileAndLevel(format);
    if (codecProfileAndLevel == null) {
      // If we don't know any better, we assume that the profile and level are supported.
      return true;
    }
    int profile = codecProfileAndLevel.first;
    int level = codecProfileAndLevel.second;
    if (MimeTypes.VIDEO_DOLBY_VISION.equals(format.sampleMimeType)) {
      // If this codec is H264 or H265, we only support the Dolby Vision base layer and need to map
      // the Dolby Vision profile to the corresponding base layer profile. Also assume all levels of
      // this base layer profile are supported.
      if (MimeTypes.VIDEO_H264.equals(mimeType)) {
        profile = CodecProfileLevel.AVCProfileHigh;
        level = 0;
      } else if (MimeTypes.VIDEO_H265.equals(mimeType)) {
        profile = CodecProfileLevel.HEVCProfileMain10;
        level = 0;
      }
    }
    if (!isVideo && profile != CodecProfileLevel.AACObjectXHE) {
      // Some devices/builds underreport audio capabilities, so assume support except for xHE-AAC
      // which may not be widely supported. See https://github.com/google/ExoPlayer/issues/5145.
      return true;
    }
    CodecProfileLevel[] profileLevels = getProfileLevels();
    if (Util.SDK_INT <= 23 && MimeTypes.VIDEO_VP9.equals(mimeType) && profileLevels.length == 0) {
      // Some older devices don't report profile levels for VP9. Estimate them using other data in
      // the codec capabilities.
      profileLevels = estimateLegacyVp9ProfileLevels(capabilities);
    }
    // Accept if any advertised pair matches the profile (level check only when requested) and the
    // profile isn't excluded by a device-specific workaround.
    for (CodecProfileLevel profileLevel : profileLevels) {
      if (profileLevel.profile == profile
          && (profileLevel.level >= level || !checkPerformanceCapabilities)
          && !needsProfileExcludedWorkaround(mimeType, profile)) {
        return true;
      }
    }
    logNoSupport("codec.profileLevel, " + format.codecs + ", " + codecMimeType);
    return false;
  }
/** Whether the codec handles HDR10+ out-of-band metadata. */
public boolean isHdr10PlusOutOfBandMetadataSupported() {
if (Util.SDK_INT >= 29 && MimeTypes.VIDEO_VP9.equals(mimeType)) {
for (CodecProfileLevel capabilities : getProfileLevels()) {
if (capabilities.profile == CodecProfileLevel.VP9Profile2HDR10Plus) {
return true;
}
}
}
return false;
}
/**
* Returns whether it may be possible to adapt an instance of this decoder to playing a different
* format when the codec is configured to play media in the specified {@code format}.
*
* <p>For adaptation to succeed, the codec must also be configured with appropriate maximum values
* and {@link #isSeamlessAdaptationSupported(Format, Format, boolean)} must return {@code true}
* for the old/new formats.
*
* @param format The format of media for which the decoder will be configured.
* @return Whether adaptation may be possible
*/
public boolean isSeamlessAdaptationSupported(Format format) {
if (isVideo) {
return adaptive;
} else {
Pair<Integer, Integer> profileLevel = MediaCodecUtil.getCodecProfileAndLevel(format);
return profileLevel != null && profileLevel.first == CodecProfileLevel.AACObjectXHE;
}
}
/**
* Returns whether it is possible to adapt an instance of this decoder seamlessly from {@code
* oldFormat} to {@code newFormat}. If {@code newFormat} may not be completely populated, pass
* {@code false} for {@code isNewFormatComplete}.
*
* <p>For adaptation to succeed, the codec must also be configured with maximum values that are
* compatible with the new format.
*
* @param oldFormat The format being decoded.
* @param newFormat The new format.
* @param isNewFormatComplete Whether {@code newFormat} is populated with format-specific
* metadata.
* @return Whether it is possible to adapt the decoder seamlessly.
* @deprecated Use {@link #canReuseCodec}.
*/
@Deprecated
public boolean isSeamlessAdaptationSupported(
Format oldFormat, Format newFormat, boolean isNewFormatComplete) {
if (!isNewFormatComplete && oldFormat.colorInfo != null && newFormat.colorInfo == null) {
newFormat = newFormat.buildUpon().setColorInfo(oldFormat.colorInfo).build();
}
@DecoderReuseResult int reuseResult = canReuseCodec(oldFormat, newFormat).result;
return reuseResult == REUSE_RESULT_YES_WITH_RECONFIGURATION
|| reuseResult == REUSE_RESULT_YES_WITHOUT_RECONFIGURATION;
}
  /**
   * Evaluates whether it's possible to reuse an instance of this decoder that's currently decoding
   * {@code oldFormat} to decode {@code newFormat} instead.
   *
   * <p>For adaptation to succeed, the codec must also be configured with maximum values that are
   * compatible with the new format.
   *
   * <p>Discard reasons are accumulated as a bitmask; a zero mask at the decision points below
   * means the codec can be kept (with or without reconfiguration/flush).
   *
   * @param oldFormat The format being decoded.
   * @param newFormat The new format.
   * @return The result of the evaluation.
   */
  public DecoderReuseEvaluation canReuseCodec(Format oldFormat, Format newFormat) {
    @DecoderDiscardReasons int discardReasons = 0;
    if (!Util.areEqual(oldFormat.sampleMimeType, newFormat.sampleMimeType)) {
      discardReasons |= DISCARD_REASON_MIME_TYPE_CHANGED;
    }
    if (isVideo) {
      if (oldFormat.rotationDegrees != newFormat.rotationDegrees) {
        discardReasons |= DISCARD_REASON_VIDEO_ROTATION_CHANGED;
      }
      // Non-adaptive codecs cannot change resolution without being discarded.
      if (!adaptive
          && (oldFormat.width != newFormat.width || oldFormat.height != newFormat.height)) {
        discardReasons |= DISCARD_REASON_VIDEO_RESOLUTION_CHANGED;
      }
      if (!Util.areEqual(oldFormat.colorInfo, newFormat.colorInfo)) {
        discardReasons |= DISCARD_REASON_VIDEO_COLOR_INFO_CHANGED;
      }
      if (needsAdaptationReconfigureWorkaround(name)
          && !oldFormat.initializationDataEquals(newFormat)) {
        discardReasons |= DISCARD_REASON_WORKAROUND;
      }
      if (discardReasons == 0) {
        // Reusable; reconfiguration is only needed if initialization data changed.
        return new DecoderReuseEvaluation(
            name,
            oldFormat,
            newFormat,
            oldFormat.initializationDataEquals(newFormat)
                ? REUSE_RESULT_YES_WITHOUT_RECONFIGURATION
                : REUSE_RESULT_YES_WITH_RECONFIGURATION,
            /* discardReasons= */ 0);
      }
    } else {
      if (oldFormat.channelCount != newFormat.channelCount) {
        discardReasons |= DISCARD_REASON_AUDIO_CHANNEL_COUNT_CHANGED;
      }
      if (oldFormat.sampleRate != newFormat.sampleRate) {
        discardReasons |= DISCARD_REASON_AUDIO_SAMPLE_RATE_CHANGED;
      }
      if (oldFormat.pcmEncoding != newFormat.pcmEncoding) {
        discardReasons |= DISCARD_REASON_AUDIO_ENCODING_CHANGED;
      }
      // Check whether we're adapting between two xHE-AAC formats, for which adaptation is possible
      // without reconfiguration or flushing.
      if (discardReasons == 0 && MimeTypes.AUDIO_AAC.equals(mimeType)) {
        @Nullable
        Pair<Integer, Integer> oldCodecProfileLevel =
            MediaCodecUtil.getCodecProfileAndLevel(oldFormat);
        @Nullable
        Pair<Integer, Integer> newCodecProfileLevel =
            MediaCodecUtil.getCodecProfileAndLevel(newFormat);
        if (oldCodecProfileLevel != null && newCodecProfileLevel != null) {
          int oldProfile = oldCodecProfileLevel.first;
          int newProfile = newCodecProfileLevel.first;
          if (oldProfile == CodecProfileLevel.AACObjectXHE
              && newProfile == CodecProfileLevel.AACObjectXHE) {
            return new DecoderReuseEvaluation(
                name,
                oldFormat,
                newFormat,
                REUSE_RESULT_YES_WITHOUT_RECONFIGURATION,
                /* discardReasons= */ 0);
          }
        }
      }
      if (!oldFormat.initializationDataEquals(newFormat)) {
        discardReasons |= DISCARD_REASON_INITIALIZATION_DATA_CHANGED;
      }
      if (needsAdaptationFlushWorkaround(mimeType)) {
        discardReasons |= DISCARD_REASON_WORKAROUND;
      }
      if (discardReasons == 0) {
        // Audio reuse always requires a flush (unlike the video path above).
        return new DecoderReuseEvaluation(
            name, oldFormat, newFormat, REUSE_RESULT_YES_WITH_FLUSH, /* discardReasons= */ 0);
      }
    }
    return new DecoderReuseEvaluation(name, oldFormat, newFormat, REUSE_RESULT_NO, discardReasons);
  }
/**
* Whether the decoder supports video with a given width, height and frame rate.
*
* @param width Width in pixels.
* @param height Height in pixels.
* @param frameRate Optional frame rate in frames per second. Ignored if set to {@link
* Format#NO_VALUE} or any value less than or equal to 0.
* @return Whether the decoder supports video with the given width, height and frame rate.
*/
@RequiresApi(21)
public boolean isVideoSizeAndRateSupportedV21(int width, int height, double frameRate) {
if (capabilities == null) {
logNoSupport("sizeAndRate.caps");
return false;
}
VideoCapabilities videoCapabilities = capabilities.getVideoCapabilities();
if (videoCapabilities == null) {
logNoSupport("sizeAndRate.vCaps");
return false;
}
if (Util.SDK_INT >= 29) {
@PerformancePointCoverageResult
int evaluation =
Api29.areResolutionAndFrameRateCovered(videoCapabilities, width, height, frameRate);
if (evaluation == COVERAGE_RESULT_YES) {
return true;
} else if (evaluation == COVERAGE_RESULT_NO) {
logNoSupport("sizeAndRate.cover, " + width + "x" + height + "@" + frameRate);
return false;
}
// COVERAGE_RESULT_NO_EMPTY_LIST falls through to API 21+ code below
}
if (!areSizeAndRateSupportedV21(videoCapabilities, width, height, frameRate)) {
if (width >= height
|| !needsRotatedVerticalResolutionWorkaround(name)
|| !areSizeAndRateSupportedV21(videoCapabilities, height, width, frameRate)) {
logNoSupport("sizeAndRate.support, " + width + "x" + height + "@" + frameRate);
return false;
}
logAssumedSupport("sizeAndRate.rotated, " + width + "x" + height + "@" + frameRate);
}
return true;
}
/**
* Returns the smallest video size greater than or equal to a specified size that also satisfies
* the {@link MediaCodec}'s width and height alignment requirements.
*
* <p>Must not be called if the device SDK version is less than 21.
*
* @param width Width in pixels.
* @param height Height in pixels.
* @return The smallest video size greater than or equal to the specified size that also satisfies
* the {@link MediaCodec}'s width and height alignment requirements, or null if not a video
* codec.
*/
@Nullable
@RequiresApi(21)
public Point alignVideoSizeV21(int width, int height) {
if (capabilities == null) {
return null;
}
VideoCapabilities videoCapabilities = capabilities.getVideoCapabilities();
if (videoCapabilities == null) {
return null;
}
return alignVideoSizeV21(videoCapabilities, width, height);
}
/**
* Whether the decoder supports audio with a given sample rate.
*
* <p>Must not be called if the device SDK version is less than 21.
*
* @param sampleRate The sample rate in Hz.
* @return Whether the decoder supports audio with the given sample rate.
*/
@RequiresApi(21)
public boolean isAudioSampleRateSupportedV21(int sampleRate) {
if (capabilities == null) {
logNoSupport("sampleRate.caps");
return false;
}
AudioCapabilities audioCapabilities = capabilities.getAudioCapabilities();
if (audioCapabilities == null) {
logNoSupport("sampleRate.aCaps");
return false;
}
if (!audioCapabilities.isSampleRateSupported(sampleRate)) {
logNoSupport("sampleRate.support, " + sampleRate);
return false;
}
return true;
}
/**
* Whether the decoder supports audio with a given channel count.
*
* <p>Must not be called if the device SDK version is less than 21.
*
* @param channelCount The channel count.
* @return Whether the decoder supports audio with the given channel count.
*/
@RequiresApi(21)
public boolean isAudioChannelCountSupportedV21(int channelCount) {
if (capabilities == null) {
logNoSupport("channelCount.caps");
return false;
}
AudioCapabilities audioCapabilities = capabilities.getAudioCapabilities();
if (audioCapabilities == null) {
logNoSupport("channelCount.aCaps");
return false;
}
int maxInputChannelCount =
adjustMaxInputChannelCount(name, mimeType, audioCapabilities.getMaxInputChannelCount());
if (maxInputChannelCount < channelCount) {
logNoSupport("channelCount.support, " + channelCount);
return false;
}
return true;
}
private void logNoSupport(String message) {
Log.d(
TAG,
"NoSupport ["
+ message
+ "] ["
+ name
+ ", "
+ mimeType
+ "] ["
+ Util.DEVICE_DEBUG_INFO
+ "]");
}
private void logAssumedSupport(String message) {
Log.d(
TAG,
"AssumedSupport ["
+ message
+ "] ["
+ name
+ ", "
+ mimeType
+ "] ["
+ Util.DEVICE_DEBUG_INFO
+ "]");
}
  /**
   * Returns a maximum input channel count to use, working around decoders that report a missing
   * or implausibly low value for some audio MIME types.
   *
   * @param name The decoder name, used only for logging.
   * @param mimeType The audio MIME type being decoded.
   * @param maxChannelCount The maximum input channel count reported by the platform.
   * @return The reported count when it looks trustworthy, otherwise an assumed default.
   */
  private static int adjustMaxInputChannelCount(String name, String mimeType, int maxChannelCount) {
    if (maxChannelCount > 1 || (Util.SDK_INT >= 26 && maxChannelCount > 0)) {
      // The maximum channel count looks like it's been set correctly.
      return maxChannelCount;
    }
    // For these MIME types the platform is trusted even when the count is 0 or 1.
    if (MimeTypes.AUDIO_MPEG.equals(mimeType)
        || MimeTypes.AUDIO_AMR_NB.equals(mimeType)
        || MimeTypes.AUDIO_AMR_WB.equals(mimeType)
        || MimeTypes.AUDIO_AAC.equals(mimeType)
        || MimeTypes.AUDIO_VORBIS.equals(mimeType)
        || MimeTypes.AUDIO_OPUS.equals(mimeType)
        || MimeTypes.AUDIO_RAW.equals(mimeType)
        || MimeTypes.AUDIO_FLAC.equals(mimeType)
        || MimeTypes.AUDIO_ALAW.equals(mimeType)
        || MimeTypes.AUDIO_MLAW.equals(mimeType)
        || MimeTypes.AUDIO_MSGSM.equals(mimeType)) {
      // Platform code should have set a default.
      return maxChannelCount;
    }
    // The maximum channel count looks incorrect. Adjust it to an assumed default.
    int assumedMaxChannelCount;
    if (MimeTypes.AUDIO_AC3.equals(mimeType)) {
      assumedMaxChannelCount = 6;
    } else if (MimeTypes.AUDIO_E_AC3.equals(mimeType)) {
      assumedMaxChannelCount = 16;
    } else {
      // Default to the platform limit, which is 30.
      assumedMaxChannelCount = 30;
    }
    Log.w(
        TAG,
        "AssumedMaxChannelAdjustment: "
            + name
            + ", ["
            + maxChannelCount
            + " to "
            + assumedMaxChannelCount
            + "]");
    return assumedMaxChannelCount;
  }
  // Whether the codec advertises seamless adaptive playback. The feature flag is only
  // queryable from API 19, so older SDKs report false.
  private static boolean isAdaptive(CodecCapabilities capabilities) {
    return Util.SDK_INT >= 19 && isAdaptiveV19(capabilities);
  }

  @RequiresApi(19)
  private static boolean isAdaptiveV19(CodecCapabilities capabilities) {
    return capabilities.isFeatureSupported(CodecCapabilities.FEATURE_AdaptivePlayback);
  }

  // Whether the codec advertises tunneled playback. The feature flag is only queryable from
  // API 21, so older SDKs report false.
  private static boolean isTunneling(CodecCapabilities capabilities) {
    return Util.SDK_INT >= 21 && isTunnelingV21(capabilities);
  }

  @RequiresApi(21)
  private static boolean isTunnelingV21(CodecCapabilities capabilities) {
    return capabilities.isFeatureSupported(CodecCapabilities.FEATURE_TunneledPlayback);
  }

  // Whether the codec advertises secure playback. The feature flag is only queryable from
  // API 21, so older SDKs report false.
  private static boolean isSecure(CodecCapabilities capabilities) {
    return Util.SDK_INT >= 21 && isSecureV21(capabilities);
  }

  @RequiresApi(21)
  private static boolean isSecureV21(CodecCapabilities capabilities) {
    return capabilities.isFeatureSupported(CodecCapabilities.FEATURE_SecurePlayback);
  }
@RequiresApi(21)
private static boolean areSizeAndRateSupportedV21(
VideoCapabilities capabilities, int width, int height, double frameRate) {
// Don't ever fail due to alignment. See: https://github.com/google/ExoPlayer/issues/6551.
Point alignedSize = alignVideoSizeV21(capabilities, width, height);
width = alignedSize.x;
height = alignedSize.y;
// VideoCapabilities.areSizeAndRateSupported incorrectly returns false if frameRate < 1 on some
// versions of Android, so we only check the size in this case [Internal ref: b/153940404].
if (frameRate == Format.NO_VALUE || frameRate < 1) {
return capabilities.isSizeSupported(width, height);
} else {
// The signaled frame rate may be slightly higher than the actual frame rate, so we take the
// floor to avoid situations where a range check in areSizeAndRateSupported fails due to
// slightly exceeding the limits for a standard format (e.g., 1080p at 30 fps).
double floorFrameRate = Math.floor(frameRate);
return capabilities.areSizeAndRateSupported(width, height, floorFrameRate);
}
}
@RequiresApi(21)
private static Point alignVideoSizeV21(VideoCapabilities capabilities, int width, int height) {
int widthAlignment = capabilities.getWidthAlignment();
int heightAlignment = capabilities.getHeightAlignment();
return new Point(
Util.ceilDivide(width, widthAlignment) * widthAlignment,
Util.ceilDivide(height, heightAlignment) * heightAlignment);
}
  // Thin API 23+ accessor; callers must gate on the SDK level before calling.
  @RequiresApi(23)
  private static int getMaxSupportedInstancesV23(CodecCapabilities capabilities) {
    return capabilities.getMaxSupportedInstances();
  }
/**
* Called on devices with {@link Util#SDK_INT} 23 and below, for VP9 decoders whose {@link
* CodecCapabilities} do not correctly report profile levels. The returned {@link
* CodecProfileLevel CodecProfileLevels} are estimated based on other data in the {@link
* CodecCapabilities}.
*
* @param capabilities The {@link CodecCapabilities} for a VP9 decoder, or {@code null} if not
* known.
* @return The estimated {@link CodecProfileLevel CodecProfileLevels} for the decoder.
*/
private static CodecProfileLevel[] estimateLegacyVp9ProfileLevels(
@Nullable CodecCapabilities capabilities) {
int maxBitrate = 0;
if (capabilities != null) {
@Nullable VideoCapabilities videoCapabilities = capabilities.getVideoCapabilities();
if (videoCapabilities != null) {
maxBitrate = videoCapabilities.getBitrateRange().getUpper();
}
}
// Values taken from https://www.webmproject.org/vp9/levels.
int level;
if (maxBitrate >= 180_000_000) {
level = CodecProfileLevel.VP9Level52;
} else if (maxBitrate >= 120_000_000) {
level = CodecProfileLevel.VP9Level51;
} else if (maxBitrate >= 60_000_000) {
level = CodecProfileLevel.VP9Level5;
} else if (maxBitrate >= 30_000_000) {
level = CodecProfileLevel.VP9Level41;
} else if (maxBitrate >= 18_000_000) {
level = CodecProfileLevel.VP9Level4;
} else if (maxBitrate >= 12_000_000) {
level = CodecProfileLevel.VP9Level31;
} else if (maxBitrate >= 7_200_000) {
level = CodecProfileLevel.VP9Level3;
} else if (maxBitrate >= 3_600_000) {
level = CodecProfileLevel.VP9Level21;
} else if (maxBitrate >= 1_800_000) {
level = CodecProfileLevel.VP9Level2;
} else if (maxBitrate >= 800_000) {
level = CodecProfileLevel.VP9Level11;
} else { // Assume level 1 is always supported.
level = CodecProfileLevel.VP9Level1;
}
CodecProfileLevel profileLevel = new CodecProfileLevel();
// Since this method is for legacy devices only, assume that only profile 0 is supported.
profileLevel.profile = CodecProfileLevel.VP9Profile0;
profileLevel.level = level;
return new CodecProfileLevel[] {profileLevel};
}
/**
* Returns whether the decoder is known to fail when adapting, despite advertising itself as an
* adaptive decoder.
*
* @param name The decoder name.
* @return True if the decoder is known to fail when adapting.
*/
private static boolean needsDisableAdaptationWorkaround(String name) {
return Util.SDK_INT <= 22
&& ("ODROID-XU3".equals(Util.MODEL) || "Nexus 10".equals(Util.MODEL))
&& ("OMX.Exynos.AVC.Decoder".equals(name) || "OMX.Exynos.AVC.Decoder.secure".equals(name));
}
/**
* Returns whether the decoder is known to fail when an attempt is made to reconfigure it with a
* new format's configuration data.
*
* @param name The name of the decoder.
* @return Whether the decoder is known to fail when an attempt is made to reconfigure it with a
* new format's configuration data.
*/
private static boolean needsAdaptationReconfigureWorkaround(String name) {
return Util.MODEL.startsWith("SM-T230") && "OMX.MARVELL.VIDEO.HW.CODA7542DECODER".equals(name);
}
  /**
   * Returns whether the decoder is known to behave incorrectly if flushed to adapt to a new format.
   *
   * @param mimeType The name of the MIME type.
   * @return Whether the decoder is known to behave incorrectly if flushed to adapt to a new
   *     format.
   */
  private static boolean needsAdaptationFlushWorkaround(String mimeType) {
    // For Opus, we don't flush and reuse the codec because the decoder may discard samples after
    // flushing, which would result in audio being dropped just after a stream change (see
    // [Internal: b/143450854]). For other formats, we allow reuse after flushing if the codec
    // initialization data is unchanged.
    return MimeTypes.AUDIO_OPUS.equals(mimeType);
  }
/**
* Capabilities are known to be inaccurately reported for vertical resolutions on some devices.
* [Internal ref: b/31387661]. When this workaround is enabled, we also check whether the
* capabilities indicate support if the width and height are swapped. If they do, we assume that
* the vertical resolution is also supported.
*
* @param name The name of the codec.
* @return Whether to enable the workaround.
*/
private static boolean needsRotatedVerticalResolutionWorkaround(String name) {
if ("OMX.MTK.VIDEO.DECODER.HEVC".equals(name) && "mcv5a".equals(Util.DEVICE)) {
// See https://github.com/google/ExoPlayer/issues/6612.
return false;
}
return true;
}
/**
* Whether a profile is excluded from the list of supported profiles. This may happen when a
* device declares support for a profile it doesn't actually support.
*/
private static boolean needsProfileExcludedWorkaround(String mimeType, int profile) {
// See https://github.com/google/ExoPlayer/issues/3537
return MimeTypes.VIDEO_H265.equals(mimeType)
&& CodecProfileLevel.HEVCProfileMain10 == profile
&& ("sailfish".equals(Util.DEVICE) || "marlin".equals(Util.DEVICE));
}
/** Whether the device is known to have wrong {@link PerformancePoint} declarations. */
private static boolean needsIgnorePerformancePointsWorkaround() {
// See https://github.com/google/ExoPlayer/issues/10898 and [internal ref: b/267324685].
return /* Chromecast with Google TV */ Util.DEVICE.equals("sabrina")
|| Util.DEVICE.equals("boreal")
/* Lenovo Tablet M10 FHD Plus */
|| Util.MODEL.startsWith("Lenovo TB-X605")
|| Util.MODEL.startsWith("Lenovo TB-X606")
|| Util.MODEL.startsWith("Lenovo TB-X616");
}
  /** Possible outcomes of evaluating {@link PerformancePoint} coverage. */
  @Documented
  @Retention(RetentionPolicy.SOURCE)
  @Target(TYPE_USE)
  @IntDef({COVERAGE_RESULT_YES, COVERAGE_RESULT_NO, COVERAGE_RESULT_NO_EMPTY_LIST})
  private @interface PerformancePointCoverageResult {}

  /** The decoder has a PerformancePoint that covers the resolution and frame rate. */
  private static final int COVERAGE_RESULT_YES = 2;

  /**
   * The decoder has at least one PerformancePoint, but none of them cover the resolution and frame
   * rate.
   */
  private static final int COVERAGE_RESULT_NO = 1;

  /** The VideoCapabilities does not contain any PerformancePoints. */
  private static final int COVERAGE_RESULT_NO_EMPTY_LIST = 0;
@RequiresApi(29)
private static final class Api29 {
@DoNotInline
public static @PerformancePointCoverageResult int areResolutionAndFrameRateCovered(
VideoCapabilities videoCapabilities, int width, int height, double frameRate) {
List<PerformancePoint> performancePointList =
videoCapabilities.getSupportedPerformancePoints();
if (performancePointList == null
|| performancePointList.isEmpty()
|| needsIgnorePerformancePointsWorkaround()) {
return COVERAGE_RESULT_NO_EMPTY_LIST;
}
// Round frame rate down to to avoid situations where a range check in
// covers fails due to slightly exceeding the limits for a standard format
// (e.g., 1080p at 30 fps). [Internal ref: b/134706676]
PerformancePoint targetPerformancePoint =
new PerformancePoint(width, height, (int) frameRate);
for (int i = 0; i < performancePointList.size(); i++) {
if (performancePointList.get(i).covers(targetPerformancePoint)) {
return COVERAGE_RESULT_YES;
}
}
return COVERAGE_RESULT_NO;
}
}
}
|
apache/impala | 37,264 | fe/src/test/java/org/apache/impala/catalog/metastore/CatalogHmsSyncToLatestEventIdTest.java | // Licensed to the Apache Software Foundation (ASF) under one
// or more contributor license agreements. See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership. The ASF licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied. See the License for the
// specific language governing permissions and limitations
// under the License.
package org.apache.impala.catalog.metastore;
import com.google.common.collect.Lists;
import java.util.HashMap;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hive.metastore.HiveMetaStoreClient;
import org.apache.hadoop.hive.metastore.conf.MetastoreConf;
import org.apache.hadoop.hive.metastore.conf.MetastoreConf.ConfVars;
import org.apache.hadoop.hive.metastore.api.Database;
import org.apache.hadoop.hive.metastore.api.Partition;
import org.apache.hadoop.hive.metastore.api.NoSuchObjectException;
import org.apache.impala.catalog.*;
import org.apache.impala.catalog.MetaStoreClientPool.MetaStoreClient;
import org.apache.impala.catalog.events.ExternalEventsProcessor;
import org.apache.impala.catalog.events.MetastoreEventsProcessor;
import org.apache.impala.catalog.MetastoreApiTestUtils;
import org.apache.impala.catalog.events.NoOpEventProcessor;
import org.apache.impala.catalog.events.SynchronousHMSEventProcessorForTests;
import org.apache.impala.service.BackendConfig;
import org.apache.impala.service.CatalogOpExecutor;
import org.apache.impala.service.MetadataOp;
import org.apache.impala.testutil.CatalogServiceTestCatalog;
import org.apache.impala.testutil.CatalogTestMetastoreServer;
import org.apache.thrift.TException;
import org.junit.After;
import org.junit.AfterClass;
import org.junit.Assume;
import org.junit.Before;
import org.junit.BeforeClass;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.junit.runners.Parameterized;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.apache.hadoop.hive.metastore.api.FieldSchema;
import org.apache.hadoop.hive.metastore.api.CurrentNotificationEventId;
import org.apache.impala.catalog.CatalogException;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertTrue;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.Map;
/**
* This class mainly tests ddl operations from catalogHmsClient and asserts that
* db/table is synced to the latest event id. It also processes the same events
* from MetastoreEventProcessor and asserts that those events are skipped since
* they have already been processed by catalogHmsClient
*/
@RunWith(Parameterized.class)
public class CatalogHmsSyncToLatestEventIdTest extends AbstractCatalogMetastoreTest {
private static String TEST_DB_NAME = "sync_to_latest_events_test_db";
private static Logger LOG =
LoggerFactory.getLogger(CatalogHmsSyncToLatestEventIdTest.class);
protected static CatalogServiceTestCatalog catalog_;
protected static CatalogOpExecutor catalogOpExecutor_;
protected static CatalogMetastoreServer catalogMetastoreServer_;
protected static HiveMetaStoreClient catalogHmsClient_;
private static SynchronousHMSEventProcessorForTests eventsProcessor_;
protected static final Configuration CONF = MetastoreConf.newMetastoreConf();
private String tableType_;
private static String managedTableType =
org.apache.hadoop.hive.metastore.TableType.MANAGED_TABLE.toString();
private static String externalTableType =
org.apache.hadoop.hive.metastore.TableType.EXTERNAL_TABLE.toString();
private static boolean flagEnableCatalogCache ,flagInvalidateCache,
flagSyncToLatestEventId;
  // One-time suite setup: builds a test catalog, starts a catalog metastore server in front of
  // it, points an HMS client (catalogHmsClient_) at that server, and starts a synchronous events
  // processor that tests drive explicitly via processEvents(). Also snapshots the backend flags
  // so cleanUp() can restore them.
  @BeforeClass
  public static void setup() throws Exception {
    catalog_ = CatalogServiceTestCatalog.create();
    catalogOpExecutor_ = catalog_.getCatalogOpExecutor();
    catalogMetastoreServer_ = new CatalogTestMetastoreServer(
        catalogOpExecutor_);
    catalog_.setCatalogMetastoreServer(catalogMetastoreServer_);
    try (MetaStoreClient metaStoreClient = catalog_.getMetaStoreClient()) {
      CurrentNotificationEventId currentNotificationId =
          metaStoreClient.getHiveClient().getCurrentNotificationEventId();
      // Start processing from the current HMS event id so pre-existing events are ignored.
      eventsProcessor_ = new SynchronousHMSEventProcessorForTests(
          catalogOpExecutor_, currentNotificationId.getEventId(), 10000L);
      eventsProcessor_.start();
    }
    // Don't set event processor in catalog because
    // sync to latest event id should work even if event processor
    // is disabled
    catalogMetastoreServer_.start();
    MetastoreConf.setVar(CONF, ConfVars.THRIFT_URIS,
        "thrift://localhost:" + catalogMetastoreServer_.getPort());
    // metastore clients which connect to catalogd's HMS endpoint need this
    // configuration set since the forwarded HMS call use catalogd's HMS client
    // not the end-user's UGI.
    CONF.set("hive.metastore.execute.setugi", "false");
    catalogHmsClient_ = new HiveMetaStoreClient(CONF);
    assertTrue("Event processor should not be set",
        catalog_.getMetastoreEventProcessor() instanceof NoOpEventProcessor);
    // get previous values of flag to be set in cleanup
    flagEnableCatalogCache = BackendConfig.INSTANCE.enableCatalogdHMSCache();
    flagInvalidateCache = BackendConfig.INSTANCE.invalidateCatalogdHMSCacheOnDDLs();
    flagSyncToLatestEventId = BackendConfig.INSTANCE.enableSyncToLatestEventOnDdls();
  }
  // One-time suite teardown: restores the backend flags captured in setup() and shuts down the
  // events processor, the catalog metastore server, and the catalog.
  @AfterClass
  public static void cleanUp() throws Exception {
    // in cleanup, set flag's values to previous value
    BackendConfig.INSTANCE.setEnableCatalogdHMSCache(flagEnableCatalogCache);
    BackendConfig.INSTANCE.setEnableSyncToLatestEventOnDdls(flagSyncToLatestEventId);
    BackendConfig.INSTANCE.setInvalidateCatalogdHMSCacheOnDDLs(flagInvalidateCache);
    if (eventsProcessor_ != null) {
      eventsProcessor_.shutdown();
    }
    catalogMetastoreServer_.stop();
    catalog_.close();
  }
  // Per-test teardown: drops the shared test database from HMS and removes it from the catalog
  // cache, tolerating the case where it does not exist.
  @After
  public void afterTest() throws TException, CatalogException {
    String dbName = TEST_DB_NAME;
    try {
      try (MetaStoreClient msClient = catalog_.getMetaStoreClient()) {
        msClient.getHiveClient().dropDatabase(dbName, true, true, true);
      }
      catalog_.removeDb(dbName);
    } catch (NoSuchObjectException e) {
      LOG.error("database {} does not exist in catalogd", dbName);
      // Still remove any stale entry from the catalog cache.
      catalog_.removeDb(dbName);
    }
  }
@Before
public void beforeTest() throws Exception {
BackendConfig.INSTANCE.setEnableCatalogdHMSCache(true);
BackendConfig.INSTANCE.setEnableSyncToLatestEventOnDdls(true);
BackendConfig.INSTANCE.setInvalidateCatalogdHMSCacheOnDDLs(false);
}
  // Parameterized constructor: each run of the suite exercises one HMS table type (managed or
  // external), supplied by createTableTypes().
  public CatalogHmsSyncToLatestEventIdTest(String tableType) {
    tableType_ = tableType;
  }
  // Runs every test once for managed tables and once for external tables.
  @Parameterized.Parameters
  public static String[] createTableTypes() {
    return new String[] {managedTableType, externalTableType};
  }
@Test
public void testCreateDatabase() throws Exception {
LOG.info("Executing testCreateDatabase");
String dbName = "test_create_database";
try {
Database msDb = MetastoreApiTestUtils
.createHmsDatabaseObject(null, dbName, null);
catalogHmsClient_.createDatabase(msDb);
Db db = catalog_.getDb(dbName);
assertTrue(db != null);
assertTrue(db.getLastSyncedEventId() != -1);
assertTrue(db.getLastSyncedEventId() == db.getCreateEventId());
} finally {
catalogHmsClient_.dropDatabase(dbName, true, true, true);
assertTrue("db " + dbName + " should not be present in catalogd",
catalog_.getDb(dbName) == null);
}
}
  /**
   * Alters a database both directly in HMS (add a parameter) and via the catalog's HMS endpoint
   * (change owner), then verifies that the endpoint syncs the cached Db past both changes and
   * that the events processor subsequently skips the two already-applied alter events.
   */
  @Test
  public void testAlterDatabase() throws Exception {
    LOG.info("Executing testAlterDatabase");
    String dbName = "test_alter_database";
    try {
      createDatabaseInCatalog(dbName);
      eventsProcessor_.processEvents();
      long lastSkippedEventsCount = eventsProcessor_.getMetrics()
          .getCounter(MetastoreEventsProcessor.EVENTS_SKIPPED_METRIC).getCount();
      Db catalogDb = catalog_.getDb(dbName);
      long prevSyncedEventId = catalogDb.getLastSyncedEventId();
      // First alteration bypasses the catalog endpoint: add a parameter directly in HMS.
      try (MetaStoreClient msClient = catalog_.getMetaStoreClient()) {
        MetastoreApiTestUtils.addDatabaseParametersInHms(msClient, dbName,
            "key1", "val1");
      }
      String newOwner = "new_owner";
      Database alteredMsDb = getDatabaseInHms(dbName);
      alteredMsDb.setOwnerName(newOwner);
      // alter db via catalogHmsClient
      catalogHmsClient_.alterDatabase(dbName, alteredMsDb);
      catalogDb = catalog_.getDb(dbName);
      // Both the direct-HMS parameter change and the endpoint owner change must be visible.
      assertTrue(catalogDb.getOwnerUser().equals(newOwner));
      assertTrue(catalogDb.getMetaStoreDb().getParameters()
          .get("key1").equals("val1"));
      assertTrue(catalogDb.getLastSyncedEventId() >
          prevSyncedEventId);
      eventsProcessor_.processEvents();
      long currentSkippedEventsCount = eventsProcessor_.getMetrics()
          .getCounter(MetastoreEventsProcessor.EVENTS_SKIPPED_METRIC).getCount();
      // Both alter events were already synced by the endpoint, so both should be skipped.
      assertTrue(lastSkippedEventsCount + 2 == currentSkippedEventsCount);
    } finally {
      catalogHmsClient_.dropDatabase(dbName, true, true, true);
    }
  }
  /**
   * Adds and alters partitions directly in HMS (bypassing the catalog endpoint), then drops a
   * partition via the endpoint and verifies that the drop triggers a sync that pulls in the
   * earlier add/alter events, leaving the cached table consistent and the events processor with
   * nothing to apply.
   */
  @Test
  public void testAddDropAlterPartitions() throws Exception {
    LOG.info("Executing testAddDropAlterPartitions");
    String tblName = "test_add_drop_alter_partitions_" + tableType_ + "_tbl" ;
    try {
      createDatabaseInCatalog(TEST_DB_NAME);
      try {
        catalogHmsClient_.dropTable(TEST_DB_NAME, tblName, true, true);
      } catch (Exception e) {
        LOG.error("Failed to drop table {} from HMS", tblName);
      }
      catalogHmsClient_
          .createTable(MetastoreApiTestUtils.getTestTable(null,
              TEST_DB_NAME, tblName, null, true, tableType_));
      HdfsTable tbl = getCatalogHdfsTable(TEST_DB_NAME, tblName);
      assertTrue(tbl != null);
      assertTrue("table's last synced id should not be -1",
          tbl.getLastSyncedEventId() != -1);
      assertTrue(tbl.getLastSyncedEventId() == tbl.getCreateEventId());
      eventsProcessor_.processEvents();
      long lastSkippedCount = eventsProcessor_.getMetrics()
          .getCounter(MetastoreEventsProcessor.EVENTS_SKIPPED_METRIC).getCount();
      long prevSyncedEventId = tbl.getLastSyncedEventId();
      List<List<String> > partVals = new ArrayList<>();
      partVals.add(Arrays.asList("1"));
      partVals.add(Arrays.asList("2"));
      partVals.add(Arrays.asList("3"));
      addPartitionsInHms(TEST_DB_NAME, tblName, partVals);
      // added partitions should not reflect in table
      // stored in catalog cache
      assertTrue(tbl.getPartitions().size() == 0);
      // alter partition 2 directly in HMS
      Partition partitionToAlter =
          getPartitionInHms(TEST_DB_NAME, tblName, Arrays.asList("2"));
      String newLocation = "/path/to/newLocation/";
      partitionToAlter.getSd().setLocation(newLocation);
      alterPartitionInHms(TEST_DB_NAME, tblName, partitionToAlter);
      // when dropping partitions from catalogHmsClient, sync to latest
      // event id adds 3 partitions and drops 1
      catalogHmsClient_.dropPartition(TEST_DB_NAME, tblName,
          Arrays.asList("3"), true);
      tbl = getCatalogHdfsTable(TEST_DB_NAME, tblName);
      assertTrue("Table should have 2 partitions after dropping 1 "
          + "out of 3 partitions", tbl.getPartitions().size() == 2);
      // assert that partition with new location from cached table
      // exists
      FeFsPartition modifiedPartition = null;
      for (FeFsPartition part : tbl.loadAllPartitions()) {
        if (part.getLocation().equals(newLocation)) {
          modifiedPartition = part;
          break;
        }
      }
      assertTrue(modifiedPartition != null);
      assertTrue(tbl.getLastSyncedEventId() > prevSyncedEventId);
      // test that events processor skipped all events
      // since last synced event
      eventsProcessor_.processEvents();
      long currentSkippedCount = eventsProcessor_.getMetrics()
          .getCounter(MetastoreEventsProcessor.EVENTS_SKIPPED_METRIC).getCount();
      assertTrue( String.format("CurrentSkippedCount %s differs from "
              + "lastSkippedCount + 3 %s", currentSkippedCount,
          lastSkippedCount),
          currentSkippedCount == lastSkippedCount + 3);
    } finally {
      catalogHmsClient_.dropTable(TEST_DB_NAME, tblName, true, true);
    }
  }
  /**
   * Exchanges a partition between two tables via the catalog's HMS endpoint and verifies that
   * both cached tables are synced past the exchange. Partitions are added only in HMS first, so
   * the sync must reconcile them too. Managed tables only (exchange_partition is not supported
   * for the external table type in this test).
   */
  @Test
  public void testExchangePartition() throws Exception {
    // run this test only for managed table
    Assume.assumeTrue(tableType_.equals(managedTableType));
    LOG.info("Executing testExchangePartition");
    String srcTblName = "test_exchange_partition_src_" + tableType_ + "_tbl";
    String destTblName = "test_exchange_partition_dest_" + tableType_ + "_tbl";
    try {
      createDatabaseInCatalog(TEST_DB_NAME);
      // drop tables if already exist
      catalogHmsClient_.dropTable(TEST_DB_NAME, srcTblName, true, true);
      catalogHmsClient_.dropTable(TEST_DB_NAME, destTblName, true, true);
      org.apache.hadoop.hive.metastore.api.Table srcMsTable =
          MetastoreApiTestUtils.getTestTable(null, TEST_DB_NAME,
              srcTblName, null, true, managedTableType);
      catalogHmsClient_.createTable(srcMsTable);
      // add 3 partitions but only in HMS
      List<List<String> > srcPartVals = new ArrayList<>();
      srcPartVals.add(Arrays.asList("1"));
      srcPartVals.add(Arrays.asList("2"));
      srcPartVals.add(Arrays.asList("3"));
      addPartitionsInHms(TEST_DB_NAME, srcTblName, srcPartVals);
      Map<String, String> partitionSpec =
          getPartitionSpec(srcMsTable, Arrays.asList("1"));
      org.apache.hadoop.hive.metastore.api.Table destMsTable =
          MetastoreApiTestUtils.getTestTable(null,
              TEST_DB_NAME, destTblName, null, true, managedTableType);
      catalogHmsClient_.createTable(destMsTable);
      // add partition with val 4 but only in HMS
      List<List<String> > destPartVals = new ArrayList<>();
      destPartVals.add(Arrays.asList("4"));
      addPartitionsInHms(TEST_DB_NAME, destTblName, destPartVals);
      long eventIdBeforeExchange = getLatestEventIdFromHMS();
      // Move partition "1" from src to dest via the catalog endpoint.
      catalogHmsClient_.exchange_partition(partitionSpec,TEST_DB_NAME, srcTblName,
          TEST_DB_NAME, destTblName);
      HdfsTable srcCatalogTbl = getCatalogHdfsTable(TEST_DB_NAME, srcTblName);
      HdfsTable destCatalogTbl = getCatalogHdfsTable(TEST_DB_NAME, destTblName);
      // src: {2, 3}; dest: {4, 1} after the exchange.
      assertTrue(srcCatalogTbl.getPartitions().size() == 2);
      assertTrue(destCatalogTbl.getPartitions().size() == 2);
      // assert that part with val 1 does not exist in src table
      for (FeFsPartition srcPartition : srcCatalogTbl.loadAllPartitions()) {
        List<String> partVals =
            srcPartition.getPartitionValuesAsStrings(false);
        assertFalse(partVals.equals(Arrays.asList("1")));
      }
      // it is enough to assert that last synced event id of Hdfs table >
      // event id before exchange partition api
      assertTrue(srcCatalogTbl.getLastSyncedEventId() > eventIdBeforeExchange);
      assertTrue(destCatalogTbl.getLastSyncedEventId() > eventIdBeforeExchange);
    } finally {
      catalogHmsClient_.dropTable(TEST_DB_NAME, srcTblName, true, true);
      catalogHmsClient_.dropTable(TEST_DB_NAME, destTblName, true, true);
    }
  }
  /**
   * Verifies that re-creating a table with the same name (after a drop performed directly in HMS,
   * bypassing the catalog endpoint) results in the new table — distinguished here by being
   * unpartitioned — replacing the stale cached one, with a fresh last-synced event id.
   */
  @Test
  public void testTableCreateDropCreate() throws Exception {
    LOG.info("Executing testTableCreateDropCreate");
    String tblName = "test_create_drop_create_" + tableType_ + "_tbl";
    String tblNameLowerCase = tblName.toLowerCase();
    try {
      createDatabaseInCatalog(TEST_DB_NAME);
      catalogHmsClient_.dropTable(TEST_DB_NAME, tblName, true, true);
      catalogHmsClient_.createTable(MetastoreApiTestUtils.getTestTable(null,
          TEST_DB_NAME, tblName, null, true, tableType_));
      HdfsTable tbl = (HdfsTable) catalog_.getTable(TEST_DB_NAME, tblNameLowerCase);
      assertTrue(tbl.isPartitioned());
      // last synced event id is same as create event id
      long prevCreateEventId = tbl.getLastSyncedEventId();
      // drop table from HMS skipping catalog metastore
      try (MetaStoreClient msClient = catalog_.getMetaStoreClient()) {
        msClient.getHiveClient().dropTable(TEST_DB_NAME, tblName, true, false);
      }
      // recreate table with same name but unpartitioned to distinguish it
      // from previous table
      catalogHmsClient_.createTable(MetastoreApiTestUtils.getTestTable(null,
          TEST_DB_NAME, tblName, null, false, tableType_));
      HdfsTable currentTbl = (HdfsTable) catalog_.getTable(TEST_DB_NAME,
          tblNameLowerCase);
      // The cached entry must correspond to the new (unpartitioned) table, not the old one.
      assertTrue(currentTbl.getLastSyncedEventId() != prevCreateEventId);
      assertTrue(!currentTbl.isPartitioned());
    } finally {
      catalogHmsClient_.dropTable(TEST_DB_NAME, tblName, true, true);
    }
  }
/**
 * Verifies alter-table (without rename) behavior: an alter performed directly
 * in HMS is NOT reflected in the catalog cache until a subsequent alter goes
 * through the catalog's HMS client, which syncs the table to the latest HMS
 * event. Also asserts that both alter events are then skipped (not
 * re-applied) by the events processor.
 */
@Test
public void testAlterTableNoRename() throws Exception {
LOG.info("Executing testAlterTableNoRename");
String tblName = "test_alter_table_" + tableType_ + "_tbl";
try {
createDatabaseInCatalog(TEST_DB_NAME);
// drop table if it already exists
catalogHmsClient_.dropTable(TEST_DB_NAME, tblName, true, true);
catalogHmsClient_.createTable(MetastoreApiTestUtils.getTestTable(null,
TEST_DB_NAME, tblName, null, true, tableType_));
HdfsTable tbl = getCatalogHdfsTable(TEST_DB_NAME, tblName);
eventsProcessor_.processEvents();
// Baseline for the skipped-events counter, compared at the end.
long lastSkippedEventsCount = eventsProcessor_.getMetrics()
.getCounter(MetastoreEventsProcessor.EVENTS_SKIPPED_METRIC).getCount();
org.apache.hadoop.hive.metastore.api.Table newMsTable =
tbl.getMetaStoreTable().deepCopy();
// Replace the column list in the storage descriptor of the copy.
List<FieldSchema> cols = Lists.newArrayList(
new FieldSchema("c1","string","c1 description"));
org.apache.hadoop.hive.metastore.api.StorageDescriptor updatedSd =
newMsTable.getSd();
updatedSd.setCols(cols);
newMsTable.setSd(updatedSd);
// alter table but not from catalogHMSClient so that it is
// synced up later
try (MetaStoreClient msClient = catalog_.getMetaStoreClient()) {
msClient.getHiveClient().alter_table_with_environmentContext(TEST_DB_NAME,
tblName, newMsTable, null);
}
// assert the cached table's SD is not changed
tbl = getCatalogHdfsTable(TEST_DB_NAME, tblName);
long prevSyncedEventId = tbl.getLastSyncedEventId();
org.apache.hadoop.hive.metastore.api.StorageDescriptor oldSd =
tbl.getMetaStoreTable().getSd();
assertFalse(oldSd.equals(updatedSd));
// get the latest table from metastore and alter it via catalogHmsClient
org.apache.hadoop.hive.metastore.api.Table latestMsTable =
getHmsTable(TEST_DB_NAME, tblName);
String newOwner = "newOwnerForTestAlterTable";
latestMsTable.setOwner(newOwner);
// alter latest table via catalogHMSClient
catalogHmsClient_.alter_table_with_environmentContext(TEST_DB_NAME, tblName,
latestMsTable, null);
// get latest table from the cache
HdfsTable updatedTbl = getCatalogHdfsTable(TEST_DB_NAME, tblName);
// Both the owner change and the earlier direct-HMS SD change must now be
// visible: the catalog-side alter synced the table to the latest event.
assertTrue(updatedTbl.getOwnerUser().equals(newOwner));
assertTrue(updatedTbl.getMetaStoreTable().getSd().equals(updatedSd));
assertTrue(
updatedTbl.getLastSyncedEventId() > prevSyncedEventId);
// assert that alter table events are skipped by event processor
eventsProcessor_.processEvents();
long currentSkippedEventsCount = eventsProcessor_.getMetrics()
.getCounter(MetastoreEventsProcessor.EVENTS_SKIPPED_METRIC).getCount();
// Two alters happened (direct HMS + catalog client); both events skipped.
assertTrue(lastSkippedEventsCount + 2 == currentSkippedEventsCount);
} finally {
catalogHmsClient_.dropTable(TEST_DB_NAME, tblName, true, true);
}
}
/**
 * Verifies that an alter-table rename issued through the catalog's HMS client
 * removes the old name from the catalog cache, registers the renamed table
 * with a valid last-synced event id, and that the resulting single rename
 * event is subsequently skipped (not re-applied) by the events processor.
 */
@Test
public void testAlterTableRename() throws Exception {
  // Fixed typo in the log message (was "testALterTableRename").
  LOG.info("Executing testAlterTableRename");
  String tblName = ("test_alter_table_rename_" + tableType_ + "_tbl").toLowerCase();
  String newTblName = tblName + "_new";
  try {
    createDatabaseInCatalog(TEST_DB_NAME);
    // Drop both names in case a previous run left tables behind.
    catalogHmsClient_.dropTable(TEST_DB_NAME, tblName, true, true);
    catalogHmsClient_.dropTable(TEST_DB_NAME, newTblName, true, true);
    catalogHmsClient_.createTable(MetastoreApiTestUtils.getTestTable(null,
        TEST_DB_NAME, tblName, null, true, tableType_));
    HdfsTable oldTbl = getCatalogHdfsTable(TEST_DB_NAME, tblName);
    eventsProcessor_.processEvents();
    // Baseline for the skipped-events counter, compared at the end.
    long lastSkippedEventsCount = eventsProcessor_.getMetrics()
        .getCounter(MetastoreEventsProcessor.EVENTS_SKIPPED_METRIC).getCount();
    // Rename by altering a deep copy of the cached metastore table object.
    org.apache.hadoop.hive.metastore.api.Table newMsTable =
        oldTbl.getMetaStoreTable().deepCopy();
    newMsTable.setTableName(newTblName);
    catalogHmsClient_.alter_table_with_environmentContext(TEST_DB_NAME, tblName,
        newMsTable, null);
    // check that old table does not exist in cache
    assertTrue(catalog_.getTableNoThrow(TEST_DB_NAME, tblName) == null);
    HdfsTable newTbl = getCatalogHdfsTable(TEST_DB_NAME, newTblName);
    assertTrue(newTbl != null);
    assertTrue(newTbl.getLastSyncedEventId() > -1);
    // The rename was already applied via the catalog HMS client, so exactly
    // one additional event must be skipped by the events processor.
    eventsProcessor_.processEvents();
    long currentSkippedEventsCount = eventsProcessor_.getMetrics()
        .getCounter(MetastoreEventsProcessor.EVENTS_SKIPPED_METRIC).getCount();
    assertTrue(lastSkippedEventsCount + 1 == currentSkippedEventsCount);
  } finally {
    catalogHmsClient_.dropTable(TEST_DB_NAME, tblName, true, true);
    catalogHmsClient_.dropTable(TEST_DB_NAME, newTblName, true, true);
  }
}
/**
 * Verifies that when the enableSyncToLatestEventOnDdls backend flag is off,
 * DDL operations (add/drop partition) do NOT advance the table's last-synced
 * event id. The previous flag value is restored in the finally block so other
 * tests are unaffected.
 */
@Test
public void testSyncToLatestEventIdFlag() throws Exception {
String tblName = "test_sync_to_latest_event_id_flag_" + tableType_ + "_tbl";
LOG.info("Executing testSyncToLatestEventIdFlag");
// Remember the current flag value so it can be restored afterwards.
boolean prevFlag =
BackendConfig.INSTANCE.enableSyncToLatestEventOnDdls();
try {
createDatabaseInCatalog(TEST_DB_NAME);
catalogHmsClient_.dropTable(TEST_DB_NAME, tblName, true, true);
catalogHmsClient_
.createTable(MetastoreApiTestUtils.getTestTable(null,
TEST_DB_NAME, tblName, null, true, tableType_));
HdfsTable tbl = getCatalogHdfsTable(TEST_DB_NAME, tblName);
long lastSyncedEventId = tbl.getLastSyncedEventId();
// set sync to latest event id flag to false so that further
// table is not synced for HMS operations
BackendConfig.INSTANCE.setEnableSyncToLatestEventOnDdls(false);
List<List<String> > partVals = new ArrayList<>();
partVals.add(Arrays.asList("1"));
partVals.add(Arrays.asList("2"));
partVals.add(Arrays.asList("3"));
addPartitionsInHms(TEST_DB_NAME, tblName, partVals);
catalogHmsClient_.dropPartition(TEST_DB_NAME, tblName,
Arrays.asList("3"), true);
tbl = getCatalogHdfsTable(TEST_DB_NAME, tblName);
// with sync to latest event id flag set to false
// last synced event id for a table should not change
assertTrue(lastSyncedEventId == tbl.getLastSyncedEventId());
} finally {
BackendConfig.INSTANCE.setEnableSyncToLatestEventOnDdls(prevFlag);
catalogHmsClient_.dropTable(TEST_DB_NAME, tblName, true, true);
}
}
/**
 * Verifies that a full table reload (triggered via getOrLoadTable on an
 * IncompleteTable) syncs the table to the latest HMS event id: partitions
 * added directly in HMS become visible and the last-synced event id advances
 * past both the create event id and the id recorded at registration time.
 */
@Test
public void testFullTableReload() throws Exception {
LOG.info("Executing testFullTableReload");
String tblName = "full_table_reload_test_"+ tableType_ + "_tbl";
try {
createDatabaseInCatalog(TEST_DB_NAME);
catalogHmsClient_.dropTable(TEST_DB_NAME, tblName, true, true);
// create a table in HMS and add it as incomplete table
// so that full table refresh can reload it
createTableInHms(TEST_DB_NAME, tblName, true);
IncompleteTable tbl =
IncompleteTable.createUninitializedTable(catalog_.getDb(TEST_DB_NAME),
tblName, MetadataOp.getImpalaTableType(tableType_), null,
getLatestEventIdFromHMS());
catalog_.addTable(catalog_.getDb(TEST_DB_NAME), tbl);
long prevLastSyncedEventId =
catalog_.getTable(TEST_DB_NAME, tblName).getLastSyncedEventId();
// add partitions but only in HMS so that
// request for full table reload syncs table to latest
// event id
List<List<String> > partVals = new ArrayList<>();
partVals.add(Arrays.asList("1"));
partVals.add(Arrays.asList("2"));
partVals.add(Arrays.asList("3"));
addPartitionsInHms(TEST_DB_NAME, tblName, partVals);
// getOrLoadTable triggers the full load of the incomplete table.
Table refreshedTbl = catalog_.getOrLoadTable(TEST_DB_NAME, tblName,
"testing table syncing to latest event id", null);
assertTrue(
refreshedTbl.getLastSyncedEventId() > refreshedTbl.getCreateEventId());
assertTrue(refreshedTbl.getLastSyncedEventId() > prevLastSyncedEventId);
assertTrue(refreshedTbl instanceof HdfsTable);
HdfsTable hdfsTable = (HdfsTable) refreshedTbl;
// All three partitions added directly in HMS must be visible now.
assertTrue(hdfsTable.getPartitions().size() == 3);
} finally {
catalogHmsClient_.dropTable(TEST_DB_NAME, tblName, true, true);
}
}
/**
 * End-to-end check that table-level HMS events (add / alter / drop partition)
 * performed directly against HMS are applied to the cached table by the
 * metastore events processor, advancing the table's last-synced event id at
 * every step. The previously installed events processor is restored in the
 * finally block.
 */
@Test
public void testTableEventsProcessedByEventProcessor() throws Exception {
  // TODO: Move this to new file and add more tests
  // that cover more events MetastoreEvents
  // Fixed log message to match the method name (was
  // "testEventsProcessedByEventProcessor").
  LOG.info("Executing testTableEventsProcessedByEventProcessor");
  String tblName = "test_table_events_processed_by_event_processor_" +
      tableType_ + "_tbl";
  ExternalEventsProcessor prevEventProcessor =
      catalog_.getMetastoreEventProcessor();
  try {
    // Install the test events processor so this test controls event syncing.
    catalog_.setMetastoreEventProcessor(eventsProcessor_);
    eventsProcessor_.processEvents();
    createDatabaseInCatalog(TEST_DB_NAME);
    catalogHmsClient_.dropTable(TEST_DB_NAME, tblName, true, true);
    catalogHmsClient_
        .createTable(MetastoreApiTestUtils.getTestTable(null,
            TEST_DB_NAME, tblName, null, true, tableType_));
    HdfsTable tbl = getCatalogHdfsTable(TEST_DB_NAME, tblName);
    assertTrue(tbl != null);
    assertTrue("table's last synced id should not be -1",
        tbl.getLastSyncedEventId() != -1);
    assertTrue(tbl.getLastSyncedEventId() == tbl.getCreateEventId());
    long prevSyncedEventId = tbl.getLastSyncedEventId();
    eventsProcessor_.processEvents();
    List<List<String> > partVals = new ArrayList<>();
    partVals.add(Arrays.asList("1"));
    partVals.add(Arrays.asList("2"));
    partVals.add(Arrays.asList("3"));
    addPartitionsInHms(TEST_DB_NAME, tblName, partVals);
    // added partitions should not reflect in table
    // stored in catalog cache
    assertTrue(tbl.getPartitions().size() == 0);
    eventsProcessor_.processEvents();
    tbl = getCatalogHdfsTable(TEST_DB_NAME, tblName);
    // Fixed "partititons" typo in the log message below.
    LOG.info("After add partitions, table last synced event id {}, latest "
        + "event id in HMS {}", tbl.getLastSyncedEventId(),
        getLatestEventIdFromHMS());
    assertTrue(tbl.getLastSyncedEventId() > prevSyncedEventId);
    prevSyncedEventId = tbl.getLastSyncedEventId();
    // alter partition 2 directly in HMS
    Partition partitionToAlter =
        getPartitionInHms(TEST_DB_NAME, tblName, Arrays.asList("2"));
    String newLocation = "/path/to/newLocation/";
    partitionToAlter.getSd().setLocation(newLocation);
    alterPartitionInHms(TEST_DB_NAME, tblName, partitionToAlter);
    eventsProcessor_.processEvents();
    // Fixed "partititons" typo in the log message below.
    LOG.info("After alter partitions, table last synced event id {}, latest "
        + "event id in HMS {}", tbl.getLastSyncedEventId(),
        getLatestEventIdFromHMS());
    assertTrue(tbl.getLastSyncedEventId() > prevSyncedEventId);
    prevSyncedEventId = tbl.getLastSyncedEventId();
    dropPartitionInHms(TEST_DB_NAME, tblName, Arrays.asList("3"), true);
    eventsProcessor_.processEvents();
    LOG.info("After drop partitions, table last synced event id {}, latest "
        + "event id in HMS {}", tbl.getLastSyncedEventId(),
        getLatestEventIdFromHMS());
    assertTrue(tbl.getLastSyncedEventId() > prevSyncedEventId);
    tbl = getCatalogHdfsTable(TEST_DB_NAME, tblName);
    assertTrue("Table should have 2 partitions after dropping 1 "
        + "out of 3 partitions", tbl.getPartitions().size() == 2);
    // assert that partition with new location from cached table
    // exists
    FeFsPartition modifiedPartition = null;
    for (FeFsPartition part : tbl.loadAllPartitions()) {
      if (part.getLocation().equals(newLocation)) {
        modifiedPartition = part;
        break;
      }
    }
    assertTrue(modifiedPartition != null);
  } finally {
    catalog_.setMetastoreEventProcessor(prevEventProcessor);
    eventsProcessor_.processEvents();
    catalogHmsClient_.dropTable(TEST_DB_NAME, tblName, true, true);
  }
}
/**
 * Verifies that a database-level HMS event (parameters added directly in HMS)
 * is applied to the cached Db by the events processor, advancing the Db's
 * last-synced event id. The previously installed events processor is restored
 * in the finally block.
 */
@Test
public void testDbEventProcessedByEventProcessor() throws Exception {
LOG.info("Executing testDbEventProcessedByEventProcessor");
String dbName = "test_db_event_processed_by_event_processor_db";
ExternalEventsProcessor prevEventProcessor =
catalog_.getMetastoreEventProcessor();
try {
catalog_.setMetastoreEventProcessor(eventsProcessor_);
createDatabaseInCatalog(dbName);
long prevSyncedEventId = catalog_.getDb(dbName).getLastSyncedEventId();
// Add a parameter directly in HMS, bypassing the catalog.
try (MetaStoreClient msClient = catalog_.getMetaStoreClient()) {
MetastoreApiTestUtils.addDatabaseParametersInHms(msClient, dbName,
"key1", "val1");
}
// assert that db's parameters are null
assertTrue(
catalog_.getDb(dbName).getMetaStoreDb().getParameters() == null);
eventsProcessor_.processEvents();
// after processing event, key1 should reflect in msDb parameters
assertTrue(catalog_.getDb(dbName).getMetaStoreDb().getParameters()
.get("key1").equals("val1"));
assertTrue(
catalog_.getDb(dbName).getLastSyncedEventId() > prevSyncedEventId);
} finally {
catalog_.setMetastoreEventProcessor(prevEventProcessor);
catalogHmsClient_.dropDatabase(dbName, true, true, true);
}
}
/**
 * Creates a database directly in HMS (bypassing the catalog).
 *
 * Bug fix: the previous body ignored all three parameters and always created
 * TEST_DB_NAME with no catalog name and no parameters; the caller-supplied
 * values are now passed through.
 *
 * @param catName catalog name to create the database in (may be null)
 * @param dbName name of the database to create
 * @param params database parameters (may be null)
 */
private void createDatabaseInHms(String catName, String dbName,
    Map<String, String> params) throws TException {
  try (MetaStoreClient msClient = catalog_.getMetaStoreClient()) {
    MetastoreApiTestUtils.createDatabase(msClient, catName, dbName, params);
  }
}
/** Creates a table directly in HMS, bypassing the catalog service. */
private void createTableInHms(String dbName, String tblName, boolean isPartitioned)
    throws TException {
  try (MetaStoreClient client = catalog_.getMetaStoreClient()) {
    MetastoreApiTestUtils.createTable(client, null, dbName, tblName, null,
        isPartitioned);
  }
}
/** Adds the given partitions to a table directly in HMS, bypassing the catalog. */
private void addPartitionsInHms(String dbName, String tblName,
    List<List<String>> partitionValues) throws TException {
  try (MetaStoreClient client = catalog_.getMetaStoreClient()) {
    MetastoreApiTestUtils.addPartitions(client, dbName, tblName,
        partitionValues);
  }
}
/** Returns the id of the most recent notification event currently in HMS. */
private long getLatestEventIdFromHMS() throws TException {
  try (MetaStoreClient client = catalog_.getMetaStoreClient()) {
    // Return directly from inside try-with-resources; the client is still
    // closed on exit.
    return client.getHiveClient().getCurrentNotificationEventId().getEventId();
  }
}
/** Fetches the table object for dbName.tblName directly from HMS. */
private org.apache.hadoop.hive.metastore.api.Table getHmsTable(String dbName,
    String tblName) throws TException {
  try (MetaStoreClient client = catalog_.getMetaStoreClient()) {
    return client.getHiveClient().getTable(dbName, tblName);
  }
}
/**
 * Returns the cached catalog table as an HdfsTable, or null when the cached
 * entry is of a different type (e.g. an IncompleteTable).
 */
private org.apache.impala.catalog.HdfsTable getCatalogHdfsTable(String dbName,
    String tblName) throws CatalogException {
  org.apache.impala.catalog.Table table = catalog_.getTable(dbName, tblName);
  return (table instanceof HdfsTable) ? (HdfsTable) table : null;
}
/** Fetches a single partition (by partition values) directly from HMS. */
private Partition getPartitionInHms(String dbName, String tblName,
    List<String> partVal) throws TException {
  try (MetaStoreClient client = catalog_.getMetaStoreClient()) {
    return client.getHiveClient().getPartition(dbName, tblName, partVal);
  }
}
/** Alters a partition directly in HMS, bypassing the catalog service. */
private void alterPartitionInHms(String dbName, String tblName, Partition partition)
    throws TException {
  try (MetaStoreClient client = catalog_.getMetaStoreClient()) {
    client.getHiveClient().alter_partition(dbName, tblName, partition);
  }
}
/** Drops a partition (by partition values) directly in HMS. */
private void dropPartitionInHms(String dbName, String tblName, List<String> partVals,
    boolean deleteData) throws TException {
  try (MetaStoreClient client = catalog_.getMetaStoreClient()) {
    client.getHiveClient().dropPartition(dbName, tblName, partVals, deleteData);
  }
}
/** Fetches a database object directly from HMS. */
private Database getDatabaseInHms(String dbName) throws TException {
  try (MetaStoreClient client = catalog_.getMetaStoreClient()) {
    return client.getHiveClient().getDatabase(dbName);
  }
}
/**
 * Builds a partition-spec map (partition column name -> value) by pairing the
 * table's partition keys, in order, with the given values.
 */
private Map<String, String> getPartitionSpec(
    org.apache.hadoop.hive.metastore.api.Table tbl, List<String> vals) {
  Map<String, String> spec = new HashMap<>();
  int idx = 0;
  for (FieldSchema partCol : tbl.getPartitionKeys()) {
    spec.put(partCol.getName(), vals.get(idx++));
  }
  return spec;
}
/**
 * Creates a database through the catalog's HMS client and asserts that it is
 * immediately visible in the catalogd cache.
 */
private void createDatabaseInCatalog(String dbName) throws TException {
  Database hmsDb =
      MetastoreApiTestUtils.createHmsDatabaseObject(null, dbName, null);
  catalogHmsClient_.createDatabase(hmsDb);
  assertTrue("db " + dbName + " not present in catalogd",
      catalog_.getDb(dbName) != null);
}
}
// ==== dataset separator: next file is
// oracle/graal — substratevm/src/com.oracle.svm.jdwp.bridge/src/com/oracle/svm/jdwp/bridge/jniutils/JNI.java ====
/*
* Copyright (c) 2024, 2024, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
* under the terms of the GNU General Public License version 2 only, as
* published by the Free Software Foundation. Oracle designates this
* particular file as subject to the "Classpath" exception as provided
* by Oracle in the LICENSE file that accompanied this code.
*
* This code is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
* version 2 for more details (a copy is included in the LICENSE file that
* accompanied this code).
*
* You should have received a copy of the GNU General Public License version
* 2 along with this work; if not, write to the Free Software Foundation,
* Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
*
* Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
* or visit www.oracle.com if you need additional information or have any
* questions.
*/
package com.oracle.svm.jdwp.bridge.jniutils;
import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.Arrays;
import java.util.List;
import java.util.Optional;
import java.util.stream.Collectors;
import org.graalvm.nativeimage.ImageSingletons;
import org.graalvm.nativeimage.c.CContext;
import org.graalvm.nativeimage.c.function.CFunction.Transition;
import org.graalvm.nativeimage.c.function.CFunctionPointer;
import org.graalvm.nativeimage.c.function.InvokeCFunctionPointer;
import org.graalvm.nativeimage.c.struct.CField;
import org.graalvm.nativeimage.c.struct.CPointerTo;
import org.graalvm.nativeimage.c.struct.CStruct;
import org.graalvm.nativeimage.c.type.CCharPointer;
import org.graalvm.nativeimage.c.type.CDoublePointer;
import org.graalvm.nativeimage.c.type.CFloatPointer;
import org.graalvm.nativeimage.c.type.CIntPointer;
import org.graalvm.nativeimage.c.type.CLongPointer;
import org.graalvm.nativeimage.c.type.CShortPointer;
import org.graalvm.nativeimage.c.type.VoidPointer;
import org.graalvm.word.PointerBase;
public final class JNI {
// JNI status codes, mirroring the constants defined in jni.h.
public static final int JNI_OK = 0;
public static final int JNI_ERR = -1; /* unknown error */
public static final int JNI_EDETACHED = -2; /* thread detached from the VM */
public static final int JNI_EVERSION = -3; /* JNI version error */
public static final int JNI_ENOMEM = -4; /* not enough memory */
public static final int JNI_EEXIST = -5; /* VM already created */
public static final int JNI_EINVAL = -6; /* invalid arguments */
// JNI version constant for Java 10 (0x000a0000), as defined in jni.h.
public static final int JNI_VERSION_10 = 0x000a0000;
// Static-only holder class: instantiation is a programming error.
private JNI() {
throw new IllegalStateException("No instance allowed");
}
/** Opaque pointer corresponding to the JNI {@code jmethodID} handle. */
public interface JMethodID extends PointerBase {
}
/** Opaque pointer corresponding to the JNI {@code jfieldID} handle. */
public interface JFieldID extends PointerBase {
}
/** Opaque pointer corresponding to a JNI {@code jobject} reference. */
public interface JObject extends PointerBase {
}
/** JNI {@code jarray} reference, with release-mode constants. */
public interface JArray extends JObject {
// Modes for Release<Type>ArrayElements: 0 copies back and frees the buffer;
// 1 and 2 presumably correspond to JNI_COMMIT and JNI_ABORT respectively
// (values match jni.h — confirm against usage sites).
int MODE_WRITE_RELEASE = 0;
int MODE_WRITE = 1;
int MODE_RELEASE = 2;
}
/** JNI {@code jbooleanArray} reference. */
public interface JBooleanArray extends JArray {
}
/** JNI {@code jbyteArray} reference. */
public interface JByteArray extends JArray {
}
/** JNI {@code jcharArray} reference. */
public interface JCharArray extends JArray {
}
/** JNI {@code jshortArray} reference. */
public interface JShortArray extends JArray {
}
/** JNI {@code jintArray} reference. */
public interface JIntArray extends JArray {
}
/** JNI {@code jlongArray} reference. */
public interface JLongArray extends JArray {
}
/** JNI {@code jfloatArray} reference. */
public interface JFloatArray extends JArray {
}
/** JNI {@code jdoubleArray} reference. */
public interface JDoubleArray extends JArray {
}
/** JNI {@code jobjectArray} reference. */
public interface JObjectArray extends JArray {
}
/** JNI {@code jclass} reference. */
public interface JClass extends JObject {
}
/** JNI {@code jstring} reference. */
public interface JString extends JObject {
}
/** JNI {@code jthrowable} reference. */
public interface JThrowable extends JObject {
}
/** JNI weak global reference ({@code jweak}). */
public interface JWeak extends JObject {
}
/**
 * Access to the {@code jvalue} JNI union.
 *
 * <pre>
 * typedef union jvalue {
 *     jboolean z;
 *     jbyte    b;
 *     jchar    c;
 *     jshort   s;
 *     jint     i;
 *     jlong    j;
 *     jfloat   f;
 *     jdouble  d;
 *     jobject  l;
 * } jvalue;
 * </pre>
 */
@CContext(JNIHeaderDirectives.class)
@CStruct("jvalue")
public interface JValue extends PointerBase {
// @formatter:off
// Readers for each union member.
@CField("z") boolean getBoolean();
@CField("b") byte getByte();
@CField("c") char getChar();
@CField("s") short getShort();
@CField("i") int getInt();
@CField("j") long getLong();
@CField("f") float getFloat();
@CField("d") double getDouble();
@CField("l") JObject getJObject();
// Writers; the fields alias the same storage (C union semantics, per the
// typedef above), so setting one member overwrites the others.
@CField("z") void setBoolean(boolean b);
@CField("b") void setByte(byte b);
@CField("c") void setChar(char ch);
@CField("s") void setShort(short s);
@CField("i") void setInt(int i);
@CField("j") void setLong(long l);
@CField("f") void setFloat(float f);
@CField("d") void setDouble(double d);
@CField("l") void setJObject(JObject obj);
// @formatter:on
/**
 * Gets JValue in an array of JValues pointed to by this object.
 */
JValue addressOf(int index);
}
/** Representation of the JNI {@code JNIEnv_} structure (the per-thread JNI environment). */
@CContext(JNIHeaderDirectives.class)
@CStruct(value = "JNIEnv_", addStructKeyword = true)
public interface JNIEnv extends PointerBase {
// Pointer to the JNI function table for this environment.
@CField("functions")
JNINativeInterface getFunctions();
}
/** Pointer to a {@link JNIEnv} ({@code JNIEnv**}), e.g. an out-parameter of {@code GetEnv}. */
@CPointerTo(JNIEnv.class)
public interface JNIEnvPointer extends PointerBase {
JNIEnv readJNIEnv();
void writeJNIEnv(JNIEnv env);
}
/**
 * Access to the JNI function table ({@code JNINativeInterface_}). Each getter
 * reads one function-pointer field of the table; the returned pointers are
 * invoked through the typed {@code CFunctionPointer} interfaces declared
 * later in this class. Only the subset of JNI functions used by this project
 * is exposed here.
 */
@CContext(JNIHeaderDirectives.class)
@CStruct(value = "JNINativeInterface_", addStructKeyword = true)
public interface JNINativeInterface extends PointerBase {
@CField("NewString")
NewString getNewString();
@CField("GetStringLength")
GetStringLength getGetStringLength();
@CField("GetStringChars")
GetStringChars getGetStringChars();
@CField("ReleaseStringChars")
ReleaseStringChars getReleaseStringChars();
@CField("NewStringUTF")
NewStringUTF8 getNewStringUTF();
@CField("GetStringUTFLength")
GetStringUTFLength getGetStringUTFLength();
@CField("GetStringUTFChars")
GetStringUTFChars getGetStringUTFChars();
@CField("ReleaseStringUTFChars")
ReleaseStringUTFChars getReleaseStringUTFChars();
@CField("GetArrayLength")
GetArrayLength getGetArrayLength();
@CField("NewLocalRef")
NewLocalRef getNewLocalRef();
@CField("NewObjectArray")
NewObjectArray getNewObjectArray();
@CField("NewBooleanArray")
NewBooleanArray getNewBooleanArray();
@CField("NewByteArray")
NewByteArray getNewByteArray();
@CField("NewCharArray")
NewCharArray getNewCharArray();
@CField("NewShortArray")
NewShortArray getNewShortArray();
@CField("NewIntArray")
NewIntArray getNewIntArray();
@CField("NewLongArray")
NewLongArray getNewLongArray();
@CField("NewFloatArray")
NewFloatArray getNewFloatArray();
@CField("NewDoubleArray")
NewDoubleArray getNewDoubleArray();
@CField("GetObjectArrayElement")
GetObjectArrayElement getGetObjectArrayElement();
@CField("SetObjectArrayElement")
SetObjectArrayElement getSetObjectArrayElement();
@CField("GetBooleanArrayElements")
GetBooleanArrayElements getGetBooleanArrayElements();
@CField("GetByteArrayElements")
GetByteArrayElements getGetByteArrayElements();
@CField("GetCharArrayElements")
GetCharArrayElements getGetCharArrayElements();
@CField("GetShortArrayElements")
GetShortArrayElements getGetShortArrayElements();
@CField("GetIntArrayElements")
GetIntArrayElements getGetIntArrayElements();
@CField("GetLongArrayElements")
GetLongArrayElements getGetLongArrayElements();
@CField("GetFloatArrayElements")
GetFloatArrayElements getGetFloatArrayElements();
@CField("GetDoubleArrayElements")
GetDoubleArrayElements getGetDoubleArrayElements();
@CField("ReleaseBooleanArrayElements")
ReleaseBooleanArrayElements getReleaseBooleanArrayElements();
@CField("ReleaseByteArrayElements")
ReleaseByteArrayElements getReleaseByteArrayElements();
@CField("ReleaseCharArrayElements")
ReleaseCharArrayElements getReleaseCharArrayElements();
@CField("ReleaseShortArrayElements")
ReleaseShortArrayElements getReleaseShortArrayElements();
@CField("ReleaseIntArrayElements")
ReleaseIntArrayElements getReleaseIntArrayElements();
@CField("ReleaseLongArrayElements")
ReleaseLongArrayElements getReleaseLongArrayElements();
@CField("ReleaseFloatArrayElements")
ReleaseFloatArrayElements getReleaseFloatArrayElements();
@CField("ReleaseDoubleArrayElements")
ReleaseDoubleArrayElements getReleaseDoubleArrayElements();
@CField("GetBooleanArrayRegion")
GetBooleanArrayRegion getGetBooleanArrayRegion();
@CField("GetByteArrayRegion")
GetByteArrayRegion getGetByteArrayRegion();
@CField("GetCharArrayRegion")
GetCharArrayRegion getGetCharArrayRegion();
@CField("GetShortArrayRegion")
GetShortArrayRegion getGetShortArrayRegion();
@CField("GetIntArrayRegion")
GetIntArrayRegion getGetIntArrayRegion();
@CField("GetLongArrayRegion")
GetLongArrayRegion getGetLongArrayRegion();
@CField("GetFloatArrayRegion")
GetFloatArrayRegion getGetFloatArrayRegion();
@CField("GetDoubleArrayRegion")
GetDoubleArrayRegion getGetDoubleArrayRegion();
@CField("SetBooleanArrayRegion")
SetBooleanArrayRegion getSetBooleanArrayRegion();
@CField("SetByteArrayRegion")
SetByteArrayRegion getSetByteArrayRegion();
@CField("SetCharArrayRegion")
SetCharArrayRegion getSetCharArrayRegion();
@CField("SetShortArrayRegion")
SetShortArrayRegion getSetShortArrayRegion();
@CField("SetIntArrayRegion")
SetIntArrayRegion getSetIntArrayRegion();
@CField("SetLongArrayRegion")
SetLongArrayRegion getSetLongArrayRegion();
@CField("SetFloatArrayRegion")
SetFloatArrayRegion getSetFloatArrayRegion();
@CField("SetDoubleArrayRegion")
SetDoubleArrayRegion getSetDoubleArrayRegion();
@CField("FindClass")
FindClass getFindClass();
@CField("DefineClass")
DefineClass getDefineClass();
@CField("IsSameObject")
IsSameObject getIsSameObject();
@CField("GetObjectClass")
GetObjectClass getGetObjectClass();
@CField("NewGlobalRef")
NewGlobalRef getNewGlobalRef();
@CField("DeleteGlobalRef")
DeleteGlobalRef getDeleteGlobalRef();
@CField("NewWeakGlobalRef")
NewWeakGlobalRef getNewWeakGlobalRef();
@CField("DeleteWeakGlobalRef")
DeleteWeakGlobalRef getDeleteWeakGlobalRef();
@CField("DeleteLocalRef")
DeleteLocalRef getDeleteLocalRef();
@CField("PushLocalFrame")
PushLocalFrame getPushLocalFrame();
@CField("PopLocalFrame")
PopLocalFrame getPopLocalFrame();
@CField("NewObjectA")
NewObjectA getNewObjectA();
@CField("GetStaticMethodID")
GetStaticMethodID getGetStaticMethodID();
@CField("GetMethodID")
GetMethodID getGetMethodID();
@CField("GetStaticFieldID")
GetStaticFieldID getGetStaticFieldID();
@CField("GetFieldID")
GetFieldID getGetFieldID();
@CField("CallStaticBooleanMethodA")
CallStaticBooleanMethodA getCallStaticBooleanMethodA();
@CField("CallStaticIntMethodA")
CallStaticIntMethodA getCallStaticIntMethodA();
@CField("CallStaticVoidMethodA")
CallStaticVoidMethodA getCallStaticVoidMethodA();
@CField("CallStaticObjectMethodA")
CallStaticObjectMethodA getCallStaticObjectMethodA();
@CField("CallStaticLongMethodA")
CallStaticLongMethodA getCallStaticLongMethodA();
@CField("CallObjectMethodA")
CallObjectMethodA getCallObjectMethodA();
@CField("CallVoidMethodA")
CallVoidMethodA getCallVoidMethodA();
@CField("CallBooleanMethodA")
CallBooleanMethodA getCallBooleanMethodA();
@CField("CallShortMethodA")
CallShortMethodA getCallShortMethodA();
@CField("CallIntMethodA")
CallIntMethodA getCallIntMethodA();
@CField("CallLongMethodA")
CallLongMethodA getCallLongMethodA();
@CField("CallDoubleMethodA")
CallDoubleMethodA getCallDoubleMethodA();
@CField("CallFloatMethodA")
CallFloatMethodA getCallFloatMethodA();
@CField("CallByteMethodA")
CallByteMethodA getCallByteMethodA();
@CField("CallCharMethodA")
CallCharMethodA getCallCharMethodA();
@CField("GetStaticObjectField")
GetStaticObjectField getGetStaticObjectField();
@CField("GetIntField")
GetIntField getGetIntField();
@CField("GetStaticBooleanField")
GetStaticBooleanField getGetStaticBooleanField();
@CField("SetStaticBooleanField")
SetStaticBooleanField getSetStaticBooleanField();
@CField("ExceptionCheck")
ExceptionCheck getExceptionCheck();
@CField("ExceptionOccurred")
ExceptionOccurred getExceptionOccurred();
@CField("ExceptionClear")
ExceptionClear getExceptionClear();
@CField("ExceptionDescribe")
ExceptionDescribe getExceptionDescribe();
@CField("Throw")
Throw getThrow();
@CField("GetObjectRefType")
GetObjectRefType getGetObjectRefType();
@CField("GetDirectBufferAddress")
GetDirectBufferAddress getGetDirectBufferAddress();
@CField("IsInstanceOf")
IsInstanceOf getIsInstanceOf();
@CField("GetJavaVM")
GetJavaVM getGetJavaVM();
}
/** Representation of the JNI {@code JavaVM_} structure (the invocation interface handle). */
@CContext(JNIHeaderDirectives.class)
@CStruct(value = "JavaVM_", addStructKeyword = true)
public interface JavaVM extends PointerBase {
// Pointer to the invocation-interface function table of this VM.
@CField("functions")
JNIInvokeInterface getFunctions();
}
/** Pointer to a {@link JavaVM} ({@code JavaVM**}). */
@CPointerTo(JavaVM.class)
public interface JavaVMPointer extends PointerBase {
JavaVM readJavaVM();
void writeJavaVM(JavaVM javaVM);
}
/**
 * Representation of the JNI {@code JavaVMAttachArgs} structure, passed to the
 * AttachCurrentThread family of invocation-interface functions.
 */
@CContext(JNIHeaderDirectives.class)
@CStruct(value = "JavaVMAttachArgs", addStructKeyword = true)
public interface JavaVMAttachArgs extends PointerBase {
// Requested JNI version, e.g. JNI_VERSION_10.
@CField("version")
int getVersion();
@CField("version")
void setVersion(int version);
// Thread name as a C string (may be null per jni.h).
@CField("name")
CCharPointer getName();
@CField("name")
void setName(CCharPointer name);
// ThreadGroup reference for the attached thread (may be null per jni.h).
@CField("group")
JObject getGroup();
@CField("group")
void setGroup(JObject group);
}
/**
 * Access to the JNI invocation-interface function table
 * ({@code JNIInvokeInterface_}); exposes only the functions used by this
 * project.
 */
@CContext(JNIHeaderDirectives.class)
@CStruct(value = "JNIInvokeInterface_", addStructKeyword = true)
public interface JNIInvokeInterface extends PointerBase {
@CField("AttachCurrentThread")
AttachCurrentThread getAttachCurrentThread();
@CField("AttachCurrentThreadAsDaemon")
AttachCurrentThreadAsDaemon getAttachCurrentThreadAsDaemon();
@CField("DetachCurrentThread")
DetachCurrentThread getDetachCurrentThread();
@CField("GetEnv")
GetEnv getGetEnv();
}
// Typed function pointers for the JNI Call<Static><Type>MethodA family.
// "args" points to the first element of a jvalue argument array (use
// JValue.addressOf to build it).
public interface CallStaticIntMethodA extends CFunctionPointer {
@InvokeCFunctionPointer
int call(JNIEnv env, JClass clazz, JMethodID methodID, JValue args);
}
public interface CallStaticBooleanMethodA extends CFunctionPointer {
@InvokeCFunctionPointer
boolean call(JNIEnv env, JClass clazz, JMethodID methodID, JValue args);
}
public interface CallStaticVoidMethodA extends CFunctionPointer {
@InvokeCFunctionPointer
void call(JNIEnv env, JClass clazz, JMethodID methodID, JValue args);
}
public interface CallStaticObjectMethodA extends CFunctionPointer {
@InvokeCFunctionPointer
JObject call(JNIEnv env, JClass clazz, JMethodID methodID, JValue args);
// Variant invoked without a native-transition (no safepoint handshake).
@InvokeCFunctionPointer(transition = Transition.NO_TRANSITION)
JObject callNoTransition(JNIEnv env, JClass clazz, JMethodID methodID, JValue args);
}
public interface CallStaticLongMethodA extends CFunctionPointer {
@InvokeCFunctionPointer
long call(JNIEnv env, JClass clazz, JMethodID methodID, JValue args);
}
public interface CallObjectMethodA extends CFunctionPointer {
@InvokeCFunctionPointer
JObject call(JNIEnv env, JObject object, JMethodID methodID, JValue args);
}
public interface CallVoidMethodA extends CFunctionPointer {
@InvokeCFunctionPointer
void call(JNIEnv env, JObject o, JMethodID methodID, JValue args);
}
public interface CallBooleanMethodA extends CFunctionPointer {
@InvokeCFunctionPointer
boolean call(JNIEnv env, JObject o, JMethodID methodID, JValue args);
}
public interface CallShortMethodA extends CFunctionPointer {
@InvokeCFunctionPointer
short call(JNIEnv env, JObject o, JMethodID methodID, JValue args);
}
public interface CallIntMethodA extends CFunctionPointer {
@InvokeCFunctionPointer
int call(JNIEnv env, JObject o, JMethodID methodID, JValue args);
}
public interface CallLongMethodA extends CFunctionPointer {
@InvokeCFunctionPointer
long call(JNIEnv env, JObject o, JMethodID methodID, JValue args);
}
public interface CallDoubleMethodA extends CFunctionPointer {
@InvokeCFunctionPointer
double call(JNIEnv env, JObject o, JMethodID methodID, JValue args);
}
public interface CallFloatMethodA extends CFunctionPointer {
@InvokeCFunctionPointer
float call(JNIEnv env, JObject o, JMethodID methodID, JValue args);
}
public interface CallByteMethodA extends CFunctionPointer {
@InvokeCFunctionPointer
byte call(JNIEnv env, JObject o, JMethodID methodID, JValue args);
}
public interface CallCharMethodA extends CFunctionPointer {
@InvokeCFunctionPointer
char call(JNIEnv env, JObject o, JMethodID methodID, JValue args);
}
// Typed function pointers for the JNI reference-management functions.
public interface DeleteGlobalRef extends CFunctionPointer {
@InvokeCFunctionPointer
void call(JNIEnv env, JObject gref);
}
public interface DeleteWeakGlobalRef extends CFunctionPointer {
@InvokeCFunctionPointer
void call(JNIEnv env, JWeak wref);
}
public interface DeleteLocalRef extends CFunctionPointer {
@InvokeCFunctionPointer
void call(JNIEnv env, JObject lref);
}
public interface PushLocalFrame extends CFunctionPointer {
@InvokeCFunctionPointer
int call(JNIEnv env, int capacity);
}
public interface PopLocalFrame extends CFunctionPointer {
@InvokeCFunctionPointer
JObject call(JNIEnv env, JObject result);
}
// Typed function pointers for the JNI exception-handling functions. The
// NoTransition variants are invoked without a native-transition.
public interface ExceptionCheck extends CFunctionPointer {
@InvokeCFunctionPointer
boolean call(JNIEnv env);
@InvokeCFunctionPointer(transition = Transition.NO_TRANSITION)
boolean callNoTransition(JNIEnv env);
}
public interface ExceptionClear extends CFunctionPointer {
@InvokeCFunctionPointer
void call(JNIEnv env);
}
public interface ExceptionDescribe extends CFunctionPointer {
@InvokeCFunctionPointer
void call(JNIEnv env);
@InvokeCFunctionPointer(transition = Transition.NO_TRANSITION)
void callNoTransition(JNIEnv env);
}
public interface ExceptionOccurred extends CFunctionPointer {
@InvokeCFunctionPointer
JThrowable call(JNIEnv env);
}
// Typed function pointers for JNI class lookup/definition and array length.
public interface FindClass extends CFunctionPointer {
@InvokeCFunctionPointer
JClass call(JNIEnv env, CCharPointer name);
// Variant invoked without a native-transition.
@InvokeCFunctionPointer(transition = Transition.NO_TRANSITION)
JClass callNoTransition(JNIEnv env, CCharPointer name);
}
public interface DefineClass extends CFunctionPointer {
@InvokeCFunctionPointer
JClass call(JNIEnv env, CCharPointer name, JObject loader, CCharPointer buf, long bufLen);
}
public interface GetArrayLength extends CFunctionPointer {
@InvokeCFunctionPointer
int call(JNIEnv env, JArray array);
}
// --- JNI Get<Type>ArrayElements ---
// Each returns a C pointer to the array's elements (possibly a copy; when
// isCopy is non-null it receives whether a copy was made). Pointers must be
// released with the matching Release<Type>ArrayElements function.
public interface GetBooleanArrayElements extends CFunctionPointer {
@InvokeCFunctionPointer
CCharPointer call(JNIEnv env, JBooleanArray array, JValue isCopy);
}
public interface GetByteArrayElements extends CFunctionPointer {
@InvokeCFunctionPointer
CCharPointer call(JNIEnv env, JByteArray array, JValue isCopy);
}
public interface GetCharArrayElements extends CFunctionPointer {
@InvokeCFunctionPointer
CShortPointer call(JNIEnv env, JCharArray array, JValue isCopy);
}
public interface GetShortArrayElements extends CFunctionPointer {
@InvokeCFunctionPointer
CShortPointer call(JNIEnv env, JShortArray array, JValue isCopy);
}
public interface GetIntArrayElements extends CFunctionPointer {
@InvokeCFunctionPointer
CIntPointer call(JNIEnv env, JIntArray array, JValue isCopy);
}
public interface GetLongArrayElements extends CFunctionPointer {
@InvokeCFunctionPointer
CLongPointer call(JNIEnv env, JLongArray array, JValue isCopy);
}
public interface GetFloatArrayElements extends CFunctionPointer {
@InvokeCFunctionPointer
CFloatPointer call(JNIEnv env, JFloatArray array, JValue isCopy);
}
public interface GetDoubleArrayElements extends CFunctionPointer {
@InvokeCFunctionPointer
CDoublePointer call(JNIEnv env, JDoubleArray array, JValue isCopy);
}
// --- JNI method and object queries ---
// Resolves an instance method ID from a name and JNI signature string.
public interface GetMethodID extends CFunctionPointer {
@InvokeCFunctionPointer
JMethodID call(JNIEnv env, JClass clazz, CCharPointer name, CCharPointer sig);
@InvokeCFunctionPointer(transition = Transition.NO_TRANSITION)
JMethodID callNoTransition(JNIEnv env, JClass clazz, CCharPointer name, CCharPointer sig);
}
// Returns the element at 'index' of an object array.
public interface GetObjectArrayElement extends CFunctionPointer {
@InvokeCFunctionPointer
JObject call(JNIEnv env, JObjectArray array, int index);
}
// Returns the class of the given object.
public interface GetObjectClass extends CFunctionPointer {
@InvokeCFunctionPointer
JClass call(JNIEnv env, JObject object);
}
// Returns the JNI reference kind of obj as an int code (jobjectRefType).
public interface GetObjectRefType extends CFunctionPointer {
@InvokeCFunctionPointer
int call(JNIEnv env, JObject obj);
}
// Resolves a static method ID from a name and JNI signature string.
public interface GetStaticMethodID extends CFunctionPointer {
@InvokeCFunctionPointer
JMethodID call(JNIEnv env, JClass clazz, CCharPointer name, CCharPointer sig);
@InvokeCFunctionPointer(transition = Transition.NO_TRANSITION)
JMethodID callNoTransition(JNIEnv env, JClass clazz, CCharPointer name, CCharPointer sig);
}
// Returns a pointer to the string's UTF-16 code units (pair with ReleaseStringChars).
public interface GetStringChars extends CFunctionPointer {
@InvokeCFunctionPointer
CShortPointer call(JNIEnv env, JString string, JValue isCopy);
}
// Returns the string length in UTF-16 code units.
public interface GetStringLength extends CFunctionPointer {
@InvokeCFunctionPointer
int call(JNIEnv env, JString string);
}
// Returns the string in modified-UTF-8 form (pair with ReleaseStringUTFChars).
public interface GetStringUTFChars extends CFunctionPointer {
@InvokeCFunctionPointer
CCharPointer call(JNIEnv env, JString string, JValue isCopy);
}
// Returns the byte length of the string's modified-UTF-8 form.
public interface GetStringUTFLength extends CFunctionPointer {
@InvokeCFunctionPointer
int call(JNIEnv env, JString str);
}
// Tests whether two references denote the same Java object.
public interface IsSameObject extends CFunctionPointer {
@InvokeCFunctionPointer
boolean call(JNIEnv env, JObject ref1, JObject ref2);
}
// --- JNI New<Type>Array: allocates a primitive array of the given length ---
public interface NewBooleanArray extends CFunctionPointer {
@InvokeCFunctionPointer
JBooleanArray call(JNIEnv env, int len);
}
public interface NewByteArray extends CFunctionPointer {
@InvokeCFunctionPointer
JByteArray call(JNIEnv env, int len);
}
public interface NewCharArray extends CFunctionPointer {
@InvokeCFunctionPointer
JCharArray call(JNIEnv env, int len);
}
public interface NewShortArray extends CFunctionPointer {
@InvokeCFunctionPointer
JShortArray call(JNIEnv env, int len);
}
public interface NewIntArray extends CFunctionPointer {
@InvokeCFunctionPointer
JIntArray call(JNIEnv env, int len);
}
public interface NewLongArray extends CFunctionPointer {
@InvokeCFunctionPointer
JLongArray call(JNIEnv env, int len);
}
public interface NewFloatArray extends CFunctionPointer {
@InvokeCFunctionPointer
JFloatArray call(JNIEnv env, int len);
}
public interface NewDoubleArray extends CFunctionPointer {
@InvokeCFunctionPointer
JDoubleArray call(JNIEnv env, int len);
}
// Creates a global reference from the given reference.
public interface NewGlobalRef extends CFunctionPointer {
@InvokeCFunctionPointer
JObject call(JNIEnv env, JObject lobj);
}
// Creates a weak global reference from the given reference.
public interface NewWeakGlobalRef extends CFunctionPointer {
@InvokeCFunctionPointer
JWeak call(JNIEnv env, JObject lobj);
}
// Allocates an object and runs the constructor identified by methodID,
// with arguments packed in a jvalue array.
public interface NewObjectA extends CFunctionPointer {
@InvokeCFunctionPointer
JObject call(JNIEnv env, JClass clazz, JMethodID methodID, JValue args);
@InvokeCFunctionPointer(transition = Transition.NO_TRANSITION)
JObject callNoTransition(JNIEnv env, JClass clazz, JMethodID methodID, JValue args);
}
// Creates a new local reference to the given object.
public interface NewLocalRef extends CFunctionPointer {
@InvokeCFunctionPointer
JObject call(JNIEnv env, JObject obj);
}
// Allocates an object array of the given length, element class, and initial element value.
public interface NewObjectArray extends CFunctionPointer {
@InvokeCFunctionPointer
JObjectArray call(JNIEnv env, int len, JClass clazz, JObject init);
}
// Creates a java.lang.String from 'len' UTF-16 code units.
public interface NewString extends CFunctionPointer {
@InvokeCFunctionPointer
JString call(JNIEnv env, CShortPointer unicode, int len);
}
// Creates a java.lang.String from a NUL-terminated modified-UTF-8 buffer
// (mirrors the JNI function NewStringUTF).
public interface NewStringUTF8 extends CFunctionPointer {
@InvokeCFunctionPointer
JString call(JNIEnv env, CCharPointer bytes);
@InvokeCFunctionPointer(transition = Transition.NO_TRANSITION)
JString callNoTransition(JNIEnv env, CCharPointer bytes);
}
// --- JNI Release<Type>ArrayElements ---
// Releases a pointer obtained from the matching Get<Type>ArrayElements call;
// 'mode' controls copy-back/free semantics (0, JNI_COMMIT, or JNI_ABORT).
public interface ReleaseBooleanArrayElements extends CFunctionPointer {
@InvokeCFunctionPointer
void call(JNIEnv env, JBooleanArray array, CCharPointer elems, int mode);
}
public interface ReleaseByteArrayElements extends CFunctionPointer {
@InvokeCFunctionPointer
void call(JNIEnv env, JByteArray array, CCharPointer elems, int mode);
}
public interface ReleaseCharArrayElements extends CFunctionPointer {
@InvokeCFunctionPointer
void call(JNIEnv env, JCharArray array, CShortPointer elems, int mode);
}
public interface ReleaseShortArrayElements extends CFunctionPointer {
@InvokeCFunctionPointer
void call(JNIEnv env, JShortArray array, CShortPointer elems, int mode);
}
public interface ReleaseIntArrayElements extends CFunctionPointer {
@InvokeCFunctionPointer
void call(JNIEnv env, JIntArray array, CIntPointer elems, int mode);
}
public interface ReleaseLongArrayElements extends CFunctionPointer {
@InvokeCFunctionPointer
void call(JNIEnv env, JLongArray array, CLongPointer elems, int mode);
}
public interface ReleaseFloatArrayElements extends CFunctionPointer {
@InvokeCFunctionPointer
void call(JNIEnv env, JFloatArray array, CFloatPointer elems, int mode);
}
public interface ReleaseDoubleArrayElements extends CFunctionPointer {
@InvokeCFunctionPointer
void call(JNIEnv env, JDoubleArray array, CDoublePointer elems, int mode);
}
// --- JNI Get<Type>ArrayRegion ---
// Copies elements [start, start+len) from a Java array into the native buffer 'buf'.
public interface GetBooleanArrayRegion extends CFunctionPointer {
@InvokeCFunctionPointer
void call(JNIEnv env, JBooleanArray array, int start, int len, CCharPointer buf);
}
public interface GetByteArrayRegion extends CFunctionPointer {
@InvokeCFunctionPointer
void call(JNIEnv env, JByteArray array, int start, int len, CCharPointer buf);
}
public interface GetCharArrayRegion extends CFunctionPointer {
@InvokeCFunctionPointer
void call(JNIEnv env, JCharArray array, int start, int len, CShortPointer buf);
}
public interface GetShortArrayRegion extends CFunctionPointer {
@InvokeCFunctionPointer
void call(JNIEnv env, JShortArray array, int start, int len, CShortPointer buf);
}
public interface GetIntArrayRegion extends CFunctionPointer {
@InvokeCFunctionPointer
void call(JNIEnv env, JIntArray array, int start, int len, CIntPointer buf);
}
public interface GetLongArrayRegion extends CFunctionPointer {
@InvokeCFunctionPointer
void call(JNIEnv env, JLongArray array, int start, int len, CLongPointer buf);
}
public interface GetFloatArrayRegion extends CFunctionPointer {
@InvokeCFunctionPointer
void call(JNIEnv env, JFloatArray array, int start, int len, CFloatPointer buf);
}
public interface GetDoubleArrayRegion extends CFunctionPointer {
@InvokeCFunctionPointer
void call(JNIEnv env, JDoubleArray array, int start, int len, CDoublePointer buf);
}
// --- JNI Set<Type>ArrayRegion ---
// Copies 'len' elements from the native buffer 'buf' into a Java array at 'start'.
public interface SetBooleanArrayRegion extends CFunctionPointer {
@InvokeCFunctionPointer
void call(JNIEnv env, JBooleanArray array, int start, int len, CCharPointer buf);
}
public interface SetByteArrayRegion extends CFunctionPointer {
@InvokeCFunctionPointer
void call(JNIEnv env, JByteArray array, int start, int len, CCharPointer buf);
}
public interface SetCharArrayRegion extends CFunctionPointer {
@InvokeCFunctionPointer
void call(JNIEnv env, JCharArray array, int start, int len, CShortPointer buf);
}
public interface SetShortArrayRegion extends CFunctionPointer {
@InvokeCFunctionPointer
void call(JNIEnv env, JShortArray array, int start, int len, CShortPointer buf);
}
public interface SetIntArrayRegion extends CFunctionPointer {
@InvokeCFunctionPointer
void call(JNIEnv env, JIntArray array, int start, int len, CIntPointer buf);
}
public interface SetLongArrayRegion extends CFunctionPointer {
@InvokeCFunctionPointer
void call(JNIEnv env, JLongArray array, int start, int len, CLongPointer buf);
}
public interface SetFloatArrayRegion extends CFunctionPointer {
@InvokeCFunctionPointer
void call(JNIEnv env, JFloatArray array, int start, int len, CFloatPointer buf);
}
public interface SetDoubleArrayRegion extends CFunctionPointer {
@InvokeCFunctionPointer
void call(JNIEnv env, JDoubleArray array, int start, int len, CDoublePointer buf);
}
// Releases the UTF-16 buffer obtained from GetStringChars.
public interface ReleaseStringChars extends CFunctionPointer {
@InvokeCFunctionPointer
void call(JNIEnv env, JString string, CShortPointer chars);
}
// Releases the modified-UTF-8 buffer obtained from GetStringUTFChars.
public interface ReleaseStringUTFChars extends CFunctionPointer {
@InvokeCFunctionPointer
void call(JNIEnv env, JString string, CCharPointer chars);
}
// Stores 'val' at the given index of an object array.
public interface SetObjectArrayElement extends CFunctionPointer {
@InvokeCFunctionPointer
void call(JNIEnv env, JObjectArray array, int index, JObject val);
}
// Raises the given throwable in the current thread; returns 0 on success.
public interface Throw extends CFunctionPointer {
@InvokeCFunctionPointer
int call(JNIEnv env, JThrowable throwable);
@InvokeCFunctionPointer(transition = Transition.NO_TRANSITION)
int callNoTransition(JNIEnv env, JThrowable throwable);
}
// Returns the start address of a direct java.nio.Buffer (per JNI semantics).
public interface GetDirectBufferAddress extends CFunctionPointer {
@InvokeCFunctionPointer
VoidPointer call(JNIEnv env, JObject buf);
}
// Tests whether object o is an instance of class c.
public interface IsInstanceOf extends CFunctionPointer {
@InvokeCFunctionPointer
boolean call(JNIEnv env, JObject o, JClass c);
}
// Resolves a static field ID from a name and JNI signature string.
public interface GetStaticFieldID extends CFunctionPointer {
@InvokeCFunctionPointer
JFieldID call(JNIEnv env, JClass clazz, CCharPointer name, CCharPointer sig);
}
// Resolves an instance field ID from a name and JNI signature string.
public interface GetFieldID extends CFunctionPointer {
@InvokeCFunctionPointer
JFieldID call(JNIEnv env, JClass c, CCharPointer name, CCharPointer sig);
}
// Reads a static Object field.
public interface GetStaticObjectField extends CFunctionPointer {
@InvokeCFunctionPointer
JObject call(JNIEnv env, JClass clazz, JFieldID fieldID);
}
// Reads an instance int field.
public interface GetIntField extends CFunctionPointer {
@InvokeCFunctionPointer
int call(JNIEnv env, JObject o, JFieldID fieldId);
}
// Reads a static boolean field.
public interface GetStaticBooleanField extends CFunctionPointer {
@InvokeCFunctionPointer
boolean call(JNIEnv env, JClass clazz, JFieldID fieldID);
}
// Writes a static boolean field.
public interface SetStaticBooleanField extends CFunctionPointer {
@InvokeCFunctionPointer
void call(JNIEnv env, JClass clazz, JFieldID fieldID, boolean value);
}
// Stores the JavaVM pointer for this environment into javaVMOut; returns 0 on success.
public interface GetJavaVM extends CFunctionPointer {
@InvokeCFunctionPointer
int call(JNIEnv env, JavaVMPointer javaVMOut);
}
// --- JNI Invocation API (operate on a JavaVM rather than a JNIEnv) ---
// Attaches the current native thread to the VM, producing its JNIEnv in envOut.
public interface AttachCurrentThread extends CFunctionPointer {
@InvokeCFunctionPointer
int call(JavaVM vm, JNIEnvPointer envOut, JavaVMAttachArgs args);
}
// Like AttachCurrentThread, but the attached thread is a daemon thread.
public interface AttachCurrentThreadAsDaemon extends CFunctionPointer {
@InvokeCFunctionPointer
int call(JavaVM vm, JNIEnvPointer envOut, JavaVMAttachArgs args);
}
// Detaches the current native thread from the VM.
public interface DetachCurrentThread extends CFunctionPointer {
@InvokeCFunctionPointer
int call(JavaVM vm);
}
// Obtains the current thread's JNIEnv for the requested JNI version into envOut.
public interface GetEnv extends CFunctionPointer {
@InvokeCFunctionPointer
int call(JavaVM vm, JNIEnvPointer envOut, int version);
}
static class JNIHeaderDirectives implements CContext.Directives {
private static final String[] INCLUDES = {"jni.h", "jni_md.h"};
@Override
public boolean isInConfiguration() {
return ImageSingletons.contains(NativeBridgeSupport.class);
}
@Override
public List<String> getOptions() {
return Arrays.stream(findJNIHeaders()).map((p) -> "-I" + p.getParent()).collect(Collectors.toList());
}
@Override
public List<String> getHeaderFiles() {
return Arrays.stream(findJNIHeaders()).map((p) -> '<' + p.toString() + '>').collect(Collectors.toList());
}
private static Path[] findJNIHeaders() {
Path javaHome = Paths.get(System.getProperty("java.home"));
Path includeFolder = javaHome.resolve("include");
if (!Files.exists(includeFolder)) {
Path parent = javaHome.getParent();
if (parent != null) {
javaHome = parent;
}
}
includeFolder = javaHome.resolve("include");
if (!Files.exists(includeFolder)) {
throw new IllegalStateException("Cannot find 'include' folder in JDK.");
}
Path[] res = new Path[INCLUDES.length];
try {
for (int i = 0; i < INCLUDES.length; i++) {
String include = INCLUDES[i];
Optional<Path> includeFile = Files.find(includeFolder, 2, (p, _) -> include.equals(p.getFileName().toString())).findFirst();
if (!includeFile.isPresent()) {
throw new IllegalStateException("Include: " + res[i] + " does not exist.");
}
res[i] = includeFile.get();
}
return res;
} catch (IOException ioe) {
throw new RuntimeException(ioe);
}
}
}
}
// ==== File boundary: the declarations above belong to a separate source file. ====
// Below: googleapis/google-cloud-java — java-dms/proto-google-cloud-dms-v1/src/main/java/com/google/cloud/clouddms/v1/ListMappingRulesResponse.java
/*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/clouddms/v1/clouddms.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.clouddms.v1;
/**
*
*
* <pre>
* Response message for 'ListMappingRulesRequest' request.
* </pre>
*
* Protobuf type {@code google.cloud.clouddms.v1.ListMappingRulesResponse}
*/
public final class ListMappingRulesResponse extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.clouddms.v1.ListMappingRulesResponse)
ListMappingRulesResponseOrBuilder {
private static final long serialVersionUID = 0L;
// Use ListMappingRulesResponse.newBuilder() to construct.
private ListMappingRulesResponse(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
// Initializes field defaults for the default (empty) instance.
private ListMappingRulesResponse() {
mappingRules_ = java.util.Collections.emptyList();
nextPageToken_ = "";
}
// Called reflectively by the protobuf runtime to create new instances.
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
return new ListMappingRulesResponse();
}
// Returns the protobuf descriptor for this message type.
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.clouddms.v1.ClouddmsProto
.internal_static_google_cloud_clouddms_v1_ListMappingRulesResponse_descriptor;
}
// Wires generated accessors to descriptor fields for reflective access.
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.clouddms.v1.ClouddmsProto
.internal_static_google_cloud_clouddms_v1_ListMappingRulesResponse_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.clouddms.v1.ListMappingRulesResponse.class,
com.google.cloud.clouddms.v1.ListMappingRulesResponse.Builder.class);
}
// Wire field number for 'mapping_rules'.
public static final int MAPPING_RULES_FIELD_NUMBER = 1;
@SuppressWarnings("serial")
// Immutable list backing the repeated mapping_rules field.
private java.util.List<com.google.cloud.clouddms.v1.MappingRule> mappingRules_;
/**
*
*
* <pre>
* The list of conversion workspace mapping rules.
* </pre>
*
* <code>repeated .google.cloud.clouddms.v1.MappingRule mapping_rules = 1;</code>
*/
@java.lang.Override
public java.util.List<com.google.cloud.clouddms.v1.MappingRule> getMappingRulesList() {
return mappingRules_;
}
/**
*
*
* <pre>
* The list of conversion workspace mapping rules.
* </pre>
*
* <code>repeated .google.cloud.clouddms.v1.MappingRule mapping_rules = 1;</code>
*/
@java.lang.Override
public java.util.List<? extends com.google.cloud.clouddms.v1.MappingRuleOrBuilder>
getMappingRulesOrBuilderList() {
return mappingRules_;
}
/**
*
*
* <pre>
* The list of conversion workspace mapping rules.
* </pre>
*
* <code>repeated .google.cloud.clouddms.v1.MappingRule mapping_rules = 1;</code>
*/
@java.lang.Override
public int getMappingRulesCount() {
return mappingRules_.size();
}
/**
*
*
* <pre>
* The list of conversion workspace mapping rules.
* </pre>
*
* <code>repeated .google.cloud.clouddms.v1.MappingRule mapping_rules = 1;</code>
*/
@java.lang.Override
public com.google.cloud.clouddms.v1.MappingRule getMappingRules(int index) {
return mappingRules_.get(index);
}
/**
*
*
* <pre>
* The list of conversion workspace mapping rules.
* </pre>
*
* <code>repeated .google.cloud.clouddms.v1.MappingRule mapping_rules = 1;</code>
*/
@java.lang.Override
public com.google.cloud.clouddms.v1.MappingRuleOrBuilder getMappingRulesOrBuilder(int index) {
return mappingRules_.get(index);
}
// Wire field number for 'next_page_token'.
public static final int NEXT_PAGE_TOKEN_FIELD_NUMBER = 2;
@SuppressWarnings("serial")
// Holds either a String or a ByteString; lazily converted and cached by the accessors below.
private volatile java.lang.Object nextPageToken_ = "";
/**
*
*
* <pre>
* A token which can be sent as `page_token` to retrieve the next page.
* If this field is omitted, there are no subsequent pages.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @return The nextPageToken.
*/
@java.lang.Override
public java.lang.String getNextPageToken() {
java.lang.Object ref = nextPageToken_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
// Decode the cached ByteString once and cache the String form.
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
nextPageToken_ = s;
return s;
}
}
/**
*
*
* <pre>
* A token which can be sent as `page_token` to retrieve the next page.
* If this field is omitted, there are no subsequent pages.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @return The bytes for nextPageToken.
*/
@java.lang.Override
public com.google.protobuf.ByteString getNextPageTokenBytes() {
java.lang.Object ref = nextPageToken_;
if (ref instanceof java.lang.String) {
// Encode the cached String once and cache the ByteString form.
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
nextPageToken_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
// Memoized initialization state: -1 unknown, 0 not initialized, 1 initialized.
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
// No required fields in this message, so it is always initialized.
memoizedIsInitialized = 1;
return true;
}
// Serializes this message to the protobuf wire format.
@java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
for (int i = 0; i < mappingRules_.size(); i++) {
output.writeMessage(1, mappingRules_.get(i));
}
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 2, nextPageToken_);
}
getUnknownFields().writeTo(output);
}
// Computes (and memoizes) the serialized byte size of this message.
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
for (int i = 0; i < mappingRules_.size(); i++) {
size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, mappingRules_.get(i));
}
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) {
size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, nextPageToken_);
}
size += getUnknownFields().getSerializedSize();
memoizedSize = size;
return size;
}
// Field-wise equality, including unknown fields.
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof com.google.cloud.clouddms.v1.ListMappingRulesResponse)) {
return super.equals(obj);
}
com.google.cloud.clouddms.v1.ListMappingRulesResponse other =
(com.google.cloud.clouddms.v1.ListMappingRulesResponse) obj;
if (!getMappingRulesList().equals(other.getMappingRulesList())) return false;
if (!getNextPageToken().equals(other.getNextPageToken())) return false;
if (!getUnknownFields().equals(other.getUnknownFields())) return false;
return true;
}
// Hash over descriptor and populated fields; memoized after first computation.
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
if (getMappingRulesCount() > 0) {
hash = (37 * hash) + MAPPING_RULES_FIELD_NUMBER;
hash = (53 * hash) + getMappingRulesList().hashCode();
}
hash = (37 * hash) + NEXT_PAGE_TOKEN_FIELD_NUMBER;
hash = (53 * hash) + getNextPageToken().hashCode();
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
// --- Standard generated parseFrom overloads, delegating to PARSER ---
public static com.google.cloud.clouddms.v1.ListMappingRulesResponse parseFrom(
java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.clouddms.v1.ListMappingRulesResponse parseFrom(
java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.clouddms.v1.ListMappingRulesResponse parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.clouddms.v1.ListMappingRulesResponse parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.clouddms.v1.ListMappingRulesResponse parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.clouddms.v1.ListMappingRulesResponse parseFrom(
byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.clouddms.v1.ListMappingRulesResponse parseFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.clouddms.v1.ListMappingRulesResponse parseFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
// Delimited variants read a length-prefixed message from the stream.
public static com.google.cloud.clouddms.v1.ListMappingRulesResponse parseDelimitedFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
}
public static com.google.cloud.clouddms.v1.ListMappingRulesResponse parseDelimitedFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.clouddms.v1.ListMappingRulesResponse parseFrom(
com.google.protobuf.CodedInputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.clouddms.v1.ListMappingRulesResponse parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
// --- Builder factory methods ---
@java.lang.Override
public Builder newBuilderForType() {
return newBuilder();
}
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
// Creates a builder pre-populated from an existing message.
public static Builder newBuilder(
com.google.cloud.clouddms.v1.ListMappingRulesResponse prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
*
*
* <pre>
* Response message for 'ListMappingRulesRequest' request.
* </pre>
*
* Protobuf type {@code google.cloud.clouddms.v1.ListMappingRulesResponse}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.cloud.clouddms.v1.ListMappingRulesResponse)
com.google.cloud.clouddms.v1.ListMappingRulesResponseOrBuilder {
// Returns the protobuf descriptor for this message type.
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.clouddms.v1.ClouddmsProto
.internal_static_google_cloud_clouddms_v1_ListMappingRulesResponse_descriptor;
}
// Wires generated accessors to descriptor fields for reflective access.
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.clouddms.v1.ClouddmsProto
.internal_static_google_cloud_clouddms_v1_ListMappingRulesResponse_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.clouddms.v1.ListMappingRulesResponse.class,
com.google.cloud.clouddms.v1.ListMappingRulesResponse.Builder.class);
}
// Construct using com.google.cloud.clouddms.v1.ListMappingRulesResponse.newBuilder()
private Builder() {}
private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
}
// Resets all fields to their defaults.
@java.lang.Override
public Builder clear() {
super.clear();
bitField0_ = 0;
if (mappingRulesBuilder_ == null) {
mappingRules_ = java.util.Collections.emptyList();
} else {
mappingRules_ = null;
mappingRulesBuilder_.clear();
}
bitField0_ = (bitField0_ & ~0x00000001);
nextPageToken_ = "";
return this;
}
@java.lang.Override
public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
return com.google.cloud.clouddms.v1.ClouddmsProto
.internal_static_google_cloud_clouddms_v1_ListMappingRulesResponse_descriptor;
}
@java.lang.Override
public com.google.cloud.clouddms.v1.ListMappingRulesResponse getDefaultInstanceForType() {
return com.google.cloud.clouddms.v1.ListMappingRulesResponse.getDefaultInstance();
}
// Builds the message, throwing if it is not fully initialized.
@java.lang.Override
public com.google.cloud.clouddms.v1.ListMappingRulesResponse build() {
com.google.cloud.clouddms.v1.ListMappingRulesResponse result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
// Builds the message without the initialization check.
@java.lang.Override
public com.google.cloud.clouddms.v1.ListMappingRulesResponse buildPartial() {
com.google.cloud.clouddms.v1.ListMappingRulesResponse result =
new com.google.cloud.clouddms.v1.ListMappingRulesResponse(this);
buildPartialRepeatedFields(result);
if (bitField0_ != 0) {
buildPartial0(result);
}
onBuilt();
return result;
}
// Transfers the repeated mapping_rules field into the built message,
// freezing the builder-held list if it was mutable.
private void buildPartialRepeatedFields(
com.google.cloud.clouddms.v1.ListMappingRulesResponse result) {
if (mappingRulesBuilder_ == null) {
if (((bitField0_ & 0x00000001) != 0)) {
mappingRules_ = java.util.Collections.unmodifiableList(mappingRules_);
bitField0_ = (bitField0_ & ~0x00000001);
}
result.mappingRules_ = mappingRules_;
} else {
result.mappingRules_ = mappingRulesBuilder_.build();
}
}
// Transfers scalar fields that have been explicitly set (per bitField0_).
private void buildPartial0(com.google.cloud.clouddms.v1.ListMappingRulesResponse result) {
int from_bitField0_ = bitField0_;
if (((from_bitField0_ & 0x00000002) != 0)) {
result.nextPageToken_ = nextPageToken_;
}
}
// --- Standard generated overrides delegating to GeneratedMessageV3.Builder ---
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.addRepeatedField(field, value);
}
// Dispatches to the typed mergeFrom when 'other' is the same message type.
@java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof com.google.cloud.clouddms.v1.ListMappingRulesResponse) {
return mergeFrom((com.google.cloud.clouddms.v1.ListMappingRulesResponse) other);
} else {
super.mergeFrom(other);
return this;
}
}
// Merges another ListMappingRulesResponse into this builder (repeated fields
// are concatenated; set scalars overwrite).
public Builder mergeFrom(com.google.cloud.clouddms.v1.ListMappingRulesResponse other) {
if (other == com.google.cloud.clouddms.v1.ListMappingRulesResponse.getDefaultInstance())
return this;
if (mappingRulesBuilder_ == null) {
if (!other.mappingRules_.isEmpty()) {
if (mappingRules_.isEmpty()) {
// Adopt the other message's (immutable) list directly.
mappingRules_ = other.mappingRules_;
bitField0_ = (bitField0_ & ~0x00000001);
} else {
ensureMappingRulesIsMutable();
mappingRules_.addAll(other.mappingRules_);
}
onChanged();
}
} else {
if (!other.mappingRules_.isEmpty()) {
if (mappingRulesBuilder_.isEmpty()) {
// Discard the empty nested builder and adopt the other list.
mappingRulesBuilder_.dispose();
mappingRulesBuilder_ = null;
mappingRules_ = other.mappingRules_;
bitField0_ = (bitField0_ & ~0x00000001);
mappingRulesBuilder_ =
com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders
? getMappingRulesFieldBuilder()
: null;
} else {
mappingRulesBuilder_.addAllMessages(other.mappingRules_);
}
}
}
if (!other.getNextPageToken().isEmpty()) {
nextPageToken_ = other.nextPageToken_;
bitField0_ |= 0x00000002;
onChanged();
}
this.mergeUnknownFields(other.getUnknownFields());
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
return true;
}
// Parses fields from the protobuf wire format into this builder.
@java.lang.Override
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
// Tag 0 marks end of input.
done = true;
break;
// Tag 10 = field 1 (mapping_rules), wire type 2 (length-delimited message).
case 10:
{
com.google.cloud.clouddms.v1.MappingRule m =
input.readMessage(
com.google.cloud.clouddms.v1.MappingRule.parser(), extensionRegistry);
if (mappingRulesBuilder_ == null) {
ensureMappingRulesIsMutable();
mappingRules_.add(m);
} else {
mappingRulesBuilder_.addMessage(m);
}
break;
} // case 10
// Tag 18 = field 2 (next_page_token), wire type 2 (length-delimited string).
case 18:
{
nextPageToken_ = input.readStringRequireUtf8();
bitField0_ |= 0x00000002;
break;
} // case 18
default:
{
if (!super.parseUnknownField(input, extensionRegistry, tag)) {
done = true; // was an endgroup tag
}
break;
} // default:
} // switch (tag)
} // while (!done)
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.unwrapIOException();
} finally {
onChanged();
} // finally
return this;
}
// Bit 0x00000001 of bitField0_ marks mappingRules_ as a private mutable copy;
// bit 0x00000002 marks next_page_token as explicitly set.
private int bitField0_;
private java.util.List<com.google.cloud.clouddms.v1.MappingRule> mappingRules_ =
java.util.Collections.emptyList();
// Copy-on-write: swap the (possibly shared, immutable) list for a private
// ArrayList before the first in-place mutation.
private void ensureMappingRulesIsMutable() {
if (!((bitField0_ & 0x00000001) != 0)) {
mappingRules_ =
new java.util.ArrayList<com.google.cloud.clouddms.v1.MappingRule>(mappingRules_);
bitField0_ |= 0x00000001;
}
}
// Lazily created helper managing per-element sub-builders; once it exists,
// mappingRules_ is nulled and all access goes through it.
private com.google.protobuf.RepeatedFieldBuilderV3<
com.google.cloud.clouddms.v1.MappingRule,
com.google.cloud.clouddms.v1.MappingRule.Builder,
com.google.cloud.clouddms.v1.MappingRuleOrBuilder>
mappingRulesBuilder_;
/** Returns an unmodifiable view of {@code mapping_rules = 1} (conversion workspace mapping rules). */
public java.util.List<com.google.cloud.clouddms.v1.MappingRule> getMappingRulesList() {
if (mappingRulesBuilder_ == null) {
return java.util.Collections.unmodifiableList(mappingRules_);
} else {
return mappingRulesBuilder_.getMessageList();
}
}
/** Returns the number of elements in {@code mapping_rules = 1}. */
public int getMappingRulesCount() {
if (mappingRulesBuilder_ == null) {
return mappingRules_.size();
} else {
return mappingRulesBuilder_.getCount();
}
}
/** Returns element {@code index} of {@code mapping_rules = 1}. */
public com.google.cloud.clouddms.v1.MappingRule getMappingRules(int index) {
if (mappingRulesBuilder_ == null) {
return mappingRules_.get(index);
} else {
return mappingRulesBuilder_.getMessage(index);
}
}
/** Replaces element {@code index} of {@code mapping_rules = 1}; rejects null. */
public Builder setMappingRules(int index, com.google.cloud.clouddms.v1.MappingRule value) {
if (mappingRulesBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureMappingRulesIsMutable();
mappingRules_.set(index, value);
onChanged();
} else {
mappingRulesBuilder_.setMessage(index, value);
}
return this;
}
/** Replaces element {@code index} of {@code mapping_rules = 1} with the built message. */
public Builder setMappingRules(
int index, com.google.cloud.clouddms.v1.MappingRule.Builder builderForValue) {
if (mappingRulesBuilder_ == null) {
ensureMappingRulesIsMutable();
mappingRules_.set(index, builderForValue.build());
onChanged();
} else {
mappingRulesBuilder_.setMessage(index, builderForValue.build());
}
return this;
}
/** Appends {@code value} to {@code mapping_rules = 1}; rejects null. */
public Builder addMappingRules(com.google.cloud.clouddms.v1.MappingRule value) {
if (mappingRulesBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureMappingRulesIsMutable();
mappingRules_.add(value);
onChanged();
} else {
mappingRulesBuilder_.addMessage(value);
}
return this;
}
/** Inserts {@code value} at {@code index} in {@code mapping_rules = 1}; rejects null. */
public Builder addMappingRules(int index, com.google.cloud.clouddms.v1.MappingRule value) {
if (mappingRulesBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureMappingRulesIsMutable();
mappingRules_.add(index, value);
onChanged();
} else {
mappingRulesBuilder_.addMessage(index, value);
}
return this;
}
/** Appends the built message to {@code mapping_rules = 1}. */
public Builder addMappingRules(
com.google.cloud.clouddms.v1.MappingRule.Builder builderForValue) {
if (mappingRulesBuilder_ == null) {
ensureMappingRulesIsMutable();
mappingRules_.add(builderForValue.build());
onChanged();
} else {
mappingRulesBuilder_.addMessage(builderForValue.build());
}
return this;
}
/** Inserts the built message at {@code index} in {@code mapping_rules = 1}. */
public Builder addMappingRules(
int index, com.google.cloud.clouddms.v1.MappingRule.Builder builderForValue) {
if (mappingRulesBuilder_ == null) {
ensureMappingRulesIsMutable();
mappingRules_.add(index, builderForValue.build());
onChanged();
} else {
mappingRulesBuilder_.addMessage(index, builderForValue.build());
}
return this;
}
/** Appends every element of {@code values} to {@code mapping_rules = 1}. */
public Builder addAllMappingRules(
java.lang.Iterable<? extends com.google.cloud.clouddms.v1.MappingRule> values) {
if (mappingRulesBuilder_ == null) {
ensureMappingRulesIsMutable();
com.google.protobuf.AbstractMessageLite.Builder.addAll(values, mappingRules_);
onChanged();
} else {
mappingRulesBuilder_.addAllMessages(values);
}
return this;
}
/** Clears {@code mapping_rules = 1} (also resets the mutability bit). */
public Builder clearMappingRules() {
if (mappingRulesBuilder_ == null) {
mappingRules_ = java.util.Collections.emptyList();
bitField0_ = (bitField0_ & ~0x00000001);
onChanged();
} else {
mappingRulesBuilder_.clear();
}
return this;
}
/** Removes element {@code index} from {@code mapping_rules = 1}. */
public Builder removeMappingRules(int index) {
if (mappingRulesBuilder_ == null) {
ensureMappingRulesIsMutable();
mappingRules_.remove(index);
onChanged();
} else {
mappingRulesBuilder_.remove(index);
}
return this;
}
/** Returns a sub-builder for element {@code index}; forces creation of the field builder. */
public com.google.cloud.clouddms.v1.MappingRule.Builder getMappingRulesBuilder(int index) {
return getMappingRulesFieldBuilder().getBuilder(index);
}
/** Returns element {@code index} of {@code mapping_rules = 1} as a message-or-builder view. */
public com.google.cloud.clouddms.v1.MappingRuleOrBuilder getMappingRulesOrBuilder(int index) {
if (mappingRulesBuilder_ == null) {
return mappingRules_.get(index);
} else {
return mappingRulesBuilder_.getMessageOrBuilder(index);
}
}
/** Returns a read-only message-or-builder view of {@code mapping_rules = 1}. */
public java.util.List<? extends com.google.cloud.clouddms.v1.MappingRuleOrBuilder>
getMappingRulesOrBuilderList() {
if (mappingRulesBuilder_ != null) {
return mappingRulesBuilder_.getMessageOrBuilderList();
} else {
return java.util.Collections.unmodifiableList(mappingRules_);
}
}
/** Appends a default-valued element to {@code mapping_rules = 1} and returns its builder. */
public com.google.cloud.clouddms.v1.MappingRule.Builder addMappingRulesBuilder() {
return getMappingRulesFieldBuilder()
.addBuilder(com.google.cloud.clouddms.v1.MappingRule.getDefaultInstance());
}
/** Inserts a default-valued element at {@code index} in {@code mapping_rules = 1} and returns its builder. */
public com.google.cloud.clouddms.v1.MappingRule.Builder addMappingRulesBuilder(int index) {
return getMappingRulesFieldBuilder()
.addBuilder(index, com.google.cloud.clouddms.v1.MappingRule.getDefaultInstance());
}
/** Returns the live list of sub-builders for {@code mapping_rules = 1}. */
public java.util.List<com.google.cloud.clouddms.v1.MappingRule.Builder>
getMappingRulesBuilderList() {
return getMappingRulesFieldBuilder().getBuilderList();
}
// Lazily creates the RepeatedFieldBuilderV3; after creation, mappingRules_
// is nulled and the builder becomes the single source of truth.
private com.google.protobuf.RepeatedFieldBuilderV3<
com.google.cloud.clouddms.v1.MappingRule,
com.google.cloud.clouddms.v1.MappingRule.Builder,
com.google.cloud.clouddms.v1.MappingRuleOrBuilder>
getMappingRulesFieldBuilder() {
if (mappingRulesBuilder_ == null) {
mappingRulesBuilder_ =
new com.google.protobuf.RepeatedFieldBuilderV3<
com.google.cloud.clouddms.v1.MappingRule,
com.google.cloud.clouddms.v1.MappingRule.Builder,
com.google.cloud.clouddms.v1.MappingRuleOrBuilder>(
mappingRules_, ((bitField0_ & 0x00000001) != 0), getParentForChildren(), isClean());
mappingRules_ = null;
}
return mappingRulesBuilder_;
}
// Stored as either String or ByteString; lazily converted and cached on read.
private java.lang.Object nextPageToken_ = "";
/** Returns {@code next_page_token = 2}, caching the decoded String if it was stored as bytes. */
public java.lang.String getNextPageToken() {
java.lang.Object ref = nextPageToken_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
nextPageToken_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/** Returns {@code next_page_token = 2} as UTF-8 bytes, caching the ByteString. */
public com.google.protobuf.ByteString getNextPageTokenBytes() {
java.lang.Object ref = nextPageToken_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
nextPageToken_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/** Sets {@code next_page_token = 2}; rejects null. */
public Builder setNextPageToken(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
nextPageToken_ = value;
bitField0_ |= 0x00000002;
onChanged();
return this;
}
/** Resets {@code next_page_token = 2} to its default (empty string). */
public Builder clearNextPageToken() {
nextPageToken_ = getDefaultInstance().getNextPageToken();
bitField0_ = (bitField0_ & ~0x00000002);
onChanged();
return this;
}
/** Sets {@code next_page_token = 2} from bytes after UTF-8 validation; rejects null. */
public Builder setNextPageTokenBytes(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
checkByteStringIsUtf8(value);
nextPageToken_ = value;
bitField0_ |= 0x00000002;
onChanged();
return this;
}
// Unknown-field handling: straight delegation to GeneratedMessageV3.Builder.
@java.lang.Override
public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:google.cloud.clouddms.v1.ListMappingRulesResponse)
}
// @@protoc_insertion_point(class_scope:google.cloud.clouddms.v1.ListMappingRulesResponse)
// Shared immutable default (all-empty) instance.
private static final com.google.cloud.clouddms.v1.ListMappingRulesResponse DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new com.google.cloud.clouddms.v1.ListMappingRulesResponse();
}
public static com.google.cloud.clouddms.v1.ListMappingRulesResponse getDefaultInstance() {
return DEFAULT_INSTANCE;
}
// Parser that funnels wire parsing through Builder.mergeFrom; on failure it
// attaches the partially parsed message to the thrown exception.
private static final com.google.protobuf.Parser<ListMappingRulesResponse> PARSER =
new com.google.protobuf.AbstractParser<ListMappingRulesResponse>() {
@java.lang.Override
public ListMappingRulesResponse parsePartialFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
Builder builder = newBuilder();
try {
builder.mergeFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(builder.buildPartial());
} catch (com.google.protobuf.UninitializedMessageException e) {
throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
} catch (java.io.IOException e) {
throw new com.google.protobuf.InvalidProtocolBufferException(e)
.setUnfinishedMessage(builder.buildPartial());
}
return builder.buildPartial();
}
};
public static com.google.protobuf.Parser<ListMappingRulesResponse> parser() {
return PARSER;
}
@java.lang.Override
public com.google.protobuf.Parser<ListMappingRulesResponse> getParserForType() {
return PARSER;
}
@java.lang.Override
public com.google.cloud.clouddms.v1.ListMappingRulesResponse getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
/*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/api/serviceusage/v1beta1/serviceusage.proto
// Protobuf Java Version: 3.25.8
package com.google.api.serviceusage.v1beta1;
/**
*
*
* <pre>
* Response message for ListAdminOverrides.
* </pre>
*
* Protobuf type {@code google.api.serviceusage.v1beta1.ListAdminOverridesResponse}
*/
public final class ListAdminOverridesResponse extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.api.serviceusage.v1beta1.ListAdminOverridesResponse)
ListAdminOverridesResponseOrBuilder {
private static final long serialVersionUID = 0L;
// Use ListAdminOverridesResponse.newBuilder() to construct.
private ListAdminOverridesResponse(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
// Default state: empty repeated field, empty page token.
private ListAdminOverridesResponse() {
overrides_ = java.util.Collections.emptyList();
nextPageToken_ = "";
}
/** Serialization hook: always produces a fresh empty instance. */
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
return new ListAdminOverridesResponse();
}
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.api.serviceusage.v1beta1.ServiceUsageProto
.internal_static_google_api_serviceusage_v1beta1_ListAdminOverridesResponse_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.api.serviceusage.v1beta1.ServiceUsageProto
.internal_static_google_api_serviceusage_v1beta1_ListAdminOverridesResponse_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.api.serviceusage.v1beta1.ListAdminOverridesResponse.class,
com.google.api.serviceusage.v1beta1.ListAdminOverridesResponse.Builder.class);
}
public static final int OVERRIDES_FIELD_NUMBER = 1;
@SuppressWarnings("serial")
// Immutable after construction; admin overrides on this limit.
private java.util.List<com.google.api.serviceusage.v1beta1.QuotaOverride> overrides_;
/** Returns the full list for {@code overrides = 1} (admin overrides on this limit). */
@java.lang.Override
public java.util.List<com.google.api.serviceusage.v1beta1.QuotaOverride> getOverridesList() {
return overrides_;
}
/** Returns {@code overrides = 1} as a message-or-builder view. */
@java.lang.Override
public java.util.List<? extends com.google.api.serviceusage.v1beta1.QuotaOverrideOrBuilder>
getOverridesOrBuilderList() {
return overrides_;
}
/** Returns the number of elements in {@code overrides = 1}. */
@java.lang.Override
public int getOverridesCount() {
return overrides_.size();
}
/** Returns element {@code index} of {@code overrides = 1}. */
@java.lang.Override
public com.google.api.serviceusage.v1beta1.QuotaOverride getOverrides(int index) {
return overrides_.get(index);
}
/** Returns element {@code index} of {@code overrides = 1} as a message-or-builder view. */
@java.lang.Override
public com.google.api.serviceusage.v1beta1.QuotaOverrideOrBuilder getOverridesOrBuilder(
int index) {
return overrides_.get(index);
}
public static final int NEXT_PAGE_TOKEN_FIELD_NUMBER = 2;
@SuppressWarnings("serial")
// Stored as either String or ByteString; lazily converted and cached on read.
private volatile java.lang.Object nextPageToken_ = "";
/** Returns {@code next_page_token = 2} (token returned by a previous list call), caching the decoded String. */
@java.lang.Override
public java.lang.String getNextPageToken() {
java.lang.Object ref = nextPageToken_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
nextPageToken_ = s;
return s;
}
}
/** Returns {@code next_page_token = 2} as UTF-8 bytes, caching the ByteString. */
@java.lang.Override
public com.google.protobuf.ByteString getNextPageTokenBytes() {
java.lang.Object ref = nextPageToken_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
nextPageToken_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
// -1 = not computed, 0 = false, 1 = true.
private byte memoizedIsInitialized = -1;
/** proto3 message with no required fields: memoizes and returns true. */
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
/** Serializes overrides (field 1), next_page_token (field 2, only if non-empty), then unknown fields. */
@java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
for (int i = 0; i < overrides_.size(); i++) {
output.writeMessage(1, overrides_.get(i));
}
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 2, nextPageToken_);
}
getUnknownFields().writeTo(output);
}
/** Computes (and memoizes in {@code memoizedSize}) the serialized byte size; must mirror writeTo. */
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
for (int i = 0; i < overrides_.size(); i++) {
size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, overrides_.get(i));
}
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) {
size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, nextPageToken_);
}
size += getUnknownFields().getSerializedSize();
memoizedSize = size;
return size;
}
/** Field-by-field equality, including unknown fields; non-message types fall back to super.equals. */
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof com.google.api.serviceusage.v1beta1.ListAdminOverridesResponse)) {
return super.equals(obj);
}
com.google.api.serviceusage.v1beta1.ListAdminOverridesResponse other =
(com.google.api.serviceusage.v1beta1.ListAdminOverridesResponse) obj;
if (!getOverridesList().equals(other.getOverridesList())) return false;
if (!getNextPageToken().equals(other.getNextPageToken())) return false;
if (!getUnknownFields().equals(other.getUnknownFields())) return false;
return true;
}
/** Hash over descriptor, set fields, and unknown fields; memoized (0 means "not yet computed"). */
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
if (getOverridesCount() > 0) {
hash = (37 * hash) + OVERRIDES_FIELD_NUMBER;
hash = (53 * hash) + getOverridesList().hashCode();
}
hash = (37 * hash) + NEXT_PAGE_TOKEN_FIELD_NUMBER;
hash = (53 * hash) + getNextPageToken().hashCode();
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
// Standard generated parseFrom/parseDelimitedFrom overloads; all delegate to
// the shared PARSER (optionally with an ExtensionRegistry).
public static com.google.api.serviceusage.v1beta1.ListAdminOverridesResponse parseFrom(
java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.api.serviceusage.v1beta1.ListAdminOverridesResponse parseFrom(
java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.api.serviceusage.v1beta1.ListAdminOverridesResponse parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.api.serviceusage.v1beta1.ListAdminOverridesResponse parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.api.serviceusage.v1beta1.ListAdminOverridesResponse parseFrom(
byte[] data) throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.api.serviceusage.v1beta1.ListAdminOverridesResponse parseFrom(
byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.api.serviceusage.v1beta1.ListAdminOverridesResponse parseFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.api.serviceusage.v1beta1.ListAdminOverridesResponse parseFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.api.serviceusage.v1beta1.ListAdminOverridesResponse parseDelimitedFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
}
public static com.google.api.serviceusage.v1beta1.ListAdminOverridesResponse parseDelimitedFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.api.serviceusage.v1beta1.ListAdminOverridesResponse parseFrom(
com.google.protobuf.CodedInputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.api.serviceusage.v1beta1.ListAdminOverridesResponse parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
// Builder factory plumbing; newBuilder()/toBuilder() round-trip through the
// shared DEFAULT_INSTANCE so an unmodified builder allocates nothing extra.
@java.lang.Override
public Builder newBuilderForType() {
return newBuilder();
}
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(
com.google.api.serviceusage.v1beta1.ListAdminOverridesResponse prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
*
*
* <pre>
* Response message for ListAdminOverrides.
* </pre>
*
* Protobuf type {@code google.api.serviceusage.v1beta1.ListAdminOverridesResponse}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.api.serviceusage.v1beta1.ListAdminOverridesResponse)
com.google.api.serviceusage.v1beta1.ListAdminOverridesResponseOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.api.serviceusage.v1beta1.ServiceUsageProto
.internal_static_google_api_serviceusage_v1beta1_ListAdminOverridesResponse_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.api.serviceusage.v1beta1.ServiceUsageProto
.internal_static_google_api_serviceusage_v1beta1_ListAdminOverridesResponse_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.api.serviceusage.v1beta1.ListAdminOverridesResponse.class,
com.google.api.serviceusage.v1beta1.ListAdminOverridesResponse.Builder.class);
}
// Construct using com.google.api.serviceusage.v1beta1.ListAdminOverridesResponse.newBuilder()
private Builder() {}
// Parent-aware constructor used when this builder is nested in another builder.
private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
}
/** Resets every field (and presence bits) to defaults; reuses the field builder if one exists. */
@java.lang.Override
public Builder clear() {
super.clear();
bitField0_ = 0;
if (overridesBuilder_ == null) {
overrides_ = java.util.Collections.emptyList();
} else {
overrides_ = null;
overridesBuilder_.clear();
}
bitField0_ = (bitField0_ & ~0x00000001);
nextPageToken_ = "";
return this;
}
@java.lang.Override
public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
return com.google.api.serviceusage.v1beta1.ServiceUsageProto
.internal_static_google_api_serviceusage_v1beta1_ListAdminOverridesResponse_descriptor;
}
@java.lang.Override
public com.google.api.serviceusage.v1beta1.ListAdminOverridesResponse
getDefaultInstanceForType() {
return com.google.api.serviceusage.v1beta1.ListAdminOverridesResponse.getDefaultInstance();
}
/** Builds the message, throwing if it is not fully initialized. */
@java.lang.Override
public com.google.api.serviceusage.v1beta1.ListAdminOverridesResponse build() {
com.google.api.serviceusage.v1beta1.ListAdminOverridesResponse result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
/** Builds without the initialization check; copies repeated fields first, then scalar fields. */
@java.lang.Override
public com.google.api.serviceusage.v1beta1.ListAdminOverridesResponse buildPartial() {
com.google.api.serviceusage.v1beta1.ListAdminOverridesResponse result =
new com.google.api.serviceusage.v1beta1.ListAdminOverridesResponse(this);
buildPartialRepeatedFields(result);
if (bitField0_ != 0) {
buildPartial0(result);
}
onBuilt();
return result;
}
// Freezes overrides_ (unmodifiable) before handing it to the message, or
// delegates to the field builder if one was created.
private void buildPartialRepeatedFields(
com.google.api.serviceusage.v1beta1.ListAdminOverridesResponse result) {
if (overridesBuilder_ == null) {
if (((bitField0_ & 0x00000001) != 0)) {
overrides_ = java.util.Collections.unmodifiableList(overrides_);
bitField0_ = (bitField0_ & ~0x00000001);
}
result.overrides_ = overrides_;
} else {
result.overrides_ = overridesBuilder_.build();
}
}
// Copies scalar fields whose presence bits are set.
private void buildPartial0(
com.google.api.serviceusage.v1beta1.ListAdminOverridesResponse result) {
int from_bitField0_ = bitField0_;
if (((from_bitField0_ & 0x00000002) != 0)) {
result.nextPageToken_ = nextPageToken_;
}
}
// Reflective-API overrides: plain delegations kept so generated subclasses
// present a stable, final-free surface.
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.addRepeatedField(field, value);
}
/** Type-dispatching merge: uses the typed overload for same-type messages, reflection otherwise. */
@java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof com.google.api.serviceusage.v1beta1.ListAdminOverridesResponse) {
return mergeFrom((com.google.api.serviceusage.v1beta1.ListAdminOverridesResponse) other);
} else {
super.mergeFrom(other);
return this;
}
}
/**
 * Typed merge. For the repeated field: if this builder's list is empty it
 * aliases other's immutable list (cheap share, mutability bit cleared);
 * otherwise elements are appended. If a field builder exists but is empty,
 * it is discarded in favor of the shared list.
 */
public Builder mergeFrom(com.google.api.serviceusage.v1beta1.ListAdminOverridesResponse other) {
if (other
== com.google.api.serviceusage.v1beta1.ListAdminOverridesResponse.getDefaultInstance())
return this;
if (overridesBuilder_ == null) {
if (!other.overrides_.isEmpty()) {
if (overrides_.isEmpty()) {
overrides_ = other.overrides_;
bitField0_ = (bitField0_ & ~0x00000001);
} else {
ensureOverridesIsMutable();
overrides_.addAll(other.overrides_);
}
onChanged();
}
} else {
if (!other.overrides_.isEmpty()) {
if (overridesBuilder_.isEmpty()) {
overridesBuilder_.dispose();
overridesBuilder_ = null;
overrides_ = other.overrides_;
bitField0_ = (bitField0_ & ~0x00000001);
overridesBuilder_ =
com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders
? getOverridesFieldBuilder()
: null;
} else {
overridesBuilder_.addAllMessages(other.overrides_);
}
}
}
if (!other.getNextPageToken().isEmpty()) {
nextPageToken_ = other.nextPageToken_;
bitField0_ |= 0x00000002;
onChanged();
}
this.mergeUnknownFields(other.getUnknownFields());
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
// proto3 message with no required fields: a Builder is always initialized.
return true;
}
// Wire-format parser: reads tags until EOF (tag 0) or an end-group tag,
// dispatching field 1 (overrides, tag 10) and field 2 (next_page_token,
// tag 18); unknown fields are preserved via parseUnknownField.
@java.lang.Override
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
case 10:
{
com.google.api.serviceusage.v1beta1.QuotaOverride m =
input.readMessage(
com.google.api.serviceusage.v1beta1.QuotaOverride.parser(),
extensionRegistry);
if (overridesBuilder_ == null) {
ensureOverridesIsMutable();
overrides_.add(m);
} else {
overridesBuilder_.addMessage(m);
}
break;
} // case 10
case 18:
{
nextPageToken_ = input.readStringRequireUtf8();
bitField0_ |= 0x00000002;
break;
} // case 18
default:
{
if (!super.parseUnknownField(input, extensionRegistry, tag)) {
done = true; // was an endgroup tag
}
break;
} // default:
} // switch (tag)
} // while (!done)
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.unwrapIOException();
} finally {
onChanged();
} // finally
return this;
}
// Bit 0x00000001 of bitField0_ marks overrides_ as a private mutable copy;
// bit 0x00000002 marks next_page_token as explicitly set.
private int bitField0_;
private java.util.List<com.google.api.serviceusage.v1beta1.QuotaOverride> overrides_ =
java.util.Collections.emptyList();
// Copy-on-write: swap the (possibly shared, immutable) list for a private
// ArrayList before the first in-place mutation.
private void ensureOverridesIsMutable() {
if (!((bitField0_ & 0x00000001) != 0)) {
overrides_ =
new java.util.ArrayList<com.google.api.serviceusage.v1beta1.QuotaOverride>(overrides_);
bitField0_ |= 0x00000001;
}
}
// Lazily created helper managing per-element sub-builders; once it exists,
// overrides_ is nulled and all access goes through it.
private com.google.protobuf.RepeatedFieldBuilderV3<
com.google.api.serviceusage.v1beta1.QuotaOverride,
com.google.api.serviceusage.v1beta1.QuotaOverride.Builder,
com.google.api.serviceusage.v1beta1.QuotaOverrideOrBuilder>
overridesBuilder_;
/**
*
*
* <pre>
* Admin overrides on this limit.
* </pre>
*
* <code>repeated .google.api.serviceusage.v1beta1.QuotaOverride overrides = 1;</code>
*/
    public java.util.List<com.google.api.serviceusage.v1beta1.QuotaOverride> getOverridesList() {
      // Before the nested field builder exists, wrap the backing list so
      // callers cannot mutate builder state through the returned view.
      if (overridesBuilder_ == null) {
        return java.util.Collections.unmodifiableList(overrides_);
      } else {
        return overridesBuilder_.getMessageList();
      }
    }
/**
*
*
* <pre>
* Admin overrides on this limit.
* </pre>
*
* <code>repeated .google.api.serviceusage.v1beta1.QuotaOverride overrides = 1;</code>
*/
public int getOverridesCount() {
if (overridesBuilder_ == null) {
return overrides_.size();
} else {
return overridesBuilder_.getCount();
}
}
/**
*
*
* <pre>
* Admin overrides on this limit.
* </pre>
*
* <code>repeated .google.api.serviceusage.v1beta1.QuotaOverride overrides = 1;</code>
*/
public com.google.api.serviceusage.v1beta1.QuotaOverride getOverrides(int index) {
if (overridesBuilder_ == null) {
return overrides_.get(index);
} else {
return overridesBuilder_.getMessage(index);
}
}
/**
*
*
* <pre>
* Admin overrides on this limit.
* </pre>
*
* <code>repeated .google.api.serviceusage.v1beta1.QuotaOverride overrides = 1;</code>
*/
    public Builder setOverrides(
        int index, com.google.api.serviceusage.v1beta1.QuotaOverride value) {
      // Replaces the element at `index`; rejects null (proto fields are null-hostile).
      if (overridesBuilder_ == null) {
        if (value == null) {
          throw new NullPointerException();
        }
        ensureOverridesIsMutable();
        overrides_.set(index, value);
        onChanged();
      } else {
        // Field-builder path: the builder performs its own null check and change notification.
        overridesBuilder_.setMessage(index, value);
      }
      return this;
    }
/**
*
*
* <pre>
* Admin overrides on this limit.
* </pre>
*
* <code>repeated .google.api.serviceusage.v1beta1.QuotaOverride overrides = 1;</code>
*/
public Builder setOverrides(
int index, com.google.api.serviceusage.v1beta1.QuotaOverride.Builder builderForValue) {
if (overridesBuilder_ == null) {
ensureOverridesIsMutable();
overrides_.set(index, builderForValue.build());
onChanged();
} else {
overridesBuilder_.setMessage(index, builderForValue.build());
}
return this;
}
/**
*
*
* <pre>
* Admin overrides on this limit.
* </pre>
*
* <code>repeated .google.api.serviceusage.v1beta1.QuotaOverride overrides = 1;</code>
*/
    public Builder addOverrides(com.google.api.serviceusage.v1beta1.QuotaOverride value) {
      // Appends one element to the repeated `overrides` field (field 1); null is rejected.
      if (overridesBuilder_ == null) {
        if (value == null) {
          throw new NullPointerException();
        }
        ensureOverridesIsMutable();
        overrides_.add(value);
        onChanged();
      } else {
        overridesBuilder_.addMessage(value);
      }
      return this;
    }
/**
*
*
* <pre>
* Admin overrides on this limit.
* </pre>
*
* <code>repeated .google.api.serviceusage.v1beta1.QuotaOverride overrides = 1;</code>
*/
public Builder addOverrides(
int index, com.google.api.serviceusage.v1beta1.QuotaOverride value) {
if (overridesBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureOverridesIsMutable();
overrides_.add(index, value);
onChanged();
} else {
overridesBuilder_.addMessage(index, value);
}
return this;
}
/**
*
*
* <pre>
* Admin overrides on this limit.
* </pre>
*
* <code>repeated .google.api.serviceusage.v1beta1.QuotaOverride overrides = 1;</code>
*/
public Builder addOverrides(
com.google.api.serviceusage.v1beta1.QuotaOverride.Builder builderForValue) {
if (overridesBuilder_ == null) {
ensureOverridesIsMutable();
overrides_.add(builderForValue.build());
onChanged();
} else {
overridesBuilder_.addMessage(builderForValue.build());
}
return this;
}
/**
*
*
* <pre>
* Admin overrides on this limit.
* </pre>
*
* <code>repeated .google.api.serviceusage.v1beta1.QuotaOverride overrides = 1;</code>
*/
public Builder addOverrides(
int index, com.google.api.serviceusage.v1beta1.QuotaOverride.Builder builderForValue) {
if (overridesBuilder_ == null) {
ensureOverridesIsMutable();
overrides_.add(index, builderForValue.build());
onChanged();
} else {
overridesBuilder_.addMessage(index, builderForValue.build());
}
return this;
}
/**
*
*
* <pre>
* Admin overrides on this limit.
* </pre>
*
* <code>repeated .google.api.serviceusage.v1beta1.QuotaOverride overrides = 1;</code>
*/
    public Builder addAllOverrides(
        java.lang.Iterable<? extends com.google.api.serviceusage.v1beta1.QuotaOverride> values) {
      // Bulk append; AbstractMessageLite.Builder.addAll also null-checks each element.
      if (overridesBuilder_ == null) {
        ensureOverridesIsMutable();
        com.google.protobuf.AbstractMessageLite.Builder.addAll(values, overrides_);
        onChanged();
      } else {
        overridesBuilder_.addAllMessages(values);
      }
      return this;
    }
/**
*
*
* <pre>
* Admin overrides on this limit.
* </pre>
*
* <code>repeated .google.api.serviceusage.v1beta1.QuotaOverride overrides = 1;</code>
*/
    public Builder clearOverrides() {
      // Resets the repeated field to the shared empty default and drops the
      // "privately owned list" bit (0x1) so the next write copies afresh.
      if (overridesBuilder_ == null) {
        overrides_ = java.util.Collections.emptyList();
        bitField0_ = (bitField0_ & ~0x00000001);
        onChanged();
      } else {
        overridesBuilder_.clear();
      }
      return this;
    }
/**
*
*
* <pre>
* Admin overrides on this limit.
* </pre>
*
* <code>repeated .google.api.serviceusage.v1beta1.QuotaOverride overrides = 1;</code>
*/
    public Builder removeOverrides(int index) {
      // Removes the element at `index` from the repeated `overrides` field.
      if (overridesBuilder_ == null) {
        ensureOverridesIsMutable();
        overrides_.remove(index);
        onChanged();
      } else {
        overridesBuilder_.remove(index);
      }
      return this;
    }
/**
*
*
* <pre>
* Admin overrides on this limit.
* </pre>
*
* <code>repeated .google.api.serviceusage.v1beta1.QuotaOverride overrides = 1;</code>
*/
public com.google.api.serviceusage.v1beta1.QuotaOverride.Builder getOverridesBuilder(
int index) {
return getOverridesFieldBuilder().getBuilder(index);
}
/**
*
*
* <pre>
* Admin overrides on this limit.
* </pre>
*
* <code>repeated .google.api.serviceusage.v1beta1.QuotaOverride overrides = 1;</code>
*/
public com.google.api.serviceusage.v1beta1.QuotaOverrideOrBuilder getOverridesOrBuilder(
int index) {
if (overridesBuilder_ == null) {
return overrides_.get(index);
} else {
return overridesBuilder_.getMessageOrBuilder(index);
}
}
/**
*
*
* <pre>
* Admin overrides on this limit.
* </pre>
*
* <code>repeated .google.api.serviceusage.v1beta1.QuotaOverride overrides = 1;</code>
*/
public java.util.List<? extends com.google.api.serviceusage.v1beta1.QuotaOverrideOrBuilder>
getOverridesOrBuilderList() {
if (overridesBuilder_ != null) {
return overridesBuilder_.getMessageOrBuilderList();
} else {
return java.util.Collections.unmodifiableList(overrides_);
}
}
/**
*
*
* <pre>
* Admin overrides on this limit.
* </pre>
*
* <code>repeated .google.api.serviceusage.v1beta1.QuotaOverride overrides = 1;</code>
*/
public com.google.api.serviceusage.v1beta1.QuotaOverride.Builder addOverridesBuilder() {
return getOverridesFieldBuilder()
.addBuilder(com.google.api.serviceusage.v1beta1.QuotaOverride.getDefaultInstance());
}
/**
*
*
* <pre>
* Admin overrides on this limit.
* </pre>
*
* <code>repeated .google.api.serviceusage.v1beta1.QuotaOverride overrides = 1;</code>
*/
public com.google.api.serviceusage.v1beta1.QuotaOverride.Builder addOverridesBuilder(
int index) {
return getOverridesFieldBuilder()
.addBuilder(
index, com.google.api.serviceusage.v1beta1.QuotaOverride.getDefaultInstance());
}
/**
*
*
* <pre>
* Admin overrides on this limit.
* </pre>
*
* <code>repeated .google.api.serviceusage.v1beta1.QuotaOverride overrides = 1;</code>
*/
public java.util.List<com.google.api.serviceusage.v1beta1.QuotaOverride.Builder>
getOverridesBuilderList() {
return getOverridesFieldBuilder().getBuilderList();
}
    private com.google.protobuf.RepeatedFieldBuilderV3<
            com.google.api.serviceusage.v1beta1.QuotaOverride,
            com.google.api.serviceusage.v1beta1.QuotaOverride.Builder,
            com.google.api.serviceusage.v1beta1.QuotaOverrideOrBuilder>
        getOverridesFieldBuilder() {
      // Lazily switches the field from plain-list mode to field-builder mode;
      // after this, overrides_ is nulled and the builder owns the elements.
      if (overridesBuilder_ == null) {
        overridesBuilder_ =
            new com.google.protobuf.RepeatedFieldBuilderV3<
                com.google.api.serviceusage.v1beta1.QuotaOverride,
                com.google.api.serviceusage.v1beta1.QuotaOverride.Builder,
                com.google.api.serviceusage.v1beta1.QuotaOverrideOrBuilder>(
                overrides_, ((bitField0_ & 0x00000001) != 0), getParentForChildren(), isClean());
        overrides_ = null;
      }
      return overridesBuilder_;
    }
private java.lang.Object nextPageToken_ = "";
/**
*
*
* <pre>
* Token identifying which result to start with; returned by a previous list
* call.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @return The nextPageToken.
*/
    public java.lang.String getNextPageToken() {
      java.lang.Object ref = nextPageToken_;
      if (!(ref instanceof java.lang.String)) {
        // Field still holds the wire-format ByteString; decode once and cache
        // the String back into the field for subsequent calls.
        com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        nextPageToken_ = s;
        return s;
      } else {
        return (java.lang.String) ref;
      }
    }
/**
*
*
* <pre>
* Token identifying which result to start with; returned by a previous list
* call.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @return The bytes for nextPageToken.
*/
    public com.google.protobuf.ByteString getNextPageTokenBytes() {
      java.lang.Object ref = nextPageToken_;
      if (ref instanceof String) {
        // Field currently caches a String; encode to UTF-8 bytes and cache the
        // ByteString representation instead.
        com.google.protobuf.ByteString b =
            com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
        nextPageToken_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }
/**
*
*
* <pre>
* Token identifying which result to start with; returned by a previous list
* call.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @param value The nextPageToken to set.
* @return This builder for chaining.
*/
    public Builder setNextPageToken(java.lang.String value) {
      if (value == null) {
        throw new NullPointerException();
      }
      nextPageToken_ = value;
      // Bit 0x2 marks next_page_token (field 2) as explicitly set for buildPartial.
      bitField0_ |= 0x00000002;
      onChanged();
      return this;
    }
/**
*
*
* <pre>
* Token identifying which result to start with; returned by a previous list
* call.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @return This builder for chaining.
*/
    public Builder clearNextPageToken() {
      // Restore the default ("") and clear the field-2 presence bit (0x2).
      nextPageToken_ = getDefaultInstance().getNextPageToken();
      bitField0_ = (bitField0_ & ~0x00000002);
      onChanged();
      return this;
    }
/**
*
*
* <pre>
* Token identifying which result to start with; returned by a previous list
* call.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @param value The bytes for nextPageToken to set.
* @return This builder for chaining.
*/
    public Builder setNextPageTokenBytes(com.google.protobuf.ByteString value) {
      if (value == null) {
        throw new NullPointerException();
      }
      // Reject bytes that are not valid UTF-8 (proto3 string fields require it).
      checkByteStringIsUtf8(value);
      nextPageToken_ = value;
      bitField0_ |= 0x00000002;
      onChanged();
      return this;
    }
@java.lang.Override
public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:google.api.serviceusage.v1beta1.ListAdminOverridesResponse)
}
// @@protoc_insertion_point(class_scope:google.api.serviceusage.v1beta1.ListAdminOverridesResponse)
private static final com.google.api.serviceusage.v1beta1.ListAdminOverridesResponse
DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new com.google.api.serviceusage.v1beta1.ListAdminOverridesResponse();
}
public static com.google.api.serviceusage.v1beta1.ListAdminOverridesResponse
getDefaultInstance() {
return DEFAULT_INSTANCE;
}
private static final com.google.protobuf.Parser<ListAdminOverridesResponse> PARSER =
new com.google.protobuf.AbstractParser<ListAdminOverridesResponse>() {
@java.lang.Override
public ListAdminOverridesResponse parsePartialFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
Builder builder = newBuilder();
try {
builder.mergeFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(builder.buildPartial());
} catch (com.google.protobuf.UninitializedMessageException e) {
throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
} catch (java.io.IOException e) {
throw new com.google.protobuf.InvalidProtocolBufferException(e)
.setUnfinishedMessage(builder.buildPartial());
}
return builder.buildPartial();
}
};
public static com.google.protobuf.Parser<ListAdminOverridesResponse> parser() {
return PARSER;
}
@java.lang.Override
public com.google.protobuf.Parser<ListAdminOverridesResponse> getParserForType() {
return PARSER;
}
@java.lang.Override
public com.google.api.serviceusage.v1beta1.ListAdminOverridesResponse
getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
|
googleapis/google-cloud-java | 36,959 | java-bigquery-data-exchange/proto-google-cloud-bigquery-data-exchange-v1beta1/src/main/java/com/google/cloud/bigquery/dataexchange/v1beta1/ListListingsResponse.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/bigquery/dataexchange/v1beta1/dataexchange.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.bigquery.dataexchange.v1beta1;
/**
*
*
* <pre>
* Message for response to the list of Listings.
* </pre>
*
* Protobuf type {@code google.cloud.bigquery.dataexchange.v1beta1.ListListingsResponse}
*/
public final class ListListingsResponse extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.bigquery.dataexchange.v1beta1.ListListingsResponse)
ListListingsResponseOrBuilder {
private static final long serialVersionUID = 0L;
// Use ListListingsResponse.newBuilder() to construct.
private ListListingsResponse(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private ListListingsResponse() {
listings_ = java.util.Collections.emptyList();
nextPageToken_ = "";
}
  @java.lang.Override
  @SuppressWarnings({"unused"})
  protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
    // Runtime hook used by the protobuf library to create fresh instances.
    return new ListListingsResponse();
  }
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.bigquery.dataexchange.v1beta1.DataExchangeProto
.internal_static_google_cloud_bigquery_dataexchange_v1beta1_ListListingsResponse_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.bigquery.dataexchange.v1beta1.DataExchangeProto
.internal_static_google_cloud_bigquery_dataexchange_v1beta1_ListListingsResponse_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.bigquery.dataexchange.v1beta1.ListListingsResponse.class,
com.google.cloud.bigquery.dataexchange.v1beta1.ListListingsResponse.Builder.class);
}
public static final int LISTINGS_FIELD_NUMBER = 1;
@SuppressWarnings("serial")
private java.util.List<com.google.cloud.bigquery.dataexchange.v1beta1.Listing> listings_;
/**
*
*
* <pre>
* The list of Listing.
* </pre>
*
* <code>repeated .google.cloud.bigquery.dataexchange.v1beta1.Listing listings = 1;</code>
*/
@java.lang.Override
public java.util.List<com.google.cloud.bigquery.dataexchange.v1beta1.Listing> getListingsList() {
return listings_;
}
/**
*
*
* <pre>
* The list of Listing.
* </pre>
*
* <code>repeated .google.cloud.bigquery.dataexchange.v1beta1.Listing listings = 1;</code>
*/
@java.lang.Override
public java.util.List<? extends com.google.cloud.bigquery.dataexchange.v1beta1.ListingOrBuilder>
getListingsOrBuilderList() {
return listings_;
}
/**
*
*
* <pre>
* The list of Listing.
* </pre>
*
* <code>repeated .google.cloud.bigquery.dataexchange.v1beta1.Listing listings = 1;</code>
*/
@java.lang.Override
public int getListingsCount() {
return listings_.size();
}
/**
*
*
* <pre>
* The list of Listing.
* </pre>
*
* <code>repeated .google.cloud.bigquery.dataexchange.v1beta1.Listing listings = 1;</code>
*/
@java.lang.Override
public com.google.cloud.bigquery.dataexchange.v1beta1.Listing getListings(int index) {
return listings_.get(index);
}
/**
*
*
* <pre>
* The list of Listing.
* </pre>
*
* <code>repeated .google.cloud.bigquery.dataexchange.v1beta1.Listing listings = 1;</code>
*/
@java.lang.Override
public com.google.cloud.bigquery.dataexchange.v1beta1.ListingOrBuilder getListingsOrBuilder(
int index) {
return listings_.get(index);
}
public static final int NEXT_PAGE_TOKEN_FIELD_NUMBER = 2;
@SuppressWarnings("serial")
private volatile java.lang.Object nextPageToken_ = "";
/**
*
*
* <pre>
* A token to request the next page of results.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @return The nextPageToken.
*/
  @java.lang.Override
  public java.lang.String getNextPageToken() {
    java.lang.Object ref = nextPageToken_;
    if (ref instanceof java.lang.String) {
      return (java.lang.String) ref;
    } else {
      // Lazily decode the wire-format ByteString and cache the String form.
      com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
      java.lang.String s = bs.toStringUtf8();
      nextPageToken_ = s;
      return s;
    }
  }
/**
*
*
* <pre>
* A token to request the next page of results.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @return The bytes for nextPageToken.
*/
@java.lang.Override
public com.google.protobuf.ByteString getNextPageTokenBytes() {
java.lang.Object ref = nextPageToken_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
nextPageToken_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
private byte memoizedIsInitialized = -1;
  @java.lang.Override
  public final boolean isInitialized() {
    // Memoized: -1 = not yet computed, 1 = initialized, 0 = not initialized.
    byte isInitialized = memoizedIsInitialized;
    if (isInitialized == 1) return true;
    if (isInitialized == 0) return false;
    // No required fields, so the answer is always true once computed.
    memoizedIsInitialized = 1;
    return true;
  }
  @java.lang.Override
  public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
    // Serialize field 1 (repeated listings), then field 2 (next_page_token,
    // omitted when empty per proto3 rules), then any preserved unknown fields.
    for (int i = 0; i < listings_.size(); i++) {
      output.writeMessage(1, listings_.get(i));
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 2, nextPageToken_);
    }
    getUnknownFields().writeTo(output);
  }
  @java.lang.Override
  public int getSerializedSize() {
    // Cached size; -1 means "not computed yet". Must mirror writeTo() exactly.
    int size = memoizedSize;
    if (size != -1) return size;
    size = 0;
    for (int i = 0; i < listings_.size(); i++) {
      size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, listings_.get(i));
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, nextPageToken_);
    }
    size += getUnknownFields().getSerializedSize();
    memoizedSize = size;
    return size;
  }
  @java.lang.Override
  public boolean equals(final java.lang.Object obj) {
    if (obj == this) {
      return true;
    }
    if (!(obj instanceof com.google.cloud.bigquery.dataexchange.v1beta1.ListListingsResponse)) {
      return super.equals(obj);
    }
    com.google.cloud.bigquery.dataexchange.v1beta1.ListListingsResponse other =
        (com.google.cloud.bigquery.dataexchange.v1beta1.ListListingsResponse) obj;
    // Field-by-field comparison, including unknown fields, matching hashCode().
    if (!getListingsList().equals(other.getListingsList())) return false;
    if (!getNextPageToken().equals(other.getNextPageToken())) return false;
    if (!getUnknownFields().equals(other.getUnknownFields())) return false;
    return true;
  }
  @java.lang.Override
  public int hashCode() {
    // 0 doubles as the "not yet computed" sentinel for the memoized hash.
    if (memoizedHashCode != 0) {
      return memoizedHashCode;
    }
    int hash = 41;
    hash = (19 * hash) + getDescriptor().hashCode();
    // Repeated field only contributes when non-empty, keeping hash consistent
    // with equals() (an absent list equals an empty one).
    if (getListingsCount() > 0) {
      hash = (37 * hash) + LISTINGS_FIELD_NUMBER;
      hash = (53 * hash) + getListingsList().hashCode();
    }
    hash = (37 * hash) + NEXT_PAGE_TOKEN_FIELD_NUMBER;
    hash = (53 * hash) + getNextPageToken().hashCode();
    hash = (29 * hash) + getUnknownFields().hashCode();
    memoizedHashCode = hash;
    return hash;
  }
public static com.google.cloud.bigquery.dataexchange.v1beta1.ListListingsResponse parseFrom(
java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.bigquery.dataexchange.v1beta1.ListListingsResponse parseFrom(
java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.bigquery.dataexchange.v1beta1.ListListingsResponse parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.bigquery.dataexchange.v1beta1.ListListingsResponse parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.bigquery.dataexchange.v1beta1.ListListingsResponse parseFrom(
byte[] data) throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.bigquery.dataexchange.v1beta1.ListListingsResponse parseFrom(
byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.bigquery.dataexchange.v1beta1.ListListingsResponse parseFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.bigquery.dataexchange.v1beta1.ListListingsResponse parseFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.bigquery.dataexchange.v1beta1.ListListingsResponse
parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
}
public static com.google.cloud.bigquery.dataexchange.v1beta1.ListListingsResponse
parseDelimitedFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.bigquery.dataexchange.v1beta1.ListListingsResponse parseFrom(
com.google.protobuf.CodedInputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.bigquery.dataexchange.v1beta1.ListListingsResponse parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() {
return newBuilder();
}
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(
com.google.cloud.bigquery.dataexchange.v1beta1.ListListingsResponse prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
*
*
* <pre>
* Message for response to the list of Listings.
* </pre>
*
* Protobuf type {@code google.cloud.bigquery.dataexchange.v1beta1.ListListingsResponse}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.cloud.bigquery.dataexchange.v1beta1.ListListingsResponse)
com.google.cloud.bigquery.dataexchange.v1beta1.ListListingsResponseOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.bigquery.dataexchange.v1beta1.DataExchangeProto
.internal_static_google_cloud_bigquery_dataexchange_v1beta1_ListListingsResponse_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.bigquery.dataexchange.v1beta1.DataExchangeProto
.internal_static_google_cloud_bigquery_dataexchange_v1beta1_ListListingsResponse_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.bigquery.dataexchange.v1beta1.ListListingsResponse.class,
com.google.cloud.bigquery.dataexchange.v1beta1.ListListingsResponse.Builder.class);
}
// Construct using
// com.google.cloud.bigquery.dataexchange.v1beta1.ListListingsResponse.newBuilder()
private Builder() {}
private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
}
    @java.lang.Override
    public Builder clear() {
      super.clear();
      bitField0_ = 0;
      // Reset listings: drop the local list or clear the nested field builder,
      // whichever representation is active.
      if (listingsBuilder_ == null) {
        listings_ = java.util.Collections.emptyList();
      } else {
        listings_ = null;
        listingsBuilder_.clear();
      }
      bitField0_ = (bitField0_ & ~0x00000001);
      nextPageToken_ = "";
      return this;
    }
@java.lang.Override
public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
return com.google.cloud.bigquery.dataexchange.v1beta1.DataExchangeProto
.internal_static_google_cloud_bigquery_dataexchange_v1beta1_ListListingsResponse_descriptor;
}
@java.lang.Override
public com.google.cloud.bigquery.dataexchange.v1beta1.ListListingsResponse
getDefaultInstanceForType() {
return com.google.cloud.bigquery.dataexchange.v1beta1.ListListingsResponse
.getDefaultInstance();
}
@java.lang.Override
public com.google.cloud.bigquery.dataexchange.v1beta1.ListListingsResponse build() {
com.google.cloud.bigquery.dataexchange.v1beta1.ListListingsResponse result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
    @java.lang.Override
    public com.google.cloud.bigquery.dataexchange.v1beta1.ListListingsResponse buildPartial() {
      com.google.cloud.bigquery.dataexchange.v1beta1.ListListingsResponse result =
          new com.google.cloud.bigquery.dataexchange.v1beta1.ListListingsResponse(this);
      // Repeated fields are transferred unconditionally; scalar fields only
      // when their presence bit in bitField0_ is set.
      buildPartialRepeatedFields(result);
      if (bitField0_ != 0) {
        buildPartial0(result);
      }
      onBuilt();
      return result;
    }
    private void buildPartialRepeatedFields(
        com.google.cloud.bigquery.dataexchange.v1beta1.ListListingsResponse result) {
      if (listingsBuilder_ == null) {
        // Freeze the locally owned list (bit 0x1) before handing it to the
        // immutable message, then clear the ownership bit.
        if (((bitField0_ & 0x00000001) != 0)) {
          listings_ = java.util.Collections.unmodifiableList(listings_);
          bitField0_ = (bitField0_ & ~0x00000001);
        }
        result.listings_ = listings_;
      } else {
        result.listings_ = listingsBuilder_.build();
      }
    }
    private void buildPartial0(
        com.google.cloud.bigquery.dataexchange.v1beta1.ListListingsResponse result) {
      int from_bitField0_ = bitField0_;
      // Copy next_page_token only when its presence bit (0x2) is set.
      if (((from_bitField0_ & 0x00000002) != 0)) {
        result.nextPageToken_ = nextPageToken_;
      }
    }
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof com.google.cloud.bigquery.dataexchange.v1beta1.ListListingsResponse) {
return mergeFrom(
(com.google.cloud.bigquery.dataexchange.v1beta1.ListListingsResponse) other);
} else {
super.mergeFrom(other);
return this;
}
}
    public Builder mergeFrom(
        com.google.cloud.bigquery.dataexchange.v1beta1.ListListingsResponse other) {
      if (other
          == com.google.cloud.bigquery.dataexchange.v1beta1.ListListingsResponse
              .getDefaultInstance()) return this;
      if (listingsBuilder_ == null) {
        if (!other.listings_.isEmpty()) {
          if (listings_.isEmpty()) {
            // Share the other message's immutable list; ownership bit stays clear
            // so any later mutation copies it first.
            listings_ = other.listings_;
            bitField0_ = (bitField0_ & ~0x00000001);
          } else {
            ensureListingsIsMutable();
            listings_.addAll(other.listings_);
          }
          onChanged();
        }
      } else {
        if (!other.listings_.isEmpty()) {
          if (listingsBuilder_.isEmpty()) {
            // Empty field builder: cheaper to discard it, adopt the shared list,
            // and (if the runtime mandates field builders) recreate it lazily.
            listingsBuilder_.dispose();
            listingsBuilder_ = null;
            listings_ = other.listings_;
            bitField0_ = (bitField0_ & ~0x00000001);
            listingsBuilder_ =
                com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders
                    ? getListingsFieldBuilder()
                    : null;
          } else {
            listingsBuilder_.addAllMessages(other.listings_);
          }
        }
      }
      // Proto3 string merge: only a non-empty token overwrites the current one.
      if (!other.getNextPageToken().isEmpty()) {
        nextPageToken_ = other.nextPageToken_;
        bitField0_ |= 0x00000002;
        onChanged();
      }
      this.mergeUnknownFields(other.getUnknownFields());
      onChanged();
      return this;
    }
@java.lang.Override
public final boolean isInitialized() {
return true;
}
    @java.lang.Override
    public Builder mergeFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      // Wire-format parser: consumes tag/value pairs until EOF (tag 0) or an
      // end-group tag, merging recognized fields into this builder.
      if (extensionRegistry == null) {
        throw new java.lang.NullPointerException();
      }
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              done = true;
              break;
            case 10:
              // Tag 10 = field 1 (`listings`), wire type 2 (length-delimited message).
              {
                com.google.cloud.bigquery.dataexchange.v1beta1.Listing m =
                    input.readMessage(
                        com.google.cloud.bigquery.dataexchange.v1beta1.Listing.parser(),
                        extensionRegistry);
                if (listingsBuilder_ == null) {
                  ensureListingsIsMutable();
                  listings_.add(m);
                } else {
                  listingsBuilder_.addMessage(m);
                }
                break;
              } // case 10
            case 18:
              // Tag 18 = field 2 (`next_page_token`), wire type 2 (UTF-8 string).
              {
                nextPageToken_ = input.readStringRequireUtf8();
                bitField0_ |= 0x00000002;
                break;
              } // case 18
            default:
              {
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
          } // switch (tag)
        } // while (!done)
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.unwrapIOException();
      } finally {
        // Notify parent builders of mutation even when parsing aborts mid-stream.
        onChanged();
      } // finally
      return this;
    }
private int bitField0_;
private java.util.List<com.google.cloud.bigquery.dataexchange.v1beta1.Listing> listings_ =
java.util.Collections.emptyList();
    private void ensureListingsIsMutable() {
      // Copy-on-write: bit 0x1 of bitField0_ records whether listings_ is a
      // privately owned ArrayList; if not, copy the (possibly shared) list first.
      if (!((bitField0_ & 0x00000001) != 0)) {
        listings_ =
            new java.util.ArrayList<com.google.cloud.bigquery.dataexchange.v1beta1.Listing>(
                listings_);
        bitField0_ |= 0x00000001;
      }
    }
private com.google.protobuf.RepeatedFieldBuilderV3<
com.google.cloud.bigquery.dataexchange.v1beta1.Listing,
com.google.cloud.bigquery.dataexchange.v1beta1.Listing.Builder,
com.google.cloud.bigquery.dataexchange.v1beta1.ListingOrBuilder>
listingsBuilder_;
/**
*
*
* <pre>
* The list of Listing.
* </pre>
*
* <code>repeated .google.cloud.bigquery.dataexchange.v1beta1.Listing listings = 1;</code>
*/
public java.util.List<com.google.cloud.bigquery.dataexchange.v1beta1.Listing>
getListingsList() {
if (listingsBuilder_ == null) {
return java.util.Collections.unmodifiableList(listings_);
} else {
return listingsBuilder_.getMessageList();
}
}
/**
*
*
* <pre>
* The list of Listing.
* </pre>
*
* <code>repeated .google.cloud.bigquery.dataexchange.v1beta1.Listing listings = 1;</code>
*/
public int getListingsCount() {
if (listingsBuilder_ == null) {
return listings_.size();
} else {
return listingsBuilder_.getCount();
}
}
/**
*
*
* <pre>
* The list of Listing.
* </pre>
*
* <code>repeated .google.cloud.bigquery.dataexchange.v1beta1.Listing listings = 1;</code>
*/
public com.google.cloud.bigquery.dataexchange.v1beta1.Listing getListings(int index) {
if (listingsBuilder_ == null) {
return listings_.get(index);
} else {
return listingsBuilder_.getMessage(index);
}
}
/**
*
*
* <pre>
* The list of Listing.
* </pre>
*
* <code>repeated .google.cloud.bigquery.dataexchange.v1beta1.Listing listings = 1;</code>
*/
public Builder setListings(
int index, com.google.cloud.bigquery.dataexchange.v1beta1.Listing value) {
if (listingsBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureListingsIsMutable();
listings_.set(index, value);
onChanged();
} else {
listingsBuilder_.setMessage(index, value);
}
return this;
}
/**
*
*
* <pre>
* The list of Listing.
* </pre>
*
* <code>repeated .google.cloud.bigquery.dataexchange.v1beta1.Listing listings = 1;</code>
*/
public Builder setListings(
int index, com.google.cloud.bigquery.dataexchange.v1beta1.Listing.Builder builderForValue) {
if (listingsBuilder_ == null) {
ensureListingsIsMutable();
listings_.set(index, builderForValue.build());
onChanged();
} else {
listingsBuilder_.setMessage(index, builderForValue.build());
}
return this;
}
/**
*
*
* <pre>
* The list of Listing.
* </pre>
*
* <code>repeated .google.cloud.bigquery.dataexchange.v1beta1.Listing listings = 1;</code>
*/
public Builder addListings(com.google.cloud.bigquery.dataexchange.v1beta1.Listing value) {
if (listingsBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureListingsIsMutable();
listings_.add(value);
onChanged();
} else {
listingsBuilder_.addMessage(value);
}
return this;
}
/**
*
*
* <pre>
* The list of Listing.
* </pre>
*
* <code>repeated .google.cloud.bigquery.dataexchange.v1beta1.Listing listings = 1;</code>
*/
public Builder addListings(
int index, com.google.cloud.bigquery.dataexchange.v1beta1.Listing value) {
if (listingsBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureListingsIsMutable();
listings_.add(index, value);
onChanged();
} else {
listingsBuilder_.addMessage(index, value);
}
return this;
}
/**
*
*
* <pre>
* The list of Listing.
* </pre>
*
* <code>repeated .google.cloud.bigquery.dataexchange.v1beta1.Listing listings = 1;</code>
*/
public Builder addListings(
com.google.cloud.bigquery.dataexchange.v1beta1.Listing.Builder builderForValue) {
if (listingsBuilder_ == null) {
ensureListingsIsMutable();
listings_.add(builderForValue.build());
onChanged();
} else {
listingsBuilder_.addMessage(builderForValue.build());
}
return this;
}
/**
*
*
* <pre>
* The list of Listing.
* </pre>
*
* <code>repeated .google.cloud.bigquery.dataexchange.v1beta1.Listing listings = 1;</code>
*/
public Builder addListings(
int index, com.google.cloud.bigquery.dataexchange.v1beta1.Listing.Builder builderForValue) {
if (listingsBuilder_ == null) {
ensureListingsIsMutable();
listings_.add(index, builderForValue.build());
onChanged();
} else {
listingsBuilder_.addMessage(index, builderForValue.build());
}
return this;
}
    /**
     *
     *
     * <pre>
     * The list of Listing.
     * </pre>
     *
     * <code>repeated .google.cloud.bigquery.dataexchange.v1beta1.Listing listings = 1;</code>
     *
     * @param values the listings to append, in iteration order
     * @return this builder for chaining
     */
    public Builder addAllListings(
        java.lang.Iterable<? extends com.google.cloud.bigquery.dataexchange.v1beta1.Listing>
            values) {
      if (listingsBuilder_ == null) {
        ensureListingsIsMutable();
        // Bulk-append through the protobuf helper rather than a manual loop.
        com.google.protobuf.AbstractMessageLite.Builder.addAll(values, listings_);
        onChanged();
      } else {
        listingsBuilder_.addAllMessages(values);
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * The list of Listing.
     * </pre>
     *
     * <code>repeated .google.cloud.bigquery.dataexchange.v1beta1.Listing listings = 1;</code>
     *
     * @return this builder for chaining
     */
    public Builder clearListings() {
      if (listingsBuilder_ == null) {
        // Drop back to the shared empty list and clear the "mutable copy made" bit
        // so the next mutation triggers a fresh copy in ensureListingsIsMutable().
        listings_ = java.util.Collections.emptyList();
        bitField0_ = (bitField0_ & ~0x00000001);
        onChanged();
      } else {
        listingsBuilder_.clear();
      }
      return this;
    }
/**
*
*
* <pre>
* The list of Listing.
* </pre>
*
* <code>repeated .google.cloud.bigquery.dataexchange.v1beta1.Listing listings = 1;</code>
*/
public Builder removeListings(int index) {
if (listingsBuilder_ == null) {
ensureListingsIsMutable();
listings_.remove(index);
onChanged();
} else {
listingsBuilder_.remove(index);
}
return this;
}
/**
*
*
* <pre>
* The list of Listing.
* </pre>
*
* <code>repeated .google.cloud.bigquery.dataexchange.v1beta1.Listing listings = 1;</code>
*/
public com.google.cloud.bigquery.dataexchange.v1beta1.Listing.Builder getListingsBuilder(
int index) {
return getListingsFieldBuilder().getBuilder(index);
}
/**
*
*
* <pre>
* The list of Listing.
* </pre>
*
* <code>repeated .google.cloud.bigquery.dataexchange.v1beta1.Listing listings = 1;</code>
*/
public com.google.cloud.bigquery.dataexchange.v1beta1.ListingOrBuilder getListingsOrBuilder(
int index) {
if (listingsBuilder_ == null) {
return listings_.get(index);
} else {
return listingsBuilder_.getMessageOrBuilder(index);
}
}
/**
*
*
* <pre>
* The list of Listing.
* </pre>
*
* <code>repeated .google.cloud.bigquery.dataexchange.v1beta1.Listing listings = 1;</code>
*/
public java.util.List<? extends com.google.cloud.bigquery.dataexchange.v1beta1.ListingOrBuilder>
getListingsOrBuilderList() {
if (listingsBuilder_ != null) {
return listingsBuilder_.getMessageOrBuilderList();
} else {
return java.util.Collections.unmodifiableList(listings_);
}
}
/**
*
*
* <pre>
* The list of Listing.
* </pre>
*
* <code>repeated .google.cloud.bigquery.dataexchange.v1beta1.Listing listings = 1;</code>
*/
public com.google.cloud.bigquery.dataexchange.v1beta1.Listing.Builder addListingsBuilder() {
return getListingsFieldBuilder()
.addBuilder(com.google.cloud.bigquery.dataexchange.v1beta1.Listing.getDefaultInstance());
}
/**
*
*
* <pre>
* The list of Listing.
* </pre>
*
* <code>repeated .google.cloud.bigquery.dataexchange.v1beta1.Listing listings = 1;</code>
*/
public com.google.cloud.bigquery.dataexchange.v1beta1.Listing.Builder addListingsBuilder(
int index) {
return getListingsFieldBuilder()
.addBuilder(
index, com.google.cloud.bigquery.dataexchange.v1beta1.Listing.getDefaultInstance());
}
/**
*
*
* <pre>
* The list of Listing.
* </pre>
*
* <code>repeated .google.cloud.bigquery.dataexchange.v1beta1.Listing listings = 1;</code>
*/
public java.util.List<com.google.cloud.bigquery.dataexchange.v1beta1.Listing.Builder>
getListingsBuilderList() {
return getListingsFieldBuilder().getBuilderList();
}
    // Lazily creates the repeated-field builder on first builder-view access.
    // After creation, listings_ is nulled and all reads/writes for this field go
    // through listingsBuilder_ ("builder mode").
    private com.google.protobuf.RepeatedFieldBuilderV3<
            com.google.cloud.bigquery.dataexchange.v1beta1.Listing,
            com.google.cloud.bigquery.dataexchange.v1beta1.Listing.Builder,
            com.google.cloud.bigquery.dataexchange.v1beta1.ListingOrBuilder>
        getListingsFieldBuilder() {
      if (listingsBuilder_ == null) {
        listingsBuilder_ =
            new com.google.protobuf.RepeatedFieldBuilderV3<
                com.google.cloud.bigquery.dataexchange.v1beta1.Listing,
                com.google.cloud.bigquery.dataexchange.v1beta1.Listing.Builder,
                com.google.cloud.bigquery.dataexchange.v1beta1.ListingOrBuilder>(
                listings_, ((bitField0_ & 0x00000001) != 0), getParentForChildren(), isClean());
        listings_ = null;
      }
      return listingsBuilder_;
    }
private java.lang.Object nextPageToken_ = "";
    /**
     *
     *
     * <pre>
     * A token to request the next page of results.
     * </pre>
     *
     * <code>string next_page_token = 2;</code>
     *
     * @return The nextPageToken.
     */
    public java.lang.String getNextPageToken() {
      // The field holds either a cached String or the raw ByteString from the wire;
      // decode UTF-8 lazily on first access and cache the String for later calls.
      java.lang.Object ref = nextPageToken_;
      if (!(ref instanceof java.lang.String)) {
        com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        nextPageToken_ = s;
        return s;
      } else {
        return (java.lang.String) ref;
      }
    }
/**
*
*
* <pre>
* A token to request the next page of results.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @return The bytes for nextPageToken.
*/
public com.google.protobuf.ByteString getNextPageTokenBytes() {
java.lang.Object ref = nextPageToken_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
nextPageToken_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
*
*
* <pre>
* A token to request the next page of results.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @param value The nextPageToken to set.
* @return This builder for chaining.
*/
public Builder setNextPageToken(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
nextPageToken_ = value;
bitField0_ |= 0x00000002;
onChanged();
return this;
}
/**
*
*
* <pre>
* A token to request the next page of results.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @return This builder for chaining.
*/
public Builder clearNextPageToken() {
nextPageToken_ = getDefaultInstance().getNextPageToken();
bitField0_ = (bitField0_ & ~0x00000002);
onChanged();
return this;
}
    /**
     *
     *
     * <pre>
     * A token to request the next page of results.
     * </pre>
     *
     * <code>string next_page_token = 2;</code>
     *
     * @param value The bytes for nextPageToken to set.
     * @return This builder for chaining.
     * @throws NullPointerException if {@code value} is null
     */
    public Builder setNextPageTokenBytes(com.google.protobuf.ByteString value) {
      if (value == null) {
        throw new NullPointerException();
      }
      // Reject byte strings that are not well-formed UTF-8 before storing them raw.
      checkByteStringIsUtf8(value);
      nextPageToken_ = value;
      bitField0_ |= 0x00000002;
      onChanged();
      return this;
    }
@java.lang.Override
public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:google.cloud.bigquery.dataexchange.v1beta1.ListListingsResponse)
}
// @@protoc_insertion_point(class_scope:google.cloud.bigquery.dataexchange.v1beta1.ListListingsResponse)
private static final com.google.cloud.bigquery.dataexchange.v1beta1.ListListingsResponse
DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new com.google.cloud.bigquery.dataexchange.v1beta1.ListListingsResponse();
}
public static com.google.cloud.bigquery.dataexchange.v1beta1.ListListingsResponse
getDefaultInstance() {
return DEFAULT_INSTANCE;
}
  // Wire parser: delegates to Builder.mergeFrom and attaches the partially built
  // message to any thrown parse exception so callers can inspect what was read.
  private static final com.google.protobuf.Parser<ListListingsResponse> PARSER =
      new com.google.protobuf.AbstractParser<ListListingsResponse>() {
        @java.lang.Override
        public ListListingsResponse parsePartialFrom(
            com.google.protobuf.CodedInputStream input,
            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
            throws com.google.protobuf.InvalidProtocolBufferException {
          Builder builder = newBuilder();
          try {
            builder.mergeFrom(input, extensionRegistry);
          } catch (com.google.protobuf.InvalidProtocolBufferException e) {
            throw e.setUnfinishedMessage(builder.buildPartial());
          } catch (com.google.protobuf.UninitializedMessageException e) {
            throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
          } catch (java.io.IOException e) {
            // Wrap plain I/O failures so the parser's contract (IPBE only) holds.
            throw new com.google.protobuf.InvalidProtocolBufferException(e)
                .setUnfinishedMessage(builder.buildPartial());
          }
          return builder.buildPartial();
        }
      };
public static com.google.protobuf.Parser<ListListingsResponse> parser() {
return PARSER;
}
@java.lang.Override
public com.google.protobuf.Parser<ListListingsResponse> getParserForType() {
return PARSER;
}
@java.lang.Override
public com.google.cloud.bigquery.dataexchange.v1beta1.ListListingsResponse
getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
// ======== end of generated file: ListListingsResponse.java ========
// ---- next file: googleapis/google-cloud-java (size 36,960) ----
// ---- java-cloudbuild/proto-google-cloud-build-v2/src/main/java/com/google/cloudbuild/v2/InstallationState.java ----
/*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/devtools/cloudbuild/v2/repositories.proto
// Protobuf Java Version: 3.25.8
package com.google.cloudbuild.v2;
/**
*
*
* <pre>
* Describes stage and necessary actions to be taken by the
* user to complete the installation. Used for GitHub and GitHub Enterprise
* based connections.
* </pre>
*
* Protobuf type {@code google.devtools.cloudbuild.v2.InstallationState}
*/
public final class InstallationState extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.devtools.cloudbuild.v2.InstallationState)
InstallationStateOrBuilder {
private static final long serialVersionUID = 0L;
// Use InstallationState.newBuilder() to construct.
private InstallationState(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private InstallationState() {
stage_ = 0;
message_ = "";
actionUri_ = "";
}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
return new InstallationState();
}
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloudbuild.v2.RepositoryManagerProto
.internal_static_google_devtools_cloudbuild_v2_InstallationState_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloudbuild.v2.RepositoryManagerProto
.internal_static_google_devtools_cloudbuild_v2_InstallationState_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloudbuild.v2.InstallationState.class,
com.google.cloudbuild.v2.InstallationState.Builder.class);
}
  /**
   *
   *
   * <pre>
   * Stage of the installation process.
   * </pre>
   *
   * Protobuf enum {@code google.devtools.cloudbuild.v2.InstallationState.Stage}
   */
  public enum Stage implements com.google.protobuf.ProtocolMessageEnum {
    /**
     *
     *
     * <pre>
     * No stage specified.
     * </pre>
     *
     * <code>STAGE_UNSPECIFIED = 0;</code>
     */
    STAGE_UNSPECIFIED(0),
    /**
     *
     *
     * <pre>
     * Only for GitHub Enterprise. An App creation has been requested.
     * The user needs to confirm the creation in their GitHub enterprise host.
     * </pre>
     *
     * <code>PENDING_CREATE_APP = 1;</code>
     */
    PENDING_CREATE_APP(1),
    /**
     *
     *
     * <pre>
     * User needs to authorize the GitHub (or Enterprise) App via OAuth.
     * </pre>
     *
     * <code>PENDING_USER_OAUTH = 2;</code>
     */
    PENDING_USER_OAUTH(2),
    /**
     *
     *
     * <pre>
     * User needs to follow the link to install the GitHub (or Enterprise) App.
     * </pre>
     *
     * <code>PENDING_INSTALL_APP = 3;</code>
     */
    PENDING_INSTALL_APP(3),
    /**
     *
     *
     * <pre>
     * Installation process has been completed.
     * </pre>
     *
     * <code>COMPLETE = 10;</code>
     */
    COMPLETE(10),
    // Sentinel for wire values not defined in this version of the schema; carries
    // no valid proto number (getNumber()/getValueDescriptor() throw for it).
    UNRECOGNIZED(-1),
    ;
    /**
     *
     *
     * <pre>
     * No stage specified.
     * </pre>
     *
     * <code>STAGE_UNSPECIFIED = 0;</code>
     */
    public static final int STAGE_UNSPECIFIED_VALUE = 0;
    /**
     *
     *
     * <pre>
     * Only for GitHub Enterprise. An App creation has been requested.
     * The user needs to confirm the creation in their GitHub enterprise host.
     * </pre>
     *
     * <code>PENDING_CREATE_APP = 1;</code>
     */
    public static final int PENDING_CREATE_APP_VALUE = 1;
    /**
     *
     *
     * <pre>
     * User needs to authorize the GitHub (or Enterprise) App via OAuth.
     * </pre>
     *
     * <code>PENDING_USER_OAUTH = 2;</code>
     */
    public static final int PENDING_USER_OAUTH_VALUE = 2;
    /**
     *
     *
     * <pre>
     * User needs to follow the link to install the GitHub (or Enterprise) App.
     * </pre>
     *
     * <code>PENDING_INSTALL_APP = 3;</code>
     */
    public static final int PENDING_INSTALL_APP_VALUE = 3;
    /**
     *
     *
     * <pre>
     * Installation process has been completed.
     * </pre>
     *
     * <code>COMPLETE = 10;</code>
     */
    public static final int COMPLETE_VALUE = 10;
    // Returns the proto-declared numeric value; UNRECOGNIZED has none by design.
    public final int getNumber() {
      if (this == UNRECOGNIZED) {
        throw new java.lang.IllegalArgumentException(
            "Can't get the number of an unknown enum value.");
      }
      return value;
    }
    /**
     * @param value The numeric wire value of the corresponding enum entry.
     * @return The enum associated with the given numeric wire value.
     * @deprecated Use {@link #forNumber(int)} instead.
     */
    @java.lang.Deprecated
    public static Stage valueOf(int value) {
      return forNumber(value);
    }
    /**
     * @param value The numeric wire value of the corresponding enum entry.
     * @return The enum associated with the given numeric wire value, or {@code null}
     *     if the number is not defined in this schema version.
     */
    public static Stage forNumber(int value) {
      switch (value) {
        case 0:
          return STAGE_UNSPECIFIED;
        case 1:
          return PENDING_CREATE_APP;
        case 2:
          return PENDING_USER_OAUTH;
        case 3:
          return PENDING_INSTALL_APP;
        case 10:
          return COMPLETE;
        default:
          return null;
      }
    }
    public static com.google.protobuf.Internal.EnumLiteMap<Stage> internalGetValueMap() {
      return internalValueMap;
    }
    private static final com.google.protobuf.Internal.EnumLiteMap<Stage> internalValueMap =
        new com.google.protobuf.Internal.EnumLiteMap<Stage>() {
          public Stage findValueByNumber(int number) {
            return Stage.forNumber(number);
          }
        };
    public final com.google.protobuf.Descriptors.EnumValueDescriptor getValueDescriptor() {
      if (this == UNRECOGNIZED) {
        throw new java.lang.IllegalStateException(
            "Can't get the descriptor of an unrecognized enum value.");
      }
      return getDescriptor().getValues().get(ordinal());
    }
    public final com.google.protobuf.Descriptors.EnumDescriptor getDescriptorForType() {
      return getDescriptor();
    }
    public static final com.google.protobuf.Descriptors.EnumDescriptor getDescriptor() {
      return com.google.cloudbuild.v2.InstallationState.getDescriptor().getEnumTypes().get(0);
    }
    // Cached copy of values(); indexed by descriptor index in valueOf(descriptor)
    // to avoid re-cloning the values array on every lookup.
    private static final Stage[] VALUES = values();
    public static Stage valueOf(com.google.protobuf.Descriptors.EnumValueDescriptor desc) {
      if (desc.getType() != getDescriptor()) {
        throw new java.lang.IllegalArgumentException("EnumValueDescriptor is not for this type.");
      }
      if (desc.getIndex() == -1) {
        return UNRECOGNIZED;
      }
      return VALUES[desc.getIndex()];
    }
    // Proto-declared numeric value (-1 for UNRECOGNIZED).
    private final int value;
    private Stage(int value) {
      this.value = value;
    }
    // @@protoc_insertion_point(enum_scope:google.devtools.cloudbuild.v2.InstallationState.Stage)
  }
public static final int STAGE_FIELD_NUMBER = 1;
private int stage_ = 0;
/**
*
*
* <pre>
* Output only. Current step of the installation process.
* </pre>
*
* <code>
* .google.devtools.cloudbuild.v2.InstallationState.Stage stage = 1 [(.google.api.field_behavior) = OUTPUT_ONLY];
* </code>
*
* @return The enum numeric value on the wire for stage.
*/
@java.lang.Override
public int getStageValue() {
return stage_;
}
/**
*
*
* <pre>
* Output only. Current step of the installation process.
* </pre>
*
* <code>
* .google.devtools.cloudbuild.v2.InstallationState.Stage stage = 1 [(.google.api.field_behavior) = OUTPUT_ONLY];
* </code>
*
* @return The stage.
*/
@java.lang.Override
public com.google.cloudbuild.v2.InstallationState.Stage getStage() {
com.google.cloudbuild.v2.InstallationState.Stage result =
com.google.cloudbuild.v2.InstallationState.Stage.forNumber(stage_);
return result == null ? com.google.cloudbuild.v2.InstallationState.Stage.UNRECOGNIZED : result;
}
public static final int MESSAGE_FIELD_NUMBER = 2;
@SuppressWarnings("serial")
private volatile java.lang.Object message_ = "";
/**
*
*
* <pre>
* Output only. Message of what the user should do next to continue the
* installation. Empty string if the installation is already complete.
* </pre>
*
* <code>string message = 2 [(.google.api.field_behavior) = OUTPUT_ONLY];</code>
*
* @return The message.
*/
@java.lang.Override
public java.lang.String getMessage() {
java.lang.Object ref = message_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
message_ = s;
return s;
}
}
/**
*
*
* <pre>
* Output only. Message of what the user should do next to continue the
* installation. Empty string if the installation is already complete.
* </pre>
*
* <code>string message = 2 [(.google.api.field_behavior) = OUTPUT_ONLY];</code>
*
* @return The bytes for message.
*/
@java.lang.Override
public com.google.protobuf.ByteString getMessageBytes() {
java.lang.Object ref = message_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
message_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
public static final int ACTION_URI_FIELD_NUMBER = 3;
@SuppressWarnings("serial")
private volatile java.lang.Object actionUri_ = "";
/**
*
*
* <pre>
* Output only. Link to follow for next action. Empty string if the
* installation is already complete.
* </pre>
*
* <code>string action_uri = 3 [(.google.api.field_behavior) = OUTPUT_ONLY];</code>
*
* @return The actionUri.
*/
@java.lang.Override
public java.lang.String getActionUri() {
java.lang.Object ref = actionUri_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
actionUri_ = s;
return s;
}
}
/**
*
*
* <pre>
* Output only. Link to follow for next action. Empty string if the
* installation is already complete.
* </pre>
*
* <code>string action_uri = 3 [(.google.api.field_behavior) = OUTPUT_ONLY];</code>
*
* @return The bytes for actionUri.
*/
@java.lang.Override
public com.google.protobuf.ByteString getActionUriBytes() {
java.lang.Object ref = actionUri_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
actionUri_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
  @java.lang.Override
  public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
    // Fields holding their default value (enum 0, empty string) are omitted from
    // the wire format entirely, as the guards below show.
    if (stage_ != com.google.cloudbuild.v2.InstallationState.Stage.STAGE_UNSPECIFIED.getNumber()) {
      output.writeEnum(1, stage_);
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(message_)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 2, message_);
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(actionUri_)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 3, actionUri_);
    }
    // Round-trip any fields parsed from a newer schema version.
    getUnknownFields().writeTo(output);
  }
  @java.lang.Override
  public int getSerializedSize() {
    // Memoized: -1 means "not yet computed"; mirrors the field set written by writeTo.
    int size = memoizedSize;
    if (size != -1) return size;
    size = 0;
    if (stage_ != com.google.cloudbuild.v2.InstallationState.Stage.STAGE_UNSPECIFIED.getNumber()) {
      size += com.google.protobuf.CodedOutputStream.computeEnumSize(1, stage_);
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(message_)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, message_);
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(actionUri_)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(3, actionUri_);
    }
    size += getUnknownFields().getSerializedSize();
    memoizedSize = size;
    return size;
  }
  @java.lang.Override
  public boolean equals(final java.lang.Object obj) {
    if (obj == this) {
      return true;
    }
    if (!(obj instanceof com.google.cloudbuild.v2.InstallationState)) {
      return super.equals(obj);
    }
    com.google.cloudbuild.v2.InstallationState other =
        (com.google.cloudbuild.v2.InstallationState) obj;
    // Stage is compared by raw wire number, so two distinct unrecognized values differ.
    if (stage_ != other.stage_) return false;
    if (!getMessage().equals(other.getMessage())) return false;
    if (!getActionUri().equals(other.getActionUri())) return false;
    // Unknown (schema-newer) fields participate in equality as well.
    if (!getUnknownFields().equals(other.getUnknownFields())) return false;
    return true;
  }
  @java.lang.Override
  public int hashCode() {
    // Memoized: 0 is the "not yet computed" sentinel; mixes the same fields as equals().
    if (memoizedHashCode != 0) {
      return memoizedHashCode;
    }
    int hash = 41;
    hash = (19 * hash) + getDescriptor().hashCode();
    hash = (37 * hash) + STAGE_FIELD_NUMBER;
    hash = (53 * hash) + stage_;
    hash = (37 * hash) + MESSAGE_FIELD_NUMBER;
    hash = (53 * hash) + getMessage().hashCode();
    hash = (37 * hash) + ACTION_URI_FIELD_NUMBER;
    hash = (53 * hash) + getActionUri().hashCode();
    hash = (29 * hash) + getUnknownFields().hashCode();
    memoizedHashCode = hash;
    return hash;
  }
public static com.google.cloudbuild.v2.InstallationState parseFrom(java.nio.ByteBuffer data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloudbuild.v2.InstallationState parseFrom(
java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloudbuild.v2.InstallationState parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloudbuild.v2.InstallationState parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloudbuild.v2.InstallationState parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloudbuild.v2.InstallationState parseFrom(
byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloudbuild.v2.InstallationState parseFrom(java.io.InputStream input)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloudbuild.v2.InstallationState parseFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloudbuild.v2.InstallationState parseDelimitedFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
}
public static com.google.cloudbuild.v2.InstallationState parseDelimitedFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloudbuild.v2.InstallationState parseFrom(
com.google.protobuf.CodedInputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloudbuild.v2.InstallationState parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() {
return newBuilder();
}
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(com.google.cloudbuild.v2.InstallationState prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
*
*
* <pre>
* Describes stage and necessary actions to be taken by the
* user to complete the installation. Used for GitHub and GitHub Enterprise
* based connections.
* </pre>
*
* Protobuf type {@code google.devtools.cloudbuild.v2.InstallationState}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.devtools.cloudbuild.v2.InstallationState)
com.google.cloudbuild.v2.InstallationStateOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloudbuild.v2.RepositoryManagerProto
.internal_static_google_devtools_cloudbuild_v2_InstallationState_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloudbuild.v2.RepositoryManagerProto
.internal_static_google_devtools_cloudbuild_v2_InstallationState_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloudbuild.v2.InstallationState.class,
com.google.cloudbuild.v2.InstallationState.Builder.class);
}
// Construct using com.google.cloudbuild.v2.InstallationState.newBuilder()
private Builder() {}
private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
}
@java.lang.Override
public Builder clear() {
super.clear();
bitField0_ = 0;
stage_ = 0;
message_ = "";
actionUri_ = "";
return this;
}
@java.lang.Override
public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
return com.google.cloudbuild.v2.RepositoryManagerProto
.internal_static_google_devtools_cloudbuild_v2_InstallationState_descriptor;
}
@java.lang.Override
public com.google.cloudbuild.v2.InstallationState getDefaultInstanceForType() {
return com.google.cloudbuild.v2.InstallationState.getDefaultInstance();
}
@java.lang.Override
public com.google.cloudbuild.v2.InstallationState build() {
com.google.cloudbuild.v2.InstallationState result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public com.google.cloudbuild.v2.InstallationState buildPartial() {
com.google.cloudbuild.v2.InstallationState result =
new com.google.cloudbuild.v2.InstallationState(this);
if (bitField0_ != 0) {
buildPartial0(result);
}
onBuilt();
return result;
}
    // Copies into the result only those fields whose builder "has" bits are set,
    // leaving the rest at the message defaults established by the constructor.
    private void buildPartial0(com.google.cloudbuild.v2.InstallationState result) {
      int from_bitField0_ = bitField0_;
      if (((from_bitField0_ & 0x00000001) != 0)) {
        result.stage_ = stage_;
      }
      if (((from_bitField0_ & 0x00000002) != 0)) {
        result.message_ = message_;
      }
      if (((from_bitField0_ & 0x00000004) != 0)) {
        result.actionUri_ = actionUri_;
      }
    }
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof com.google.cloudbuild.v2.InstallationState) {
return mergeFrom((com.google.cloudbuild.v2.InstallationState) other);
} else {
super.mergeFrom(other);
return this;
}
}
    // Message-level merge: each field of `other` overwrites this builder's value
    // only when it is non-default in `other`; merging the default instance is a no-op.
    public Builder mergeFrom(com.google.cloudbuild.v2.InstallationState other) {
      if (other == com.google.cloudbuild.v2.InstallationState.getDefaultInstance()) return this;
      if (other.stage_ != 0) {
        setStageValue(other.getStageValue());
      }
      if (!other.getMessage().isEmpty()) {
        message_ = other.message_;
        bitField0_ |= 0x00000002;
        onChanged();
      }
      if (!other.getActionUri().isEmpty()) {
        actionUri_ = other.actionUri_;
        bitField0_ |= 0x00000004;
        onChanged();
      }
      this.mergeUnknownFields(other.getUnknownFields());
      onChanged();
      return this;
    }
    // Always true: this message declares no `required` fields (proto3).
    @java.lang.Override
    public final boolean isInitialized() {
      return true;
    }
    // Parses serialized bytes directly into this builder. Each case label is a
    // wire tag: (field_number << 3) | wire_type, so 8 = field 1 varint (enum),
    // 18 = field 2 length-delimited (string), 26 = field 3 length-delimited.
    @java.lang.Override
    public Builder mergeFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      if (extensionRegistry == null) {
        throw new java.lang.NullPointerException();
      }
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              done = true;
              break;
            case 8:
              {
                stage_ = input.readEnum();
                bitField0_ |= 0x00000001;
                break;
              } // case 8
            case 18:
              {
                message_ = input.readStringRequireUtf8();
                bitField0_ |= 0x00000002;
                break;
              } // case 18
            case 26:
              {
                actionUri_ = input.readStringRequireUtf8();
                bitField0_ |= 0x00000004;
                break;
              } // case 26
            default:
              {
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
          } // switch (tag)
        } // while (!done)
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.unwrapIOException();
      } finally {
        onChanged();
      } // finally
      return this;
    }
    // Tracks which fields have been explicitly set on this builder.
    private int bitField0_;
    // Raw wire value of the `stage` enum; kept as int so unrecognized values
    // survive round-tripping.
    private int stage_ = 0;
    /**
     *
     *
     * <pre>
     * Output only. Current step of the installation process.
     * </pre>
     *
     * <code>
     * .google.devtools.cloudbuild.v2.InstallationState.Stage stage = 1 [(.google.api.field_behavior) = OUTPUT_ONLY];
     * </code>
     *
     * @return The enum numeric value on the wire for stage.
     */
    @java.lang.Override
    public int getStageValue() {
      return stage_;
    }
    /**
     *
     *
     * <pre>
     * Output only. Current step of the installation process.
     * </pre>
     *
     * <code>
     * .google.devtools.cloudbuild.v2.InstallationState.Stage stage = 1 [(.google.api.field_behavior) = OUTPUT_ONLY];
     * </code>
     *
     * @param value The enum numeric value on the wire for stage to set.
     * @return This builder for chaining.
     */
    public Builder setStageValue(int value) {
      stage_ = value;
      bitField0_ |= 0x00000001;
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Output only. Current step of the installation process.
     * </pre>
     *
     * <code>
     * .google.devtools.cloudbuild.v2.InstallationState.Stage stage = 1 [(.google.api.field_behavior) = OUTPUT_ONLY];
     * </code>
     *
     * @return The stage.
     */
    @java.lang.Override
    public com.google.cloudbuild.v2.InstallationState.Stage getStage() {
      // Wire values with no matching enum constant map to UNRECOGNIZED.
      com.google.cloudbuild.v2.InstallationState.Stage result =
          com.google.cloudbuild.v2.InstallationState.Stage.forNumber(stage_);
      return result == null
          ? com.google.cloudbuild.v2.InstallationState.Stage.UNRECOGNIZED
          : result;
    }
    /**
     *
     *
     * <pre>
     * Output only. Current step of the installation process.
     * </pre>
     *
     * <code>
     * .google.devtools.cloudbuild.v2.InstallationState.Stage stage = 1 [(.google.api.field_behavior) = OUTPUT_ONLY];
     * </code>
     *
     * @param value The stage to set.
     * @return This builder for chaining.
     */
    public Builder setStage(com.google.cloudbuild.v2.InstallationState.Stage value) {
      if (value == null) {
        throw new NullPointerException();
      }
      bitField0_ |= 0x00000001;
      stage_ = value.getNumber();
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Output only. Current step of the installation process.
     * </pre>
     *
     * <code>
     * .google.devtools.cloudbuild.v2.InstallationState.Stage stage = 1 [(.google.api.field_behavior) = OUTPUT_ONLY];
     * </code>
     *
     * @return This builder for chaining.
     */
    public Builder clearStage() {
      bitField0_ = (bitField0_ & ~0x00000001);
      stage_ = 0;
      onChanged();
      return this;
    }
    // Stored as Object: either a String or a ByteString; lazily converted and
    // cached in whichever form was last requested.
    private java.lang.Object message_ = "";
    /**
     *
     *
     * <pre>
     * Output only. Message of what the user should do next to continue the
     * installation. Empty string if the installation is already complete.
     * </pre>
     *
     * <code>string message = 2 [(.google.api.field_behavior) = OUTPUT_ONLY];</code>
     *
     * @return The message.
     */
    public java.lang.String getMessage() {
      java.lang.Object ref = message_;
      if (!(ref instanceof java.lang.String)) {
        com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        message_ = s;
        return s;
      } else {
        return (java.lang.String) ref;
      }
    }
    /**
     *
     *
     * <pre>
     * Output only. Message of what the user should do next to continue the
     * installation. Empty string if the installation is already complete.
     * </pre>
     *
     * <code>string message = 2 [(.google.api.field_behavior) = OUTPUT_ONLY];</code>
     *
     * @return The bytes for message.
     */
    public com.google.protobuf.ByteString getMessageBytes() {
      java.lang.Object ref = message_;
      if (ref instanceof String) {
        com.google.protobuf.ByteString b =
            com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
        message_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }
    /**
     *
     *
     * <pre>
     * Output only. Message of what the user should do next to continue the
     * installation. Empty string if the installation is already complete.
     * </pre>
     *
     * <code>string message = 2 [(.google.api.field_behavior) = OUTPUT_ONLY];</code>
     *
     * @param value The message to set.
     * @return This builder for chaining.
     */
    public Builder setMessage(java.lang.String value) {
      if (value == null) {
        throw new NullPointerException();
      }
      message_ = value;
      bitField0_ |= 0x00000002;
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Output only. Message of what the user should do next to continue the
     * installation. Empty string if the installation is already complete.
     * </pre>
     *
     * <code>string message = 2 [(.google.api.field_behavior) = OUTPUT_ONLY];</code>
     *
     * @return This builder for chaining.
     */
    public Builder clearMessage() {
      message_ = getDefaultInstance().getMessage();
      bitField0_ = (bitField0_ & ~0x00000002);
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Output only. Message of what the user should do next to continue the
     * installation. Empty string if the installation is already complete.
     * </pre>
     *
     * <code>string message = 2 [(.google.api.field_behavior) = OUTPUT_ONLY];</code>
     *
     * @param value The bytes for message to set.
     * @return This builder for chaining.
     */
    public Builder setMessageBytes(com.google.protobuf.ByteString value) {
      if (value == null) {
        throw new NullPointerException();
      }
      checkByteStringIsUtf8(value);
      message_ = value;
      bitField0_ |= 0x00000002;
      onChanged();
      return this;
    }
    // Stored as Object: either a String or a ByteString; lazily converted and
    // cached in whichever form was last requested.
    private java.lang.Object actionUri_ = "";
    /**
     *
     *
     * <pre>
     * Output only. Link to follow for next action. Empty string if the
     * installation is already complete.
     * </pre>
     *
     * <code>string action_uri = 3 [(.google.api.field_behavior) = OUTPUT_ONLY];</code>
     *
     * @return The actionUri.
     */
    public java.lang.String getActionUri() {
      java.lang.Object ref = actionUri_;
      if (!(ref instanceof java.lang.String)) {
        com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        actionUri_ = s;
        return s;
      } else {
        return (java.lang.String) ref;
      }
    }
    /**
     *
     *
     * <pre>
     * Output only. Link to follow for next action. Empty string if the
     * installation is already complete.
     * </pre>
     *
     * <code>string action_uri = 3 [(.google.api.field_behavior) = OUTPUT_ONLY];</code>
     *
     * @return The bytes for actionUri.
     */
    public com.google.protobuf.ByteString getActionUriBytes() {
      java.lang.Object ref = actionUri_;
      if (ref instanceof String) {
        com.google.protobuf.ByteString b =
            com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
        actionUri_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }
    /**
     *
     *
     * <pre>
     * Output only. Link to follow for next action. Empty string if the
     * installation is already complete.
     * </pre>
     *
     * <code>string action_uri = 3 [(.google.api.field_behavior) = OUTPUT_ONLY];</code>
     *
     * @param value The actionUri to set.
     * @return This builder for chaining.
     */
    public Builder setActionUri(java.lang.String value) {
      if (value == null) {
        throw new NullPointerException();
      }
      actionUri_ = value;
      bitField0_ |= 0x00000004;
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Output only. Link to follow for next action. Empty string if the
     * installation is already complete.
     * </pre>
     *
     * <code>string action_uri = 3 [(.google.api.field_behavior) = OUTPUT_ONLY];</code>
     *
     * @return This builder for chaining.
     */
    public Builder clearActionUri() {
      actionUri_ = getDefaultInstance().getActionUri();
      bitField0_ = (bitField0_ & ~0x00000004);
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Output only. Link to follow for next action. Empty string if the
     * installation is already complete.
     * </pre>
     *
     * <code>string action_uri = 3 [(.google.api.field_behavior) = OUTPUT_ONLY];</code>
     *
     * @param value The bytes for actionUri to set.
     * @return This builder for chaining.
     */
    public Builder setActionUriBytes(com.google.protobuf.ByteString value) {
      if (value == null) {
        throw new NullPointerException();
      }
      checkByteStringIsUtf8(value);
      actionUri_ = value;
      bitField0_ |= 0x00000004;
      onChanged();
      return this;
    }
    // Unknown-field handling is inherited; these overrides only narrow the
    // return type to this Builder.
    @java.lang.Override
    public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.setUnknownFields(unknownFields);
    }
    @java.lang.Override
    public final Builder mergeUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.mergeUnknownFields(unknownFields);
    }
// @@protoc_insertion_point(builder_scope:google.devtools.cloudbuild.v2.InstallationState)
}
// @@protoc_insertion_point(class_scope:google.devtools.cloudbuild.v2.InstallationState)
  // Shared immutable default instance; all unset singular message fields and
  // merge comparisons reference this object.
  private static final com.google.cloudbuild.v2.InstallationState DEFAULT_INSTANCE;
  static {
    DEFAULT_INSTANCE = new com.google.cloudbuild.v2.InstallationState();
  }
  public static com.google.cloudbuild.v2.InstallationState getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }
  // Parser implemented on top of the Builder's mergeFrom; any partial state
  // parsed before a failure is attached to the thrown exception via
  // setUnfinishedMessage so callers can inspect it.
  private static final com.google.protobuf.Parser<InstallationState> PARSER =
      new com.google.protobuf.AbstractParser<InstallationState>() {
        @java.lang.Override
        public InstallationState parsePartialFrom(
            com.google.protobuf.CodedInputStream input,
            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
            throws com.google.protobuf.InvalidProtocolBufferException {
          Builder builder = newBuilder();
          try {
            builder.mergeFrom(input, extensionRegistry);
          } catch (com.google.protobuf.InvalidProtocolBufferException e) {
            throw e.setUnfinishedMessage(builder.buildPartial());
          } catch (com.google.protobuf.UninitializedMessageException e) {
            throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
          } catch (java.io.IOException e) {
            throw new com.google.protobuf.InvalidProtocolBufferException(e)
                .setUnfinishedMessage(builder.buildPartial());
          }
          return builder.buildPartial();
        }
      };
  // Static and instance accessors for the singleton parser / default instance.
  public static com.google.protobuf.Parser<InstallationState> parser() {
    return PARSER;
  }
  @java.lang.Override
  public com.google.protobuf.Parser<InstallationState> getParserForType() {
    return PARSER;
  }
  @java.lang.Override
  public com.google.cloudbuild.v2.InstallationState getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }
}
// ---- dataset separator: end of InstallationState.java; next entry:
// googleapis/google-cloud-java | 36,996 |
// java-dataform/proto-google-cloud-dataform-v1beta1/src/main/java/com/google/cloud/dataform/v1beta1/CreateRepositoryRequest.java ----
/*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/dataform/v1beta1/dataform.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.dataform.v1beta1;
/**
*
*
* <pre>
* `CreateRepository` request message.
* </pre>
*
* Protobuf type {@code google.cloud.dataform.v1beta1.CreateRepositoryRequest}
*/
public final class CreateRepositoryRequest extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.dataform.v1beta1.CreateRepositoryRequest)
CreateRepositoryRequestOrBuilder {
  private static final long serialVersionUID = 0L;
  // Use CreateRepositoryRequest.newBuilder() to construct.
  private CreateRepositoryRequest(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
    super(builder);
  }
  // Default constructor used only for DEFAULT_INSTANCE; initializes string
  // fields to their proto3 defaults (empty string).
  private CreateRepositoryRequest() {
    parent_ = "";
    repositoryId_ = "";
  }
  @java.lang.Override
  @SuppressWarnings({"unused"})
  protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
    return new CreateRepositoryRequest();
  }
  // Descriptor plumbing: links this class to the proto descriptor and the
  // reflective field-accessor table generated in DataformProto.
  public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
    return com.google.cloud.dataform.v1beta1.DataformProto
        .internal_static_google_cloud_dataform_v1beta1_CreateRepositoryRequest_descriptor;
  }
  @java.lang.Override
  protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internalGetFieldAccessorTable() {
    return com.google.cloud.dataform.v1beta1.DataformProto
        .internal_static_google_cloud_dataform_v1beta1_CreateRepositoryRequest_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            com.google.cloud.dataform.v1beta1.CreateRepositoryRequest.class,
            com.google.cloud.dataform.v1beta1.CreateRepositoryRequest.Builder.class);
  }
  // Tracks presence of the singular `repository` message field (bit 0x1).
  private int bitField0_;
  public static final int PARENT_FIELD_NUMBER = 1;
  // String/ByteString dual representation; lazily converted and cached.
  @SuppressWarnings("serial")
  private volatile java.lang.Object parent_ = "";
  /**
   *
   *
   * <pre>
   * Required. The location in which to create the repository. Must be in the
   * format `projects/&#42;/locations/&#42;`.
   * </pre>
   *
   * <code>
   * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
   * </code>
   *
   * @return The parent.
   */
  @java.lang.Override
  public java.lang.String getParent() {
    java.lang.Object ref = parent_;
    if (ref instanceof java.lang.String) {
      return (java.lang.String) ref;
    } else {
      com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
      java.lang.String s = bs.toStringUtf8();
      parent_ = s;
      return s;
    }
  }
  /**
   *
   *
   * <pre>
   * Required. The location in which to create the repository. Must be in the
   * format `projects/&#42;/locations/&#42;`.
   * </pre>
   *
   * <code>
   * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
   * </code>
   *
   * @return The bytes for parent.
   */
  @java.lang.Override
  public com.google.protobuf.ByteString getParentBytes() {
    java.lang.Object ref = parent_;
    if (ref instanceof java.lang.String) {
      com.google.protobuf.ByteString b =
          com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
      parent_ = b;
      return b;
    } else {
      return (com.google.protobuf.ByteString) ref;
    }
  }
  public static final int REPOSITORY_FIELD_NUMBER = 2;
  // Singular message field; null until set, presence tracked in bitField0_.
  private com.google.cloud.dataform.v1beta1.Repository repository_;
  /**
   *
   *
   * <pre>
   * Required. The repository to create.
   * </pre>
   *
   * <code>
   * .google.cloud.dataform.v1beta1.Repository repository = 2 [(.google.api.field_behavior) = REQUIRED];
   * </code>
   *
   * @return Whether the repository field is set.
   */
  @java.lang.Override
  public boolean hasRepository() {
    return ((bitField0_ & 0x00000001) != 0);
  }
  /**
   *
   *
   * <pre>
   * Required. The repository to create.
   * </pre>
   *
   * <code>
   * .google.cloud.dataform.v1beta1.Repository repository = 2 [(.google.api.field_behavior) = REQUIRED];
   * </code>
   *
   * @return The repository.
   */
  @java.lang.Override
  public com.google.cloud.dataform.v1beta1.Repository getRepository() {
    // Never returns null: unset maps to the shared default instance.
    return repository_ == null
        ? com.google.cloud.dataform.v1beta1.Repository.getDefaultInstance()
        : repository_;
  }
  /**
   *
   *
   * <pre>
   * Required. The repository to create.
   * </pre>
   *
   * <code>
   * .google.cloud.dataform.v1beta1.Repository repository = 2 [(.google.api.field_behavior) = REQUIRED];
   * </code>
   */
  @java.lang.Override
  public com.google.cloud.dataform.v1beta1.RepositoryOrBuilder getRepositoryOrBuilder() {
    return repository_ == null
        ? com.google.cloud.dataform.v1beta1.Repository.getDefaultInstance()
        : repository_;
  }
  public static final int REPOSITORY_ID_FIELD_NUMBER = 3;
  // String/ByteString dual representation; lazily converted and cached.
  @SuppressWarnings("serial")
  private volatile java.lang.Object repositoryId_ = "";
  /**
   *
   *
   * <pre>
   * Required. The ID to use for the repository, which will become the final
   * component of the repository's resource name.
   * </pre>
   *
   * <code>string repository_id = 3 [(.google.api.field_behavior) = REQUIRED];</code>
   *
   * @return The repositoryId.
   */
  @java.lang.Override
  public java.lang.String getRepositoryId() {
    java.lang.Object ref = repositoryId_;
    if (ref instanceof java.lang.String) {
      return (java.lang.String) ref;
    } else {
      com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
      java.lang.String s = bs.toStringUtf8();
      repositoryId_ = s;
      return s;
    }
  }
  /**
   *
   *
   * <pre>
   * Required. The ID to use for the repository, which will become the final
   * component of the repository's resource name.
   * </pre>
   *
   * <code>string repository_id = 3 [(.google.api.field_behavior) = REQUIRED];</code>
   *
   * @return The bytes for repositoryId.
   */
  @java.lang.Override
  public com.google.protobuf.ByteString getRepositoryIdBytes() {
    java.lang.Object ref = repositoryId_;
    if (ref instanceof java.lang.String) {
      com.google.protobuf.ByteString b =
          com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
      repositoryId_ = b;
      return b;
    } else {
      return (com.google.protobuf.ByteString) ref;
    }
  }
  // Memoized tri-state: -1 unknown, 0 false, 1 true. Always resolves to true
  // here because proto3 has no `required` fields to validate.
  private byte memoizedIsInitialized = -1;
  @java.lang.Override
  public final boolean isInitialized() {
    byte isInitialized = memoizedIsInitialized;
    if (isInitialized == 1) return true;
    if (isInitialized == 0) return false;
    memoizedIsInitialized = 1;
    return true;
  }
  // Serializes set/non-default fields in field-number order, then any unknown
  // fields preserved from parsing.
  @java.lang.Override
  public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(parent_)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 1, parent_);
    }
    if (((bitField0_ & 0x00000001) != 0)) {
      output.writeMessage(2, getRepository());
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(repositoryId_)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 3, repositoryId_);
    }
    getUnknownFields().writeTo(output);
  }
  // Computes and memoizes the serialized byte size; must mirror writeTo's
  // field conditions exactly. memoizedSize == -1 means "not yet computed".
  @java.lang.Override
  public int getSerializedSize() {
    int size = memoizedSize;
    if (size != -1) return size;
    size = 0;
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(parent_)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, parent_);
    }
    if (((bitField0_ & 0x00000001) != 0)) {
      size += com.google.protobuf.CodedOutputStream.computeMessageSize(2, getRepository());
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(repositoryId_)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(3, repositoryId_);
    }
    size += getUnknownFields().getSerializedSize();
    memoizedSize = size;
    return size;
  }
  // Field-by-field value equality, including presence of `repository` and the
  // unknown-field set; kept consistent with hashCode below.
  @java.lang.Override
  public boolean equals(final java.lang.Object obj) {
    if (obj == this) {
      return true;
    }
    if (!(obj instanceof com.google.cloud.dataform.v1beta1.CreateRepositoryRequest)) {
      return super.equals(obj);
    }
    com.google.cloud.dataform.v1beta1.CreateRepositoryRequest other =
        (com.google.cloud.dataform.v1beta1.CreateRepositoryRequest) obj;
    if (!getParent().equals(other.getParent())) return false;
    if (hasRepository() != other.hasRepository()) return false;
    if (hasRepository()) {
      if (!getRepository().equals(other.getRepository())) return false;
    }
    if (!getRepositoryId().equals(other.getRepositoryId())) return false;
    if (!getUnknownFields().equals(other.getUnknownFields())) return false;
    return true;
  }
  // Memoized hash mixing each field number with its value hash; consistent
  // with equals above (repository contributes only when present).
  @java.lang.Override
  public int hashCode() {
    if (memoizedHashCode != 0) {
      return memoizedHashCode;
    }
    int hash = 41;
    hash = (19 * hash) + getDescriptor().hashCode();
    hash = (37 * hash) + PARENT_FIELD_NUMBER;
    hash = (53 * hash) + getParent().hashCode();
    if (hasRepository()) {
      hash = (37 * hash) + REPOSITORY_FIELD_NUMBER;
      hash = (53 * hash) + getRepository().hashCode();
    }
    hash = (37 * hash) + REPOSITORY_ID_FIELD_NUMBER;
    hash = (53 * hash) + getRepositoryId().hashCode();
    hash = (29 * hash) + getUnknownFields().hashCode();
    memoizedHashCode = hash;
    return hash;
  }
  // Static parse entry points for every supported input form (ByteBuffer,
  // ByteString, byte[], InputStream, CodedInputStream), each with and without
  // an ExtensionRegistry. All delegate to PARSER / GeneratedMessageV3 helpers.
  public static com.google.cloud.dataform.v1beta1.CreateRepositoryRequest parseFrom(
      java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.cloud.dataform.v1beta1.CreateRepositoryRequest parseFrom(
      java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.cloud.dataform.v1beta1.CreateRepositoryRequest parseFrom(
      com.google.protobuf.ByteString data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.cloud.dataform.v1beta1.CreateRepositoryRequest parseFrom(
      com.google.protobuf.ByteString data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.cloud.dataform.v1beta1.CreateRepositoryRequest parseFrom(byte[] data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.cloud.dataform.v1beta1.CreateRepositoryRequest parseFrom(
      byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.cloud.dataform.v1beta1.CreateRepositoryRequest parseFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }
  public static com.google.cloud.dataform.v1beta1.CreateRepositoryRequest parseFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }
  // Delimited variants read a leading varint length before the message bytes.
  public static com.google.cloud.dataform.v1beta1.CreateRepositoryRequest parseDelimitedFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
  }
  public static com.google.cloud.dataform.v1beta1.CreateRepositoryRequest parseDelimitedFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
        PARSER, input, extensionRegistry);
  }
  public static com.google.cloud.dataform.v1beta1.CreateRepositoryRequest parseFrom(
      com.google.protobuf.CodedInputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }
  public static com.google.cloud.dataform.v1beta1.CreateRepositoryRequest parseFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }
  // Builder factories. toBuilder() on the default instance returns a fresh
  // empty Builder to avoid an unnecessary mergeFrom.
  @java.lang.Override
  public Builder newBuilderForType() {
    return newBuilder();
  }
  public static Builder newBuilder() {
    return DEFAULT_INSTANCE.toBuilder();
  }
  public static Builder newBuilder(
      com.google.cloud.dataform.v1beta1.CreateRepositoryRequest prototype) {
    return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
  }
  @java.lang.Override
  public Builder toBuilder() {
    return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
  }
  @java.lang.Override
  protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
    Builder builder = new Builder(parent);
    return builder;
  }
/**
*
*
* <pre>
* `CreateRepository` request message.
* </pre>
*
* Protobuf type {@code google.cloud.dataform.v1beta1.CreateRepositoryRequest}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.cloud.dataform.v1beta1.CreateRepositoryRequest)
com.google.cloud.dataform.v1beta1.CreateRepositoryRequestOrBuilder {
    // Descriptor plumbing for the Builder; mirrors the message class's
    // accessors so reflection works on partially-built instances.
    public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
      return com.google.cloud.dataform.v1beta1.DataformProto
          .internal_static_google_cloud_dataform_v1beta1_CreateRepositoryRequest_descriptor;
    }
    @java.lang.Override
    protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return com.google.cloud.dataform.v1beta1.DataformProto
          .internal_static_google_cloud_dataform_v1beta1_CreateRepositoryRequest_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              com.google.cloud.dataform.v1beta1.CreateRepositoryRequest.class,
              com.google.cloud.dataform.v1beta1.CreateRepositoryRequest.Builder.class);
    }
    // Construct using com.google.cloud.dataform.v1beta1.CreateRepositoryRequest.newBuilder()
    private Builder() {
      maybeForceBuilderInitialization();
    }
    private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
      super(parent);
      maybeForceBuilderInitialization();
    }
    // Eagerly creates nested-message field builders when the runtime requires
    // it (alwaysUseFieldBuilders is true for nested-builder support).
    private void maybeForceBuilderInitialization() {
      if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) {
        getRepositoryFieldBuilder();
      }
    }
    // Resets every field (and its presence bit) to the proto3 default,
    // disposing the nested repository builder if one was created.
    @java.lang.Override
    public Builder clear() {
      super.clear();
      bitField0_ = 0;
      parent_ = "";
      repository_ = null;
      if (repositoryBuilder_ != null) {
        repositoryBuilder_.dispose();
        repositoryBuilder_ = null;
      }
      repositoryId_ = "";
      return this;
    }
    @java.lang.Override
    public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
      return com.google.cloud.dataform.v1beta1.DataformProto
          .internal_static_google_cloud_dataform_v1beta1_CreateRepositoryRequest_descriptor;
    }
    @java.lang.Override
    public com.google.cloud.dataform.v1beta1.CreateRepositoryRequest getDefaultInstanceForType() {
      return com.google.cloud.dataform.v1beta1.CreateRepositoryRequest.getDefaultInstance();
    }
    // build() validates initialization; buildPartial() skips validation and is
    // what parsers use for partially-read messages.
    @java.lang.Override
    public com.google.cloud.dataform.v1beta1.CreateRepositoryRequest build() {
      com.google.cloud.dataform.v1beta1.CreateRepositoryRequest result = buildPartial();
      if (!result.isInitialized()) {
        throw newUninitializedMessageException(result);
      }
      return result;
    }
    @java.lang.Override
    public com.google.cloud.dataform.v1beta1.CreateRepositoryRequest buildPartial() {
      com.google.cloud.dataform.v1beta1.CreateRepositoryRequest result =
          new com.google.cloud.dataform.v1beta1.CreateRepositoryRequest(this);
      if (bitField0_ != 0) {
        buildPartial0(result);
      }
      onBuilt();
      return result;
    }
    // Copies set fields into the result. Builder bits: 0x1 = parent,
    // 0x2 = repository, 0x4 = repository_id; the message itself only tracks
    // repository presence (its bit 0x1), accumulated in to_bitField0_.
    private void buildPartial0(com.google.cloud.dataform.v1beta1.CreateRepositoryRequest result) {
      int from_bitField0_ = bitField0_;
      if (((from_bitField0_ & 0x00000001) != 0)) {
        result.parent_ = parent_;
      }
      int to_bitField0_ = 0;
      if (((from_bitField0_ & 0x00000002) != 0)) {
        result.repository_ = repositoryBuilder_ == null ? repository_ : repositoryBuilder_.build();
        to_bitField0_ |= 0x00000001;
      }
      if (((from_bitField0_ & 0x00000004) != 0)) {
        result.repositoryId_ = repositoryId_;
      }
      result.bitField0_ |= to_bitField0_;
    }
    // Reflective-API overrides. These exist only to narrow the return type to
    // this Builder; all behavior is inherited from GeneratedMessageV3.Builder.
    @java.lang.Override
    public Builder clone() {
      return super.clone();
    }
    @java.lang.Override
    public Builder setField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.setField(field, value);
    }
    @java.lang.Override
    public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
      return super.clearField(field);
    }
    @java.lang.Override
    public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
      return super.clearOneof(oneof);
    }
    @java.lang.Override
    public Builder setRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
      return super.setRepeatedField(field, index, value);
    }
    @java.lang.Override
    public Builder addRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.addRepeatedField(field, value);
    }
    // Generic merge entry point: dispatches to the typed overload when
    // possible, otherwise falls back to the reflective merge.
    @java.lang.Override
    public Builder mergeFrom(com.google.protobuf.Message other) {
      if (other instanceof com.google.cloud.dataform.v1beta1.CreateRepositoryRequest) {
        return mergeFrom((com.google.cloud.dataform.v1beta1.CreateRepositoryRequest) other);
      } else {
        super.mergeFrom(other);
        return this;
      }
    }
    // Typed merge: strings copy when non-empty; the nested repository message
    // is recursively merged when present; unknown fields are merged too.
    public Builder mergeFrom(com.google.cloud.dataform.v1beta1.CreateRepositoryRequest other) {
      if (other == com.google.cloud.dataform.v1beta1.CreateRepositoryRequest.getDefaultInstance())
        return this;
      if (!other.getParent().isEmpty()) {
        parent_ = other.parent_;
        bitField0_ |= 0x00000001;
        onChanged();
      }
      if (other.hasRepository()) {
        mergeRepository(other.getRepository());
      }
      if (!other.getRepositoryId().isEmpty()) {
        repositoryId_ = other.repositoryId_;
        bitField0_ |= 0x00000004;
        onChanged();
      }
      this.mergeUnknownFields(other.getUnknownFields());
      onChanged();
      return this;
    }
    // Always true: this message declares no `required` fields (proto3).
    @java.lang.Override
    public final boolean isInitialized() {
      return true;
    }
    // Parses serialized bytes directly into this builder. Each case label is a
    // wire tag: (field_number << 3) | wire_type, so 10 = field 1 string,
    // 18 = field 2 embedded message, 26 = field 3 string.
    @java.lang.Override
    public Builder mergeFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      if (extensionRegistry == null) {
        throw new java.lang.NullPointerException();
      }
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              done = true;
              break;
            case 10:
              {
                parent_ = input.readStringRequireUtf8();
                bitField0_ |= 0x00000001;
                break;
              } // case 10
            case 18:
              {
                input.readMessage(getRepositoryFieldBuilder().getBuilder(), extensionRegistry);
                bitField0_ |= 0x00000002;
                break;
              } // case 18
            case 26:
              {
                repositoryId_ = input.readStringRequireUtf8();
                bitField0_ |= 0x00000004;
                break;
              } // case 26
            default:
              {
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
          } // switch (tag)
        } // while (!done)
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.unwrapIOException();
      } finally {
        onChanged();
      } // finally
      return this;
    }
private int bitField0_;
private java.lang.Object parent_ = "";
/**
*
*
* <pre>
* Required. The location in which to create the repository. Must be in the
* format `projects/*/locations/*`.
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return The parent.
*/
public java.lang.String getParent() {
java.lang.Object ref = parent_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
parent_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
*
*
* <pre>
* Required. The location in which to create the repository. Must be in the
* format `projects/*/locations/*`.
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return The bytes for parent.
*/
public com.google.protobuf.ByteString getParentBytes() {
java.lang.Object ref = parent_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
parent_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
*
*
* <pre>
* Required. The location in which to create the repository. Must be in the
* format `projects/*/locations/*`.
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @param value The parent to set.
* @return This builder for chaining.
*/
public Builder setParent(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
parent_ = value;
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. The location in which to create the repository. Must be in the
* format `projects/*/locations/*`.
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return This builder for chaining.
*/
public Builder clearParent() {
parent_ = getDefaultInstance().getParent();
bitField0_ = (bitField0_ & ~0x00000001);
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. The location in which to create the repository. Must be in the
* format `projects/*/locations/*`.
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @param value The bytes for parent to set.
* @return This builder for chaining.
*/
public Builder setParentBytes(com.google.protobuf.ByteString value) {
  if (value == null) {
    throw new NullPointerException();
  }
  checkByteStringIsUtf8(value); // proto3 string fields must carry valid UTF-8
  parent_ = value;
  bitField0_ |= 0x00000001;
  onChanged();
  return this;
}
// Message field `repository`: held directly in repository_ until a nested
// builder is requested, after which repositoryBuilder_ owns the value.
private com.google.cloud.dataform.v1beta1.Repository repository_;
private com.google.protobuf.SingleFieldBuilderV3<
        com.google.cloud.dataform.v1beta1.Repository,
        com.google.cloud.dataform.v1beta1.Repository.Builder,
        com.google.cloud.dataform.v1beta1.RepositoryOrBuilder>
    repositoryBuilder_;
/**
*
*
* <pre>
* Required. The repository to create.
* </pre>
*
* <code>
* .google.cloud.dataform.v1beta1.Repository repository = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return Whether the repository field is set.
*/
public boolean hasRepository() {
  // Bit 0x00000002 of bitField0_ tracks presence of `repository`.
  return ((bitField0_ & 0x00000002) != 0);
}
/**
*
*
* <pre>
* Required. The repository to create.
* </pre>
*
* <code>
* .google.cloud.dataform.v1beta1.Repository repository = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return The repository.
*/
public com.google.cloud.dataform.v1beta1.Repository getRepository() {
  if (repositoryBuilder_ == null) {
    // No nested builder yet: return the cached message, or the default
    // instance when the field has never been set.
    return repository_ == null
        ? com.google.cloud.dataform.v1beta1.Repository.getDefaultInstance()
        : repository_;
  } else {
    return repositoryBuilder_.getMessage();
  }
}
/**
*
*
* <pre>
* Required. The repository to create.
* </pre>
*
* <code>
* .google.cloud.dataform.v1beta1.Repository repository = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public Builder setRepository(com.google.cloud.dataform.v1beta1.Repository value) {
  // Route through the nested builder when one exists; otherwise store directly.
  if (repositoryBuilder_ == null) {
    if (value == null) {
      throw new NullPointerException();
    }
    repository_ = value;
  } else {
    repositoryBuilder_.setMessage(value);
  }
  bitField0_ |= 0x00000002;
  onChanged();
  return this;
}
/**
*
*
* <pre>
* Required. The repository to create.
* </pre>
*
* <code>
* .google.cloud.dataform.v1beta1.Repository repository = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public Builder setRepository(
    com.google.cloud.dataform.v1beta1.Repository.Builder builderForValue) {
  // Builder overload: materialize the message once, then store/forward it.
  if (repositoryBuilder_ == null) {
    repository_ = builderForValue.build();
  } else {
    repositoryBuilder_.setMessage(builderForValue.build());
  }
  bitField0_ |= 0x00000002;
  onChanged();
  return this;
}
/**
*
*
* <pre>
* Required. The repository to create.
* </pre>
*
* <code>
* .google.cloud.dataform.v1beta1.Repository repository = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public Builder mergeRepository(com.google.cloud.dataform.v1beta1.Repository value) {
  if (repositoryBuilder_ == null) {
    if (((bitField0_ & 0x00000002) != 0)
        && repository_ != null
        && repository_ != com.google.cloud.dataform.v1beta1.Repository.getDefaultInstance()) {
      // A non-default value is already present: proto field-merge into it.
      getRepositoryBuilder().mergeFrom(value);
    } else {
      // Field unset (or default): adopt the incoming value wholesale.
      repository_ = value;
    }
  } else {
    repositoryBuilder_.mergeFrom(value);
  }
  if (repository_ != null) {
    bitField0_ |= 0x00000002;
    onChanged();
  }
  return this;
}
/**
*
*
* <pre>
* Required. The repository to create.
* </pre>
*
* <code>
* .google.cloud.dataform.v1beta1.Repository repository = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public Builder clearRepository() {
  // Drop the has-bit, the cached message, and any nested builder.
  bitField0_ = (bitField0_ & ~0x00000002);
  repository_ = null;
  if (repositoryBuilder_ != null) {
    repositoryBuilder_.dispose();
    repositoryBuilder_ = null;
  }
  onChanged();
  return this;
}
/**
*
*
* <pre>
* Required. The repository to create.
* </pre>
*
* <code>
* .google.cloud.dataform.v1beta1.Repository repository = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public com.google.cloud.dataform.v1beta1.Repository.Builder getRepositoryBuilder() {
  // Requesting a mutable sub-builder marks the field present.
  bitField0_ |= 0x00000002;
  onChanged();
  return getRepositoryFieldBuilder().getBuilder();
}
/**
*
*
* <pre>
* Required. The repository to create.
* </pre>
*
* <code>
* .google.cloud.dataform.v1beta1.Repository repository = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public com.google.cloud.dataform.v1beta1.RepositoryOrBuilder getRepositoryOrBuilder() {
  // Read-only view; does not force creation of the nested builder.
  if (repositoryBuilder_ != null) {
    return repositoryBuilder_.getMessageOrBuilder();
  } else {
    return repository_ == null
        ? com.google.cloud.dataform.v1beta1.Repository.getDefaultInstance()
        : repository_;
  }
}
/**
*
*
* <pre>
* Required. The repository to create.
* </pre>
*
* <code>
* .google.cloud.dataform.v1beta1.Repository repository = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
private com.google.protobuf.SingleFieldBuilderV3<
        com.google.cloud.dataform.v1beta1.Repository,
        com.google.cloud.dataform.v1beta1.Repository.Builder,
        com.google.cloud.dataform.v1beta1.RepositoryOrBuilder>
    getRepositoryFieldBuilder() {
  // Lazily create the single-field builder, seeded with the current value.
  if (repositoryBuilder_ == null) {
    repositoryBuilder_ =
        new com.google.protobuf.SingleFieldBuilderV3<
            com.google.cloud.dataform.v1beta1.Repository,
            com.google.cloud.dataform.v1beta1.Repository.Builder,
            com.google.cloud.dataform.v1beta1.RepositoryOrBuilder>(
            getRepository(), getParentForChildren(), isClean());
    repository_ = null; // ownership of the value moves to the nested builder
  }
  return repositoryBuilder_;
}
// Holds either a String or a lazily-converted ByteString, same scheme as parent_.
private java.lang.Object repositoryId_ = "";
/**
*
*
* <pre>
* Required. The ID to use for the repository, which will become the final
* component of the repository's resource name.
* </pre>
*
* <code>string repository_id = 3 [(.google.api.field_behavior) = REQUIRED];</code>
*
* @return The repositoryId.
*/
public java.lang.String getRepositoryId() {
  java.lang.Object ref = repositoryId_;
  if (!(ref instanceof java.lang.String)) {
    // Field currently cached as bytes: decode once and cache the String back.
    com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
    java.lang.String s = bs.toStringUtf8();
    repositoryId_ = s;
    return s;
  } else {
    return (java.lang.String) ref;
  }
}
/**
*
*
* <pre>
* Required. The ID to use for the repository, which will become the final
* component of the repository's resource name.
* </pre>
*
* <code>string repository_id = 3 [(.google.api.field_behavior) = REQUIRED];</code>
*
* @return The bytes for repositoryId.
*/
public com.google.protobuf.ByteString getRepositoryIdBytes() {
  java.lang.Object ref = repositoryId_;
  if (ref instanceof String) {
    // Encode once and cache the ByteString form for subsequent calls.
    com.google.protobuf.ByteString b =
        com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
    repositoryId_ = b;
    return b;
  } else {
    return (com.google.protobuf.ByteString) ref;
  }
}
/**
*
*
* <pre>
* Required. The ID to use for the repository, which will become the final
* component of the repository's resource name.
* </pre>
*
* <code>string repository_id = 3 [(.google.api.field_behavior) = REQUIRED];</code>
*
* @param value The repositoryId to set.
* @return This builder for chaining.
*/
public Builder setRepositoryId(java.lang.String value) {
  if (value == null) {
    throw new NullPointerException();
  }
  repositoryId_ = value;
  bitField0_ |= 0x00000004; // mark `repository_id` as explicitly set
  onChanged();
  return this;
}
/**
*
*
* <pre>
* Required. The ID to use for the repository, which will become the final
* component of the repository's resource name.
* </pre>
*
* <code>string repository_id = 3 [(.google.api.field_behavior) = REQUIRED];</code>
*
* @return This builder for chaining.
*/
public Builder clearRepositoryId() {
  // Restore the default value and drop the has-bit for `repository_id`.
  repositoryId_ = getDefaultInstance().getRepositoryId();
  bitField0_ = (bitField0_ & ~0x00000004);
  onChanged();
  return this;
}
/**
*
*
* <pre>
* Required. The ID to use for the repository, which will become the final
* component of the repository's resource name.
* </pre>
*
* <code>string repository_id = 3 [(.google.api.field_behavior) = REQUIRED];</code>
*
* @param value The bytes for repositoryId to set.
* @return This builder for chaining.
*/
public Builder setRepositoryIdBytes(com.google.protobuf.ByteString value) {
  if (value == null) {
    throw new NullPointerException();
  }
  checkByteStringIsUtf8(value); // proto3 string fields must carry valid UTF-8
  repositoryId_ = value;
  bitField0_ |= 0x00000004;
  onChanged();
  return this;
}
@java.lang.Override
public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
  // Plain delegation; present so generated builders expose a final override.
  return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
    final com.google.protobuf.UnknownFieldSet unknownFields) {
  // Plain delegation; present so generated builders expose a final override.
  return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:google.cloud.dataform.v1beta1.CreateRepositoryRequest)
}
// @@protoc_insertion_point(class_scope:google.cloud.dataform.v1beta1.CreateRepositoryRequest)
// Shared immutable default instance; all unset message fields resolve to it.
private static final com.google.cloud.dataform.v1beta1.CreateRepositoryRequest DEFAULT_INSTANCE;

static {
  DEFAULT_INSTANCE = new com.google.cloud.dataform.v1beta1.CreateRepositoryRequest();
}

public static com.google.cloud.dataform.v1beta1.CreateRepositoryRequest getDefaultInstance() {
  return DEFAULT_INSTANCE;
}
// Wire-format parser. On any failure the partially-built message is attached
// to the thrown InvalidProtocolBufferException via setUnfinishedMessage().
private static final com.google.protobuf.Parser<CreateRepositoryRequest> PARSER =
    new com.google.protobuf.AbstractParser<CreateRepositoryRequest>() {
      @java.lang.Override
      public CreateRepositoryRequest parsePartialFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        Builder builder = newBuilder();
        try {
          builder.mergeFrom(input, extensionRegistry);
        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
          throw e.setUnfinishedMessage(builder.buildPartial());
        } catch (com.google.protobuf.UninitializedMessageException e) {
          // Convert missing-required-field failures into the standard exception type.
          throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
        } catch (java.io.IOException e) {
          throw new com.google.protobuf.InvalidProtocolBufferException(e)
              .setUnfinishedMessage(builder.buildPartial());
        }
        return builder.buildPartial();
      }
    };
// Static accessor for the shared parser instance.
public static com.google.protobuf.Parser<CreateRepositoryRequest> parser() {
  return PARSER;
}
@java.lang.Override
public com.google.protobuf.Parser<CreateRepositoryRequest> getParserForType() {
  // Instance-level accessor required by the Message interface.
  return PARSER;
}
@java.lang.Override
public com.google.cloud.dataform.v1beta1.CreateRepositoryRequest getDefaultInstanceForType() {
  // Instance-level accessor required by the Message interface.
  return DEFAULT_INSTANCE;
}
}
// ==== file boundary: googleapis/google-cloud-java — java-aiplatform/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/TrajectoryInOrderMatchInstance.java ====
/*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/aiplatform/v1beta1/evaluation_service.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.aiplatform.v1beta1;
/**
*
*
* <pre>
* Spec for TrajectoryInOrderMatch instance.
* </pre>
*
* Protobuf type {@code google.cloud.aiplatform.v1beta1.TrajectoryInOrderMatchInstance}
*/
public final class TrajectoryInOrderMatchInstance extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.aiplatform.v1beta1.TrajectoryInOrderMatchInstance)
TrajectoryInOrderMatchInstanceOrBuilder {
private static final long serialVersionUID = 0L;
// Use TrajectoryInOrderMatchInstance.newBuilder() to construct.
// Constructors are private: instances are created via newBuilder()/parsing.
private TrajectoryInOrderMatchInstance(
    com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
  super(builder);
}

private TrajectoryInOrderMatchInstance() {}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
  // Reflection hook used by the protobuf runtime to allocate fresh instances.
  return new TrajectoryInOrderMatchInstance();
}
// Proto descriptor for this message type, defined in evaluation_service.proto.
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
  return com.google.cloud.aiplatform.v1beta1.EvaluationServiceProto
      .internal_static_google_cloud_aiplatform_v1beta1_TrajectoryInOrderMatchInstance_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
    internalGetFieldAccessorTable() {
  // Maps proto field numbers to the generated Java accessors.
  return com.google.cloud.aiplatform.v1beta1.EvaluationServiceProto
      .internal_static_google_cloud_aiplatform_v1beta1_TrajectoryInOrderMatchInstance_fieldAccessorTable
      .ensureFieldAccessorsInitialized(
          com.google.cloud.aiplatform.v1beta1.TrajectoryInOrderMatchInstance.class,
          com.google.cloud.aiplatform.v1beta1.TrajectoryInOrderMatchInstance.Builder.class);
}
// bitField0_ bit 0x1 tracks presence of predicted_trajectory (field 1).
private int bitField0_;
public static final int PREDICTED_TRAJECTORY_FIELD_NUMBER = 1;
private com.google.cloud.aiplatform.v1beta1.Trajectory predictedTrajectory_;
/**
*
*
* <pre>
* Required. Spec for predicted tool call trajectory.
* </pre>
*
* <code>
* optional .google.cloud.aiplatform.v1beta1.Trajectory predicted_trajectory = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return Whether the predictedTrajectory field is set.
*/
@java.lang.Override
public boolean hasPredictedTrajectory() {
  // Bit 0x00000001 tracks presence of `predicted_trajectory`.
  return ((bitField0_ & 0x00000001) != 0);
}
/**
*
*
* <pre>
* Required. Spec for predicted tool call trajectory.
* </pre>
*
* <code>
* optional .google.cloud.aiplatform.v1beta1.Trajectory predicted_trajectory = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return The predictedTrajectory.
*/
@java.lang.Override
public com.google.cloud.aiplatform.v1beta1.Trajectory getPredictedTrajectory() {
  // Never returns null: unset resolves to the Trajectory default instance.
  return predictedTrajectory_ == null
      ? com.google.cloud.aiplatform.v1beta1.Trajectory.getDefaultInstance()
      : predictedTrajectory_;
}
/**
*
*
* <pre>
* Required. Spec for predicted tool call trajectory.
* </pre>
*
* <code>
* optional .google.cloud.aiplatform.v1beta1.Trajectory predicted_trajectory = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
@java.lang.Override
public com.google.cloud.aiplatform.v1beta1.TrajectoryOrBuilder getPredictedTrajectoryOrBuilder() {
  // On an immutable message this is identical to getPredictedTrajectory().
  return predictedTrajectory_ == null
      ? com.google.cloud.aiplatform.v1beta1.Trajectory.getDefaultInstance()
      : predictedTrajectory_;
}
// bitField0_ bit 0x2 tracks presence of reference_trajectory (field 2).
public static final int REFERENCE_TRAJECTORY_FIELD_NUMBER = 2;
private com.google.cloud.aiplatform.v1beta1.Trajectory referenceTrajectory_;
/**
*
*
* <pre>
* Required. Spec for reference tool call trajectory.
* </pre>
*
* <code>
* optional .google.cloud.aiplatform.v1beta1.Trajectory reference_trajectory = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return Whether the referenceTrajectory field is set.
*/
@java.lang.Override
public boolean hasReferenceTrajectory() {
  // Bit 0x00000002 tracks presence of `reference_trajectory`.
  return ((bitField0_ & 0x00000002) != 0);
}
/**
*
*
* <pre>
* Required. Spec for reference tool call trajectory.
* </pre>
*
* <code>
* optional .google.cloud.aiplatform.v1beta1.Trajectory reference_trajectory = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return The referenceTrajectory.
*/
@java.lang.Override
public com.google.cloud.aiplatform.v1beta1.Trajectory getReferenceTrajectory() {
  // Never returns null: unset resolves to the Trajectory default instance.
  return referenceTrajectory_ == null
      ? com.google.cloud.aiplatform.v1beta1.Trajectory.getDefaultInstance()
      : referenceTrajectory_;
}
/**
*
*
* <pre>
* Required. Spec for reference tool call trajectory.
* </pre>
*
* <code>
* optional .google.cloud.aiplatform.v1beta1.Trajectory reference_trajectory = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
@java.lang.Override
public com.google.cloud.aiplatform.v1beta1.TrajectoryOrBuilder getReferenceTrajectoryOrBuilder() {
  // On an immutable message this is identical to getReferenceTrajectory().
  return referenceTrajectory_ == null
      ? com.google.cloud.aiplatform.v1beta1.Trajectory.getDefaultInstance()
      : referenceTrajectory_;
}
// -1 = not computed, 0 = not initialized, 1 = initialized.
private byte memoizedIsInitialized = -1;

@java.lang.Override
public final boolean isInitialized() {
  byte isInitialized = memoizedIsInitialized;
  if (isInitialized == 1) return true;
  if (isInitialized == 0) return false;
  // Proto3 message with no required fields: always initialized; memoize.
  memoizedIsInitialized = 1;
  return true;
}
@java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
  // Serialize only explicitly-set fields, then any unknown fields.
  if (((bitField0_ & 0x00000001) != 0)) {
    output.writeMessage(1, getPredictedTrajectory());
  }
  if (((bitField0_ & 0x00000002) != 0)) {
    output.writeMessage(2, getReferenceTrajectory());
  }
  getUnknownFields().writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
  // Memoized: -1 means not yet computed (safe because the message is immutable).
  int size = memoizedSize;
  if (size != -1) return size;

  size = 0;
  if (((bitField0_ & 0x00000001) != 0)) {
    size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, getPredictedTrajectory());
  }
  if (((bitField0_ & 0x00000002) != 0)) {
    size += com.google.protobuf.CodedOutputStream.computeMessageSize(2, getReferenceTrajectory());
  }
  size += getUnknownFields().getSerializedSize();
  memoizedSize = size;
  return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
  if (obj == this) {
    return true;
  }
  if (!(obj instanceof com.google.cloud.aiplatform.v1beta1.TrajectoryInOrderMatchInstance)) {
    return super.equals(obj);
  }
  com.google.cloud.aiplatform.v1beta1.TrajectoryInOrderMatchInstance other =
      (com.google.cloud.aiplatform.v1beta1.TrajectoryInOrderMatchInstance) obj;

  // Fields compare equal only when presence AND value match.
  if (hasPredictedTrajectory() != other.hasPredictedTrajectory()) return false;
  if (hasPredictedTrajectory()) {
    if (!getPredictedTrajectory().equals(other.getPredictedTrajectory())) return false;
  }
  if (hasReferenceTrajectory() != other.hasReferenceTrajectory()) return false;
  if (hasReferenceTrajectory()) {
    if (!getReferenceTrajectory().equals(other.getReferenceTrajectory())) return false;
  }
  if (!getUnknownFields().equals(other.getUnknownFields())) return false;
  return true;
}
@java.lang.Override
public int hashCode() {
  // Memoized; 0 is reserved as the "not computed" sentinel.
  if (memoizedHashCode != 0) {
    return memoizedHashCode;
  }
  int hash = 41;
  hash = (19 * hash) + getDescriptor().hashCode();
  // Only set fields contribute, mirroring equals().
  if (hasPredictedTrajectory()) {
    hash = (37 * hash) + PREDICTED_TRAJECTORY_FIELD_NUMBER;
    hash = (53 * hash) + getPredictedTrajectory().hashCode();
  }
  if (hasReferenceTrajectory()) {
    hash = (37 * hash) + REFERENCE_TRAJECTORY_FIELD_NUMBER;
    hash = (53 * hash) + getReferenceTrajectory().hashCode();
  }
  hash = (29 * hash) + getUnknownFields().hashCode();
  memoizedHashCode = hash;
  return hash;
}
// Standard generated parse entry points: one overload per input source
// (ByteBuffer, ByteString, byte[], InputStream, CodedInputStream), each with
// and without an ExtensionRegistryLite. All delegate to the shared PARSER.
public static com.google.cloud.aiplatform.v1beta1.TrajectoryInOrderMatchInstance parseFrom(
    java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data);
}

public static com.google.cloud.aiplatform.v1beta1.TrajectoryInOrderMatchInstance parseFrom(
    java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data, extensionRegistry);
}

public static com.google.cloud.aiplatform.v1beta1.TrajectoryInOrderMatchInstance parseFrom(
    com.google.protobuf.ByteString data)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data);
}

public static com.google.cloud.aiplatform.v1beta1.TrajectoryInOrderMatchInstance parseFrom(
    com.google.protobuf.ByteString data,
    com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data, extensionRegistry);
}

public static com.google.cloud.aiplatform.v1beta1.TrajectoryInOrderMatchInstance parseFrom(
    byte[] data) throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data);
}

public static com.google.cloud.aiplatform.v1beta1.TrajectoryInOrderMatchInstance parseFrom(
    byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data, extensionRegistry);
}

public static com.google.cloud.aiplatform.v1beta1.TrajectoryInOrderMatchInstance parseFrom(
    java.io.InputStream input) throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}

public static com.google.cloud.aiplatform.v1beta1.TrajectoryInOrderMatchInstance parseFrom(
    java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
      PARSER, input, extensionRegistry);
}

// Delimited variants read a varint length prefix before the message bytes.
public static com.google.cloud.aiplatform.v1beta1.TrajectoryInOrderMatchInstance
    parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
}

public static com.google.cloud.aiplatform.v1beta1.TrajectoryInOrderMatchInstance
    parseDelimitedFrom(
        java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
      PARSER, input, extensionRegistry);
}

public static com.google.cloud.aiplatform.v1beta1.TrajectoryInOrderMatchInstance parseFrom(
    com.google.protobuf.CodedInputStream input) throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}

public static com.google.cloud.aiplatform.v1beta1.TrajectoryInOrderMatchInstance parseFrom(
    com.google.protobuf.CodedInputStream input,
    com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
      PARSER, input, extensionRegistry);
}
// Builder factories. toBuilder() on the default instance returns a fresh
// Builder to avoid mutating shared state.
@java.lang.Override
public Builder newBuilderForType() {
  return newBuilder();
}

public static Builder newBuilder() {
  return DEFAULT_INSTANCE.toBuilder();
}

public static Builder newBuilder(
    com.google.cloud.aiplatform.v1beta1.TrajectoryInOrderMatchInstance prototype) {
  return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}

@java.lang.Override
public Builder toBuilder() {
  return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
}

@java.lang.Override
protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
  Builder builder = new Builder(parent);
  return builder;
}
/**
*
*
* <pre>
* Spec for TrajectoryInOrderMatch instance.
* </pre>
*
* Protobuf type {@code google.cloud.aiplatform.v1beta1.TrajectoryInOrderMatchInstance}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.cloud.aiplatform.v1beta1.TrajectoryInOrderMatchInstance)
com.google.cloud.aiplatform.v1beta1.TrajectoryInOrderMatchInstanceOrBuilder {
// Same descriptor as the enclosing message type.
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
  return com.google.cloud.aiplatform.v1beta1.EvaluationServiceProto
      .internal_static_google_cloud_aiplatform_v1beta1_TrajectoryInOrderMatchInstance_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
    internalGetFieldAccessorTable() {
  // Same accessor table as the message; used for reflective field access.
  return com.google.cloud.aiplatform.v1beta1.EvaluationServiceProto
      .internal_static_google_cloud_aiplatform_v1beta1_TrajectoryInOrderMatchInstance_fieldAccessorTable
      .ensureFieldAccessorsInitialized(
          com.google.cloud.aiplatform.v1beta1.TrajectoryInOrderMatchInstance.class,
          com.google.cloud.aiplatform.v1beta1.TrajectoryInOrderMatchInstance.Builder.class);
}
// Construct using
// com.google.cloud.aiplatform.v1beta1.TrajectoryInOrderMatchInstance.newBuilder()
private Builder() {
  maybeForceBuilderInitialization();
}

private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
  super(parent);
  maybeForceBuilderInitialization();
}

private void maybeForceBuilderInitialization() {
  // Eagerly create nested field builders only when the runtime requires it
  // (alwaysUseFieldBuilders is set for parented builders).
  if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) {
    getPredictedTrajectoryFieldBuilder();
    getReferenceTrajectoryFieldBuilder();
  }
}
@java.lang.Override
public Builder clear() {
  // Reset all fields, has-bits, and nested builders to the pristine state.
  super.clear();
  bitField0_ = 0;
  predictedTrajectory_ = null;
  if (predictedTrajectoryBuilder_ != null) {
    predictedTrajectoryBuilder_.dispose();
    predictedTrajectoryBuilder_ = null;
  }
  referenceTrajectory_ = null;
  if (referenceTrajectoryBuilder_ != null) {
    referenceTrajectoryBuilder_.dispose();
    referenceTrajectoryBuilder_ = null;
  }
  return this;
}
@java.lang.Override
public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
  // Descriptor for the message this builder produces.
  return com.google.cloud.aiplatform.v1beta1.EvaluationServiceProto
      .internal_static_google_cloud_aiplatform_v1beta1_TrajectoryInOrderMatchInstance_descriptor;
}
@java.lang.Override
public com.google.cloud.aiplatform.v1beta1.TrajectoryInOrderMatchInstance
    getDefaultInstanceForType() {
  // Shared immutable default for the built message type.
  return com.google.cloud.aiplatform.v1beta1.TrajectoryInOrderMatchInstance
      .getDefaultInstance();
}
@java.lang.Override
public com.google.cloud.aiplatform.v1beta1.TrajectoryInOrderMatchInstance build() {
  com.google.cloud.aiplatform.v1beta1.TrajectoryInOrderMatchInstance result = buildPartial();
  // build() (unlike buildPartial()) rejects uninitialized messages.
  if (!result.isInitialized()) {
    throw newUninitializedMessageException(result);
  }
  return result;
}
@java.lang.Override
public com.google.cloud.aiplatform.v1beta1.TrajectoryInOrderMatchInstance buildPartial() {
  com.google.cloud.aiplatform.v1beta1.TrajectoryInOrderMatchInstance result =
      new com.google.cloud.aiplatform.v1beta1.TrajectoryInOrderMatchInstance(this);
  // Copy fields only when at least one has-bit is set.
  if (bitField0_ != 0) {
    buildPartial0(result);
  }
  onBuilt();
  return result;
}
// Transfers set fields from the builder into the immutable result, preferring
// nested-builder values over cached messages, and mirrors the has-bits.
private void buildPartial0(
    com.google.cloud.aiplatform.v1beta1.TrajectoryInOrderMatchInstance result) {
  int from_bitField0_ = bitField0_;
  int to_bitField0_ = 0;
  if (((from_bitField0_ & 0x00000001) != 0)) {
    result.predictedTrajectory_ =
        predictedTrajectoryBuilder_ == null
            ? predictedTrajectory_
            : predictedTrajectoryBuilder_.build();
    to_bitField0_ |= 0x00000001;
  }
  if (((from_bitField0_ & 0x00000002) != 0)) {
    result.referenceTrajectory_ =
        referenceTrajectoryBuilder_ == null
            ? referenceTrajectory_
            : referenceTrajectoryBuilder_.build();
    to_bitField0_ |= 0x00000002;
  }
  result.bitField0_ |= to_bitField0_;
}
// Reflective field-manipulation overrides: all plain delegations to the
// GeneratedMessageV3.Builder superclass, emitted for covariant return types.
@java.lang.Override
public Builder clone() {
  return super.clone();
}

@java.lang.Override
public Builder setField(
    com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
  return super.setField(field, value);
}

@java.lang.Override
public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
  return super.clearField(field);
}

@java.lang.Override
public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
  return super.clearOneof(oneof);
}

@java.lang.Override
public Builder setRepeatedField(
    com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
  return super.setRepeatedField(field, index, value);
}

@java.lang.Override
public Builder addRepeatedField(
    com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
  return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
  // Fast path for the concrete type; otherwise fall back to reflective merge.
  if (other instanceof com.google.cloud.aiplatform.v1beta1.TrajectoryInOrderMatchInstance) {
    return mergeFrom(
        (com.google.cloud.aiplatform.v1beta1.TrajectoryInOrderMatchInstance) other);
  } else {
    super.mergeFrom(other);
    return this;
  }
}
public Builder mergeFrom(
    com.google.cloud.aiplatform.v1beta1.TrajectoryInOrderMatchInstance other) {
  // Merging the default instance is a no-op.
  if (other
      == com.google.cloud.aiplatform.v1beta1.TrajectoryInOrderMatchInstance
          .getDefaultInstance()) return this;
  if (other.hasPredictedTrajectory()) {
    mergePredictedTrajectory(other.getPredictedTrajectory());
  }
  if (other.hasReferenceTrajectory()) {
    mergeReferenceTrajectory(other.getReferenceTrajectory());
  }
  this.mergeUnknownFields(other.getUnknownFields());
  onChanged();
  return this;
}
@java.lang.Override
public final boolean isInitialized() {
  // Proto3 message with no required fields: always initialized.
  return true;
}
@java.lang.Override
public Builder mergeFrom(
    com.google.protobuf.CodedInputStream input,
    com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws java.io.IOException {
  if (extensionRegistry == null) {
    throw new java.lang.NullPointerException();
  }
  try {
    boolean done = false;
    while (!done) {
      int tag = input.readTag();
      switch (tag) {
        case 0: // end of input
          done = true;
          break;
        case 10: // field 1 (predicted_trajectory), wire type 2
          {
            input.readMessage(
                getPredictedTrajectoryFieldBuilder().getBuilder(), extensionRegistry);
            bitField0_ |= 0x00000001;
            break;
          } // case 10
        case 18: // field 2 (reference_trajectory), wire type 2
          {
            input.readMessage(
                getReferenceTrajectoryFieldBuilder().getBuilder(), extensionRegistry);
            bitField0_ |= 0x00000002;
            break;
          } // case 18
        default:
          {
            // Unrecognized tag: preserve in unknown fields, or stop at group end.
            if (!super.parseUnknownField(input, extensionRegistry, tag)) {
              done = true; // was an endgroup tag
            }
            break;
          } // default:
      } // switch (tag)
    } // while (!done)
  } catch (com.google.protobuf.InvalidProtocolBufferException e) {
    throw e.unwrapIOException();
  } finally {
    onChanged();
  } // finally
  return this;
}
// Builder-local has-bits plus the predicted_trajectory storage: the plain
// message is cached until a nested builder is requested.
private int bitField0_;

private com.google.cloud.aiplatform.v1beta1.Trajectory predictedTrajectory_;
private com.google.protobuf.SingleFieldBuilderV3<
        com.google.cloud.aiplatform.v1beta1.Trajectory,
        com.google.cloud.aiplatform.v1beta1.Trajectory.Builder,
        com.google.cloud.aiplatform.v1beta1.TrajectoryOrBuilder>
    predictedTrajectoryBuilder_;
/**
*
*
* <pre>
* Required. Spec for predicted tool call trajectory.
* </pre>
*
* <code>
* optional .google.cloud.aiplatform.v1beta1.Trajectory predicted_trajectory = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return Whether the predictedTrajectory field is set.
*/
public boolean hasPredictedTrajectory() {
  // Bit 0x00000001 tracks presence of `predicted_trajectory`.
  return ((bitField0_ & 0x00000001) != 0);
}
/**
*
*
* <pre>
* Required. Spec for predicted tool call trajectory.
* </pre>
*
* <code>
* optional .google.cloud.aiplatform.v1beta1.Trajectory predicted_trajectory = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return The predictedTrajectory.
*/
public com.google.cloud.aiplatform.v1beta1.Trajectory getPredictedTrajectory() {
  if (predictedTrajectoryBuilder_ == null) {
    // No nested builder yet: return the cached message or the default instance.
    return predictedTrajectory_ == null
        ? com.google.cloud.aiplatform.v1beta1.Trajectory.getDefaultInstance()
        : predictedTrajectory_;
  } else {
    return predictedTrajectoryBuilder_.getMessage();
  }
}
/**
*
*
* <pre>
* Required. Spec for predicted tool call trajectory.
* </pre>
*
* <code>
* optional .google.cloud.aiplatform.v1beta1.Trajectory predicted_trajectory = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public Builder setPredictedTrajectory(com.google.cloud.aiplatform.v1beta1.Trajectory value) {
  // Route through the nested builder when one exists; otherwise store directly.
  if (predictedTrajectoryBuilder_ == null) {
    if (value == null) {
      throw new NullPointerException();
    }
    predictedTrajectory_ = value;
  } else {
    predictedTrajectoryBuilder_.setMessage(value);
  }
  bitField0_ |= 0x00000001;
  onChanged();
  return this;
}
/**
*
*
* <pre>
* Required. Spec for predicted tool call trajectory.
* </pre>
*
* <code>
* optional .google.cloud.aiplatform.v1beta1.Trajectory predicted_trajectory = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public Builder setPredictedTrajectory(
    com.google.cloud.aiplatform.v1beta1.Trajectory.Builder builderForValue) {
  // Builder overload: materialize the message once, then store/forward it.
  if (predictedTrajectoryBuilder_ == null) {
    predictedTrajectory_ = builderForValue.build();
  } else {
    predictedTrajectoryBuilder_.setMessage(builderForValue.build());
  }
  bitField0_ |= 0x00000001;
  onChanged();
  return this;
}
/**
*
*
* <pre>
* Required. Spec for predicted tool call trajectory.
* </pre>
*
* <code>
* optional .google.cloud.aiplatform.v1beta1.Trajectory predicted_trajectory = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public Builder mergePredictedTrajectory(com.google.cloud.aiplatform.v1beta1.Trajectory value) {
  if (predictedTrajectoryBuilder_ == null) {
    if (((bitField0_ & 0x00000001) != 0)
        && predictedTrajectory_ != null
        && predictedTrajectory_
            != com.google.cloud.aiplatform.v1beta1.Trajectory.getDefaultInstance()) {
      // A non-default value is already present: proto field-merge into it.
      getPredictedTrajectoryBuilder().mergeFrom(value);
    } else {
      // Field unset (or default): adopt the incoming value wholesale.
      predictedTrajectory_ = value;
    }
  } else {
    predictedTrajectoryBuilder_.mergeFrom(value);
  }
  if (predictedTrajectory_ != null) {
    bitField0_ |= 0x00000001;
    onChanged();
  }
  return this;
}
/**
*
*
* <pre>
* Required. Spec for predicted tool call trajectory.
* </pre>
*
* <code>
* optional .google.cloud.aiplatform.v1beta1.Trajectory predicted_trajectory = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public Builder clearPredictedTrajectory() {
  // Drop the has-bit, the cached message, and any nested builder.
  bitField0_ = (bitField0_ & ~0x00000001);
  predictedTrajectory_ = null;
  if (predictedTrajectoryBuilder_ != null) {
    predictedTrajectoryBuilder_.dispose();
    predictedTrajectoryBuilder_ = null;
  }
  onChanged();
  return this;
}
/**
*
*
* <pre>
* Required. Spec for predicted tool call trajectory.
* </pre>
*
* <code>
* optional .google.cloud.aiplatform.v1beta1.Trajectory predicted_trajectory = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public com.google.cloud.aiplatform.v1beta1.Trajectory.Builder getPredictedTrajectoryBuilder() {
bitField0_ |= 0x00000001;
onChanged();
return getPredictedTrajectoryFieldBuilder().getBuilder();
}
/**
*
*
* <pre>
* Required. Spec for predicted tool call trajectory.
* </pre>
*
* <code>
* optional .google.cloud.aiplatform.v1beta1.Trajectory predicted_trajectory = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public com.google.cloud.aiplatform.v1beta1.TrajectoryOrBuilder
getPredictedTrajectoryOrBuilder() {
if (predictedTrajectoryBuilder_ != null) {
return predictedTrajectoryBuilder_.getMessageOrBuilder();
} else {
return predictedTrajectory_ == null
? com.google.cloud.aiplatform.v1beta1.Trajectory.getDefaultInstance()
: predictedTrajectory_;
}
}
/**
*
*
* <pre>
* Required. Spec for predicted tool call trajectory.
* </pre>
*
* <code>
* optional .google.cloud.aiplatform.v1beta1.Trajectory predicted_trajectory = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
private com.google.protobuf.SingleFieldBuilderV3<
com.google.cloud.aiplatform.v1beta1.Trajectory,
com.google.cloud.aiplatform.v1beta1.Trajectory.Builder,
com.google.cloud.aiplatform.v1beta1.TrajectoryOrBuilder>
getPredictedTrajectoryFieldBuilder() {
if (predictedTrajectoryBuilder_ == null) {
predictedTrajectoryBuilder_ =
new com.google.protobuf.SingleFieldBuilderV3<
com.google.cloud.aiplatform.v1beta1.Trajectory,
com.google.cloud.aiplatform.v1beta1.Trajectory.Builder,
com.google.cloud.aiplatform.v1beta1.TrajectoryOrBuilder>(
getPredictedTrajectory(), getParentForChildren(), isClean());
predictedTrajectory_ = null;
}
return predictedTrajectoryBuilder_;
}
    // Storage and lazily-created field builder for `reference_trajectory`
    // (field number 2, has-bit 0x00000002). Mirrors predicted_trajectory.
    private com.google.cloud.aiplatform.v1beta1.Trajectory referenceTrajectory_;
    private com.google.protobuf.SingleFieldBuilderV3<
            com.google.cloud.aiplatform.v1beta1.Trajectory,
            com.google.cloud.aiplatform.v1beta1.Trajectory.Builder,
            com.google.cloud.aiplatform.v1beta1.TrajectoryOrBuilder>
        referenceTrajectoryBuilder_;
    /**
     *
     *
     * <pre>
     * Required. Spec for reference tool call trajectory.
     * </pre>
     *
     * <code>
     * optional .google.cloud.aiplatform.v1beta1.Trajectory reference_trajectory = 2 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     *
     * @return Whether the referenceTrajectory field is set.
     */
    public boolean hasReferenceTrajectory() {
      return ((bitField0_ & 0x00000002) != 0);
    }
    /**
     *
     *
     * <pre>
     * Required. Spec for reference tool call trajectory.
     * </pre>
     *
     * <code>
     * optional .google.cloud.aiplatform.v1beta1.Trajectory reference_trajectory = 2 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     *
     * @return The referenceTrajectory.
     */
    public com.google.cloud.aiplatform.v1beta1.Trajectory getReferenceTrajectory() {
      // Never returns null: falls back to the default instance when unset.
      if (referenceTrajectoryBuilder_ == null) {
        return referenceTrajectory_ == null
            ? com.google.cloud.aiplatform.v1beta1.Trajectory.getDefaultInstance()
            : referenceTrajectory_;
      } else {
        return referenceTrajectoryBuilder_.getMessage();
      }
    }
    /**
     *
     *
     * <pre>
     * Required. Spec for reference tool call trajectory.
     * </pre>
     *
     * <code>
     * optional .google.cloud.aiplatform.v1beta1.Trajectory reference_trajectory = 2 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     */
    public Builder setReferenceTrajectory(com.google.cloud.aiplatform.v1beta1.Trajectory value) {
      if (referenceTrajectoryBuilder_ == null) {
        if (value == null) {
          throw new NullPointerException();
        }
        referenceTrajectory_ = value;
      } else {
        referenceTrajectoryBuilder_.setMessage(value);
      }
      bitField0_ |= 0x00000002;
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Required. Spec for reference tool call trajectory.
     * </pre>
     *
     * <code>
     * optional .google.cloud.aiplatform.v1beta1.Trajectory reference_trajectory = 2 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     */
    public Builder setReferenceTrajectory(
        com.google.cloud.aiplatform.v1beta1.Trajectory.Builder builderForValue) {
      if (referenceTrajectoryBuilder_ == null) {
        referenceTrajectory_ = builderForValue.build();
      } else {
        referenceTrajectoryBuilder_.setMessage(builderForValue.build());
      }
      bitField0_ |= 0x00000002;
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Required. Spec for reference tool call trajectory.
     * </pre>
     *
     * <code>
     * optional .google.cloud.aiplatform.v1beta1.Trajectory reference_trajectory = 2 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     */
    public Builder mergeReferenceTrajectory(com.google.cloud.aiplatform.v1beta1.Trajectory value) {
      if (referenceTrajectoryBuilder_ == null) {
        // Reference comparison with the default instance: merge only when a
        // real value is already present, otherwise just adopt `value`.
        if (((bitField0_ & 0x00000002) != 0)
            && referenceTrajectory_ != null
            && referenceTrajectory_
                != com.google.cloud.aiplatform.v1beta1.Trajectory.getDefaultInstance()) {
          getReferenceTrajectoryBuilder().mergeFrom(value);
        } else {
          referenceTrajectory_ = value;
        }
      } else {
        referenceTrajectoryBuilder_.mergeFrom(value);
      }
      if (referenceTrajectory_ != null) {
        bitField0_ |= 0x00000002;
        onChanged();
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * Required. Spec for reference tool call trajectory.
     * </pre>
     *
     * <code>
     * optional .google.cloud.aiplatform.v1beta1.Trajectory reference_trajectory = 2 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     */
    public Builder clearReferenceTrajectory() {
      bitField0_ = (bitField0_ & ~0x00000002);
      referenceTrajectory_ = null;
      if (referenceTrajectoryBuilder_ != null) {
        referenceTrajectoryBuilder_.dispose();
        referenceTrajectoryBuilder_ = null;
      }
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Required. Spec for reference tool call trajectory.
     * </pre>
     *
     * <code>
     * optional .google.cloud.aiplatform.v1beta1.Trajectory reference_trajectory = 2 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     */
    public com.google.cloud.aiplatform.v1beta1.Trajectory.Builder getReferenceTrajectoryBuilder() {
      // Handing out the sub-builder marks the field as set.
      bitField0_ |= 0x00000002;
      onChanged();
      return getReferenceTrajectoryFieldBuilder().getBuilder();
    }
    /**
     *
     *
     * <pre>
     * Required. Spec for reference tool call trajectory.
     * </pre>
     *
     * <code>
     * optional .google.cloud.aiplatform.v1beta1.Trajectory reference_trajectory = 2 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     */
    public com.google.cloud.aiplatform.v1beta1.TrajectoryOrBuilder
        getReferenceTrajectoryOrBuilder() {
      if (referenceTrajectoryBuilder_ != null) {
        return referenceTrajectoryBuilder_.getMessageOrBuilder();
      } else {
        return referenceTrajectory_ == null
            ? com.google.cloud.aiplatform.v1beta1.Trajectory.getDefaultInstance()
            : referenceTrajectory_;
      }
    }
    /**
     *
     *
     * <pre>
     * Required. Spec for reference tool call trajectory.
     * </pre>
     *
     * <code>
     * optional .google.cloud.aiplatform.v1beta1.Trajectory reference_trajectory = 2 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     */
    private com.google.protobuf.SingleFieldBuilderV3<
            com.google.cloud.aiplatform.v1beta1.Trajectory,
            com.google.cloud.aiplatform.v1beta1.Trajectory.Builder,
            com.google.cloud.aiplatform.v1beta1.TrajectoryOrBuilder>
        getReferenceTrajectoryFieldBuilder() {
      // Lazy init; current value is transferred into the builder.
      if (referenceTrajectoryBuilder_ == null) {
        referenceTrajectoryBuilder_ =
            new com.google.protobuf.SingleFieldBuilderV3<
                com.google.cloud.aiplatform.v1beta1.Trajectory,
                com.google.cloud.aiplatform.v1beta1.Trajectory.Builder,
                com.google.cloud.aiplatform.v1beta1.TrajectoryOrBuilder>(
                getReferenceTrajectory(), getParentForChildren(), isClean());
        referenceTrajectory_ = null;
      }
      return referenceTrajectoryBuilder_;
    }
    // Final overrides that simply delegate unknown-field handling to the
    // GeneratedMessageV3.Builder base class.
    @java.lang.Override
    public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.setUnknownFields(unknownFields);
    }
    @java.lang.Override
    public final Builder mergeUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.mergeUnknownFields(unknownFields);
    }
// @@protoc_insertion_point(builder_scope:google.cloud.aiplatform.v1beta1.TrajectoryInOrderMatchInstance)
}
// @@protoc_insertion_point(class_scope:google.cloud.aiplatform.v1beta1.TrajectoryInOrderMatchInstance)
  // Singleton default instance, created eagerly at class-load time.
  private static final com.google.cloud.aiplatform.v1beta1.TrajectoryInOrderMatchInstance
      DEFAULT_INSTANCE;
  static {
    DEFAULT_INSTANCE = new com.google.cloud.aiplatform.v1beta1.TrajectoryInOrderMatchInstance();
  }
  public static com.google.cloud.aiplatform.v1beta1.TrajectoryInOrderMatchInstance
      getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }
  // Wire-format parser; on failure it attaches the partially parsed message
  // to the thrown InvalidProtocolBufferException via setUnfinishedMessage.
  private static final com.google.protobuf.Parser<TrajectoryInOrderMatchInstance> PARSER =
      new com.google.protobuf.AbstractParser<TrajectoryInOrderMatchInstance>() {
        @java.lang.Override
        public TrajectoryInOrderMatchInstance parsePartialFrom(
            com.google.protobuf.CodedInputStream input,
            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
            throws com.google.protobuf.InvalidProtocolBufferException {
          Builder builder = newBuilder();
          try {
            builder.mergeFrom(input, extensionRegistry);
          } catch (com.google.protobuf.InvalidProtocolBufferException e) {
            throw e.setUnfinishedMessage(builder.buildPartial());
          } catch (com.google.protobuf.UninitializedMessageException e) {
            throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
          } catch (java.io.IOException e) {
            // I/O errors are normalized to InvalidProtocolBufferException.
            throw new com.google.protobuf.InvalidProtocolBufferException(e)
                .setUnfinishedMessage(builder.buildPartial());
          }
          return builder.buildPartial();
        }
      };
  // Static and instance-level accessors for the shared PARSER / default
  // instance, as required by the protobuf runtime.
  public static com.google.protobuf.Parser<TrajectoryInOrderMatchInstance> parser() {
    return PARSER;
  }
  @java.lang.Override
  public com.google.protobuf.Parser<TrajectoryInOrderMatchInstance> getParserForType() {
    return PARSER;
  }
  @java.lang.Override
  public com.google.cloud.aiplatform.v1beta1.TrajectoryInOrderMatchInstance
      getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }
}
|
googleapis/google-cloud-java | 37,047 | java-datacatalog/proto-google-cloud-datacatalog-v1beta1/src/main/java/com/google/cloud/datacatalog/v1beta1/UpdateTagTemplateRequest.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/datacatalog/v1beta1/datacatalog.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.datacatalog.v1beta1;
/**
*
*
* <pre>
* Request message for
* [UpdateTagTemplate][google.cloud.datacatalog.v1beta1.DataCatalog.UpdateTagTemplate].
* </pre>
*
* Protobuf type {@code google.cloud.datacatalog.v1beta1.UpdateTagTemplateRequest}
*/
public final class UpdateTagTemplateRequest extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.datacatalog.v1beta1.UpdateTagTemplateRequest)
UpdateTagTemplateRequestOrBuilder {
  private static final long serialVersionUID = 0L;
  // Use UpdateTagTemplateRequest.newBuilder() to construct.
  private UpdateTagTemplateRequest(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
    super(builder);
  }
  private UpdateTagTemplateRequest() {}
  @java.lang.Override
  @SuppressWarnings({"unused"})
  protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
    // Used by the runtime to allocate instances reflectively.
    return new UpdateTagTemplateRequest();
  }
  public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
    return com.google.cloud.datacatalog.v1beta1.Datacatalog
        .internal_static_google_cloud_datacatalog_v1beta1_UpdateTagTemplateRequest_descriptor;
  }
  @java.lang.Override
  protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internalGetFieldAccessorTable() {
    // Binds this message class to the descriptor-driven reflection table.
    return com.google.cloud.datacatalog.v1beta1.Datacatalog
        .internal_static_google_cloud_datacatalog_v1beta1_UpdateTagTemplateRequest_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            com.google.cloud.datacatalog.v1beta1.UpdateTagTemplateRequest.class,
            com.google.cloud.datacatalog.v1beta1.UpdateTagTemplateRequest.Builder.class);
  }
  // Has-bits for optional message fields: 0x1 = tag_template, 0x2 = update_mask.
  private int bitField0_;
  public static final int TAG_TEMPLATE_FIELD_NUMBER = 1;
  private com.google.cloud.datacatalog.v1beta1.TagTemplate tagTemplate_;
  /**
   *
   *
   * <pre>
   * Required. The template to update. The "name" field must be set.
   * </pre>
   *
   * <code>
   * .google.cloud.datacatalog.v1beta1.TagTemplate tag_template = 1 [(.google.api.field_behavior) = REQUIRED];
   * </code>
   *
   * @return Whether the tagTemplate field is set.
   */
  @java.lang.Override
  public boolean hasTagTemplate() {
    return ((bitField0_ & 0x00000001) != 0);
  }
  /**
   *
   *
   * <pre>
   * Required. The template to update. The "name" field must be set.
   * </pre>
   *
   * <code>
   * .google.cloud.datacatalog.v1beta1.TagTemplate tag_template = 1 [(.google.api.field_behavior) = REQUIRED];
   * </code>
   *
   * @return The tagTemplate.
   */
  @java.lang.Override
  public com.google.cloud.datacatalog.v1beta1.TagTemplate getTagTemplate() {
    // Never null: the default instance stands in for an unset field.
    return tagTemplate_ == null
        ? com.google.cloud.datacatalog.v1beta1.TagTemplate.getDefaultInstance()
        : tagTemplate_;
  }
  /**
   *
   *
   * <pre>
   * Required. The template to update. The "name" field must be set.
   * </pre>
   *
   * <code>
   * .google.cloud.datacatalog.v1beta1.TagTemplate tag_template = 1 [(.google.api.field_behavior) = REQUIRED];
   * </code>
   */
  @java.lang.Override
  public com.google.cloud.datacatalog.v1beta1.TagTemplateOrBuilder getTagTemplateOrBuilder() {
    return tagTemplate_ == null
        ? com.google.cloud.datacatalog.v1beta1.TagTemplate.getDefaultInstance()
        : tagTemplate_;
  }
  public static final int UPDATE_MASK_FIELD_NUMBER = 2;
  private com.google.protobuf.FieldMask updateMask_;
  /**
   *
   *
   * <pre>
   * Names of fields whose values to overwrite on a tag template. Currently,
   * only `display_name` can be overwritten.
   *
   * In general, if this parameter is absent or empty, all modifiable fields
   * are overwritten. If such fields are non-required and omitted in the
   * request body, their values are emptied.
   * </pre>
   *
   * <code>.google.protobuf.FieldMask update_mask = 2;</code>
   *
   * @return Whether the updateMask field is set.
   */
  @java.lang.Override
  public boolean hasUpdateMask() {
    return ((bitField0_ & 0x00000002) != 0);
  }
  /**
   *
   *
   * <pre>
   * Names of fields whose values to overwrite on a tag template. Currently,
   * only `display_name` can be overwritten.
   *
   * In general, if this parameter is absent or empty, all modifiable fields
   * are overwritten. If such fields are non-required and omitted in the
   * request body, their values are emptied.
   * </pre>
   *
   * <code>.google.protobuf.FieldMask update_mask = 2;</code>
   *
   * @return The updateMask.
   */
  @java.lang.Override
  public com.google.protobuf.FieldMask getUpdateMask() {
    // Never null: falls back to the FieldMask default instance when unset.
    return updateMask_ == null ? com.google.protobuf.FieldMask.getDefaultInstance() : updateMask_;
  }
  /**
   *
   *
   * <pre>
   * Names of fields whose values to overwrite on a tag template. Currently,
   * only `display_name` can be overwritten.
   *
   * In general, if this parameter is absent or empty, all modifiable fields
   * are overwritten. If such fields are non-required and omitted in the
   * request body, their values are emptied.
   * </pre>
   *
   * <code>.google.protobuf.FieldMask update_mask = 2;</code>
   */
  @java.lang.Override
  public com.google.protobuf.FieldMaskOrBuilder getUpdateMaskOrBuilder() {
    return updateMask_ == null ? com.google.protobuf.FieldMask.getDefaultInstance() : updateMask_;
  }
  // Memoized init state: -1 unknown, 0 not initialized, 1 initialized.
  private byte memoizedIsInitialized = -1;
  @java.lang.Override
  public final boolean isInitialized() {
    byte isInitialized = memoizedIsInitialized;
    if (isInitialized == 1) return true;
    if (isInitialized == 0) return false;
    // No proto2 required fields here, so always initialized.
    memoizedIsInitialized = 1;
    return true;
  }
  @java.lang.Override
  public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
    // Serialize only fields whose has-bit is set, in field-number order.
    if (((bitField0_ & 0x00000001) != 0)) {
      output.writeMessage(1, getTagTemplate());
    }
    if (((bitField0_ & 0x00000002) != 0)) {
      output.writeMessage(2, getUpdateMask());
    }
    getUnknownFields().writeTo(output);
  }
  @java.lang.Override
  public int getSerializedSize() {
    // Size is memoized; -1 marks "not yet computed".
    int size = memoizedSize;
    if (size != -1) return size;
    size = 0;
    if (((bitField0_ & 0x00000001) != 0)) {
      size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, getTagTemplate());
    }
    if (((bitField0_ & 0x00000002) != 0)) {
      size += com.google.protobuf.CodedOutputStream.computeMessageSize(2, getUpdateMask());
    }
    size += getUnknownFields().getSerializedSize();
    memoizedSize = size;
    return size;
  }
  @java.lang.Override
  public boolean equals(final java.lang.Object obj) {
    // Field-wise equality: compares presence bits first, then values,
    // and finally the unknown-field sets.
    if (obj == this) {
      return true;
    }
    if (!(obj instanceof com.google.cloud.datacatalog.v1beta1.UpdateTagTemplateRequest)) {
      return super.equals(obj);
    }
    com.google.cloud.datacatalog.v1beta1.UpdateTagTemplateRequest other =
        (com.google.cloud.datacatalog.v1beta1.UpdateTagTemplateRequest) obj;
    if (hasTagTemplate() != other.hasTagTemplate()) return false;
    if (hasTagTemplate()) {
      if (!getTagTemplate().equals(other.getTagTemplate())) return false;
    }
    if (hasUpdateMask() != other.hasUpdateMask()) return false;
    if (hasUpdateMask()) {
      if (!getUpdateMask().equals(other.getUpdateMask())) return false;
    }
    if (!getUnknownFields().equals(other.getUnknownFields())) return false;
    return true;
  }
  @java.lang.Override
  public int hashCode() {
    // Memoized; 0 means "not yet computed" (the mixing below cannot yield 0
    // deterministically for all inputs, so this is the generator's sentinel).
    if (memoizedHashCode != 0) {
      return memoizedHashCode;
    }
    int hash = 41;
    hash = (19 * hash) + getDescriptor().hashCode();
    if (hasTagTemplate()) {
      hash = (37 * hash) + TAG_TEMPLATE_FIELD_NUMBER;
      hash = (53 * hash) + getTagTemplate().hashCode();
    }
    if (hasUpdateMask()) {
      hash = (37 * hash) + UPDATE_MASK_FIELD_NUMBER;
      hash = (53 * hash) + getUpdateMask().hashCode();
    }
    hash = (29 * hash) + getUnknownFields().hashCode();
    memoizedHashCode = hash;
    return hash;
  }
  // Standard generated parse entry points for every supported input kind
  // (ByteBuffer, ByteString, byte[], InputStream, CodedInputStream), each
  // with and without an extension registry.
  public static com.google.cloud.datacatalog.v1beta1.UpdateTagTemplateRequest parseFrom(
      java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.cloud.datacatalog.v1beta1.UpdateTagTemplateRequest parseFrom(
      java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.cloud.datacatalog.v1beta1.UpdateTagTemplateRequest parseFrom(
      com.google.protobuf.ByteString data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.cloud.datacatalog.v1beta1.UpdateTagTemplateRequest parseFrom(
      com.google.protobuf.ByteString data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.cloud.datacatalog.v1beta1.UpdateTagTemplateRequest parseFrom(byte[] data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.cloud.datacatalog.v1beta1.UpdateTagTemplateRequest parseFrom(
      byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.cloud.datacatalog.v1beta1.UpdateTagTemplateRequest parseFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }
  public static com.google.cloud.datacatalog.v1beta1.UpdateTagTemplateRequest parseFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }
  // Delimited variants read a varint length prefix before the message bytes.
  public static com.google.cloud.datacatalog.v1beta1.UpdateTagTemplateRequest parseDelimitedFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
  }
  public static com.google.cloud.datacatalog.v1beta1.UpdateTagTemplateRequest parseDelimitedFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
        PARSER, input, extensionRegistry);
  }
  public static com.google.cloud.datacatalog.v1beta1.UpdateTagTemplateRequest parseFrom(
      com.google.protobuf.CodedInputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }
  public static com.google.cloud.datacatalog.v1beta1.UpdateTagTemplateRequest parseFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }
  // Builder factory methods. toBuilder() on the default instance returns a
  // fresh Builder to avoid mutating shared state.
  @java.lang.Override
  public Builder newBuilderForType() {
    return newBuilder();
  }
  public static Builder newBuilder() {
    return DEFAULT_INSTANCE.toBuilder();
  }
  public static Builder newBuilder(
      com.google.cloud.datacatalog.v1beta1.UpdateTagTemplateRequest prototype) {
    return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
  }
  @java.lang.Override
  public Builder toBuilder() {
    return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
  }
  @java.lang.Override
  protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
    Builder builder = new Builder(parent);
    return builder;
  }
/**
*
*
* <pre>
* Request message for
* [UpdateTagTemplate][google.cloud.datacatalog.v1beta1.DataCatalog.UpdateTagTemplate].
* </pre>
*
* Protobuf type {@code google.cloud.datacatalog.v1beta1.UpdateTagTemplateRequest}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.cloud.datacatalog.v1beta1.UpdateTagTemplateRequest)
com.google.cloud.datacatalog.v1beta1.UpdateTagTemplateRequestOrBuilder {
    public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
      return com.google.cloud.datacatalog.v1beta1.Datacatalog
          .internal_static_google_cloud_datacatalog_v1beta1_UpdateTagTemplateRequest_descriptor;
    }
    @java.lang.Override
    protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return com.google.cloud.datacatalog.v1beta1.Datacatalog
          .internal_static_google_cloud_datacatalog_v1beta1_UpdateTagTemplateRequest_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              com.google.cloud.datacatalog.v1beta1.UpdateTagTemplateRequest.class,
              com.google.cloud.datacatalog.v1beta1.UpdateTagTemplateRequest.Builder.class);
    }
    // Construct using com.google.cloud.datacatalog.v1beta1.UpdateTagTemplateRequest.newBuilder()
    private Builder() {
      maybeForceBuilderInitialization();
    }
    private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
      super(parent);
      maybeForceBuilderInitialization();
    }
    private void maybeForceBuilderInitialization() {
      // Eagerly create nested field builders only when the runtime flag
      // alwaysUseFieldBuilders is on (used for nested-builder support).
      if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) {
        getTagTemplateFieldBuilder();
        getUpdateMaskFieldBuilder();
      }
    }
    @java.lang.Override
    public Builder clear() {
      // Resets all has-bits and disposes nested field builders so the
      // Builder returns to its pristine state.
      super.clear();
      bitField0_ = 0;
      tagTemplate_ = null;
      if (tagTemplateBuilder_ != null) {
        tagTemplateBuilder_.dispose();
        tagTemplateBuilder_ = null;
      }
      updateMask_ = null;
      if (updateMaskBuilder_ != null) {
        updateMaskBuilder_.dispose();
        updateMaskBuilder_ = null;
      }
      return this;
    }
    @java.lang.Override
    public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
      return com.google.cloud.datacatalog.v1beta1.Datacatalog
          .internal_static_google_cloud_datacatalog_v1beta1_UpdateTagTemplateRequest_descriptor;
    }
    @java.lang.Override
    public com.google.cloud.datacatalog.v1beta1.UpdateTagTemplateRequest
        getDefaultInstanceForType() {
      return com.google.cloud.datacatalog.v1beta1.UpdateTagTemplateRequest.getDefaultInstance();
    }
    @java.lang.Override
    public com.google.cloud.datacatalog.v1beta1.UpdateTagTemplateRequest build() {
      // build() enforces isInitialized(); buildPartial() does not.
      com.google.cloud.datacatalog.v1beta1.UpdateTagTemplateRequest result = buildPartial();
      if (!result.isInitialized()) {
        throw newUninitializedMessageException(result);
      }
      return result;
    }
    @java.lang.Override
    public com.google.cloud.datacatalog.v1beta1.UpdateTagTemplateRequest buildPartial() {
      com.google.cloud.datacatalog.v1beta1.UpdateTagTemplateRequest result =
          new com.google.cloud.datacatalog.v1beta1.UpdateTagTemplateRequest(this);
      if (bitField0_ != 0) {
        buildPartial0(result);
      }
      onBuilt();
      return result;
    }
    private void buildPartial0(
        com.google.cloud.datacatalog.v1beta1.UpdateTagTemplateRequest result) {
      // Copies each set field from builder state (or its field builder)
      // into the immutable result, carrying the has-bits across.
      int from_bitField0_ = bitField0_;
      int to_bitField0_ = 0;
      if (((from_bitField0_ & 0x00000001) != 0)) {
        result.tagTemplate_ =
            tagTemplateBuilder_ == null ? tagTemplate_ : tagTemplateBuilder_.build();
        to_bitField0_ |= 0x00000001;
      }
      if (((from_bitField0_ & 0x00000002) != 0)) {
        result.updateMask_ = updateMaskBuilder_ == null ? updateMask_ : updateMaskBuilder_.build();
        to_bitField0_ |= 0x00000002;
      }
      result.bitField0_ |= to_bitField0_;
    }
    // Reflection-based field mutators: pure delegations to the base class,
    // overridden only to narrow the return type to this Builder.
    @java.lang.Override
    public Builder clone() {
      return super.clone();
    }
    @java.lang.Override
    public Builder setField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.setField(field, value);
    }
    @java.lang.Override
    public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
      return super.clearField(field);
    }
    @java.lang.Override
    public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
      return super.clearOneof(oneof);
    }
    @java.lang.Override
    public Builder setRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
      return super.setRepeatedField(field, index, value);
    }
    @java.lang.Override
    public Builder addRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.addRepeatedField(field, value);
    }
    @java.lang.Override
    public Builder mergeFrom(com.google.protobuf.Message other) {
      // Dispatch to the typed overload when possible; otherwise fall back
      // to the reflective merge in the base class.
      if (other instanceof com.google.cloud.datacatalog.v1beta1.UpdateTagTemplateRequest) {
        return mergeFrom((com.google.cloud.datacatalog.v1beta1.UpdateTagTemplateRequest) other);
      } else {
        super.mergeFrom(other);
        return this;
      }
    }
    public Builder mergeFrom(com.google.cloud.datacatalog.v1beta1.UpdateTagTemplateRequest other) {
      // Merging the default instance is a no-op.
      if (other
          == com.google.cloud.datacatalog.v1beta1.UpdateTagTemplateRequest.getDefaultInstance())
        return this;
      if (other.hasTagTemplate()) {
        mergeTagTemplate(other.getTagTemplate());
      }
      if (other.hasUpdateMask()) {
        mergeUpdateMask(other.getUpdateMask());
      }
      this.mergeUnknownFields(other.getUnknownFields());
      onChanged();
      return this;
    }
    @java.lang.Override
    public final boolean isInitialized() {
      return true;
    }
    @java.lang.Override
    public Builder mergeFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      if (extensionRegistry == null) {
        throw new java.lang.NullPointerException();
      }
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          // Tags encode (field_number << 3) | wire_type:
          // 10 = field 1 length-delimited, 18 = field 2 length-delimited.
          switch (tag) {
            case 0:
              done = true;
              break;
            case 10:
              {
                input.readMessage(getTagTemplateFieldBuilder().getBuilder(), extensionRegistry);
                bitField0_ |= 0x00000001;
                break;
              } // case 10
            case 18:
              {
                input.readMessage(getUpdateMaskFieldBuilder().getBuilder(), extensionRegistry);
                bitField0_ |= 0x00000002;
                break;
              } // case 18
            default:
              {
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
          } // switch (tag)
        } // while (!done)
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.unwrapIOException();
      } finally {
        onChanged();
      } // finally
      return this;
    }
    // Builder-side has-bits: 0x1 = tag_template, 0x2 = update_mask.
    private int bitField0_;
    private com.google.cloud.datacatalog.v1beta1.TagTemplate tagTemplate_;
    private com.google.protobuf.SingleFieldBuilderV3<
            com.google.cloud.datacatalog.v1beta1.TagTemplate,
            com.google.cloud.datacatalog.v1beta1.TagTemplate.Builder,
            com.google.cloud.datacatalog.v1beta1.TagTemplateOrBuilder>
        tagTemplateBuilder_;
    /**
     *
     *
     * <pre>
     * Required. The template to update. The "name" field must be set.
     * </pre>
     *
     * <code>
     * .google.cloud.datacatalog.v1beta1.TagTemplate tag_template = 1 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     *
     * @return Whether the tagTemplate field is set.
     */
    public boolean hasTagTemplate() {
      return ((bitField0_ & 0x00000001) != 0);
    }
    /**
     *
     *
     * <pre>
     * Required. The template to update. The "name" field must be set.
     * </pre>
     *
     * <code>
     * .google.cloud.datacatalog.v1beta1.TagTemplate tag_template = 1 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     *
     * @return The tagTemplate.
     */
    public com.google.cloud.datacatalog.v1beta1.TagTemplate getTagTemplate() {
      // Reads from the field builder when one exists; otherwise from the
      // plain field, defaulting when unset. Never returns null.
      if (tagTemplateBuilder_ == null) {
        return tagTemplate_ == null
            ? com.google.cloud.datacatalog.v1beta1.TagTemplate.getDefaultInstance()
            : tagTemplate_;
      } else {
        return tagTemplateBuilder_.getMessage();
      }
    }
    /**
     *
     *
     * <pre>
     * Required. The template to update. The "name" field must be set.
     * </pre>
     *
     * <code>
     * .google.cloud.datacatalog.v1beta1.TagTemplate tag_template = 1 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     */
    public Builder setTagTemplate(com.google.cloud.datacatalog.v1beta1.TagTemplate value) {
      if (tagTemplateBuilder_ == null) {
        if (value == null) {
          throw new NullPointerException();
        }
        tagTemplate_ = value;
      } else {
        tagTemplateBuilder_.setMessage(value);
      }
      bitField0_ |= 0x00000001;
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Required. The template to update. The "name" field must be set.
     * </pre>
     *
     * <code>
     * .google.cloud.datacatalog.v1beta1.TagTemplate tag_template = 1 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     */
    public Builder setTagTemplate(
        com.google.cloud.datacatalog.v1beta1.TagTemplate.Builder builderForValue) {
      if (tagTemplateBuilder_ == null) {
        tagTemplate_ = builderForValue.build();
      } else {
        tagTemplateBuilder_.setMessage(builderForValue.build());
      }
      bitField0_ |= 0x00000001;
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Required. The template to update. The "name" field must be set.
     * </pre>
     *
     * <code>
     * .google.cloud.datacatalog.v1beta1.TagTemplate tag_template = 1 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     */
    public Builder mergeTagTemplate(com.google.cloud.datacatalog.v1beta1.TagTemplate value) {
      if (tagTemplateBuilder_ == null) {
        // Reference comparison with the default instance: merge only onto
        // an existing non-default value, otherwise adopt `value` directly.
        if (((bitField0_ & 0x00000001) != 0)
            && tagTemplate_ != null
            && tagTemplate_
                != com.google.cloud.datacatalog.v1beta1.TagTemplate.getDefaultInstance()) {
          getTagTemplateBuilder().mergeFrom(value);
        } else {
          tagTemplate_ = value;
        }
      } else {
        tagTemplateBuilder_.mergeFrom(value);
      }
      if (tagTemplate_ != null) {
        bitField0_ |= 0x00000001;
        onChanged();
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * Required. The template to update. The "name" field must be set.
     * </pre>
     *
     * <code>
     * .google.cloud.datacatalog.v1beta1.TagTemplate tag_template = 1 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     */
    public Builder clearTagTemplate() {
      bitField0_ = (bitField0_ & ~0x00000001);
      tagTemplate_ = null;
      if (tagTemplateBuilder_ != null) {
        tagTemplateBuilder_.dispose();
        tagTemplateBuilder_ = null;
      }
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Required. The template to update. The "name" field must be set.
     * </pre>
     *
     * <code>
     * .google.cloud.datacatalog.v1beta1.TagTemplate tag_template = 1 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     */
    public com.google.cloud.datacatalog.v1beta1.TagTemplate.Builder getTagTemplateBuilder() {
      // Exposing the mutable sub-builder marks the field as set.
      bitField0_ |= 0x00000001;
      onChanged();
      return getTagTemplateFieldBuilder().getBuilder();
    }
    /**
     *
     *
     * <pre>
     * Required. The template to update. The "name" field must be set.
     * </pre>
     *
     * <code>
     * .google.cloud.datacatalog.v1beta1.TagTemplate tag_template = 1 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     */
    public com.google.cloud.datacatalog.v1beta1.TagTemplateOrBuilder getTagTemplateOrBuilder() {
      if (tagTemplateBuilder_ != null) {
        return tagTemplateBuilder_.getMessageOrBuilder();
      } else {
        return tagTemplate_ == null
            ? com.google.cloud.datacatalog.v1beta1.TagTemplate.getDefaultInstance()
            : tagTemplate_;
      }
    }
    /**
     *
     *
     * <pre>
     * Required. The template to update. The "name" field must be set.
     * </pre>
     *
     * <code>
     * .google.cloud.datacatalog.v1beta1.TagTemplate tag_template = 1 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     */
    private com.google.protobuf.SingleFieldBuilderV3<
            com.google.cloud.datacatalog.v1beta1.TagTemplate,
            com.google.cloud.datacatalog.v1beta1.TagTemplate.Builder,
            com.google.cloud.datacatalog.v1beta1.TagTemplateOrBuilder>
        getTagTemplateFieldBuilder() {
      // Lazy init; the current value is handed over to the field builder.
      if (tagTemplateBuilder_ == null) {
        tagTemplateBuilder_ =
            new com.google.protobuf.SingleFieldBuilderV3<
                com.google.cloud.datacatalog.v1beta1.TagTemplate,
                com.google.cloud.datacatalog.v1beta1.TagTemplate.Builder,
                com.google.cloud.datacatalog.v1beta1.TagTemplateOrBuilder>(
                getTagTemplate(), getParentForChildren(), isClean());
        tagTemplate_ = null;
      }
      return tagTemplateBuilder_;
    }
private com.google.protobuf.FieldMask updateMask_;
private com.google.protobuf.SingleFieldBuilderV3<
com.google.protobuf.FieldMask,
com.google.protobuf.FieldMask.Builder,
com.google.protobuf.FieldMaskOrBuilder>
updateMaskBuilder_;
/**
*
*
* <pre>
* Names of fields whose values to overwrite on a tag template. Currently,
* only `display_name` can be overwritten.
*
* In general, if this parameter is absent or empty, all modifiable fields
* are overwritten. If such fields are non-required and omitted in the
* request body, their values are emptied.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2;</code>
*
* @return Whether the updateMask field is set.
*/
public boolean hasUpdateMask() {
return ((bitField0_ & 0x00000002) != 0);
}
/**
*
*
* <pre>
* Names of fields whose values to overwrite on a tag template. Currently,
* only `display_name` can be overwritten.
*
* In general, if this parameter is absent or empty, all modifiable fields
* are overwritten. If such fields are non-required and omitted in the
* request body, their values are emptied.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2;</code>
*
* @return The updateMask.
*/
public com.google.protobuf.FieldMask getUpdateMask() {
if (updateMaskBuilder_ == null) {
return updateMask_ == null
? com.google.protobuf.FieldMask.getDefaultInstance()
: updateMask_;
} else {
return updateMaskBuilder_.getMessage();
}
}
/**
*
*
* <pre>
* Names of fields whose values to overwrite on a tag template. Currently,
* only `display_name` can be overwritten.
*
* In general, if this parameter is absent or empty, all modifiable fields
* are overwritten. If such fields are non-required and omitted in the
* request body, their values are emptied.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2;</code>
*/
public Builder setUpdateMask(com.google.protobuf.FieldMask value) {
if (updateMaskBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
updateMask_ = value;
} else {
updateMaskBuilder_.setMessage(value);
}
bitField0_ |= 0x00000002;
onChanged();
return this;
}
/**
*
*
* <pre>
* Names of fields whose values to overwrite on a tag template. Currently,
* only `display_name` can be overwritten.
*
* In general, if this parameter is absent or empty, all modifiable fields
* are overwritten. If such fields are non-required and omitted in the
* request body, their values are emptied.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2;</code>
*/
public Builder setUpdateMask(com.google.protobuf.FieldMask.Builder builderForValue) {
if (updateMaskBuilder_ == null) {
updateMask_ = builderForValue.build();
} else {
updateMaskBuilder_.setMessage(builderForValue.build());
}
bitField0_ |= 0x00000002;
onChanged();
return this;
}
    /**
     *
     *
     * <pre>
     * Names of fields whose values to overwrite on a tag template. Currently,
     * only `display_name` can be overwritten.
     *
     * In general, if this parameter is absent or empty, all modifiable fields
     * are overwritten. If such fields are non-required and omitted in the
     * request body, their values are emptied.
     * </pre>
     *
     * <code>.google.protobuf.FieldMask update_mask = 2;</code>
     */
    public Builder mergeUpdateMask(com.google.protobuf.FieldMask value) {
      if (updateMaskBuilder_ == null) {
        // No nested builder: merge into the plain field (proto3 message-merge semantics).
        if (((bitField0_ & 0x00000002) != 0)
            && updateMask_ != null
            && updateMask_ != com.google.protobuf.FieldMask.getDefaultInstance()) {
          getUpdateMaskBuilder().mergeFrom(value);
        } else {
          updateMask_ = value;
        }
      } else {
        updateMaskBuilder_.mergeFrom(value);
      }
      if (updateMask_ != null) {
        // Set the has-bit only when a mask is actually present after the merge.
        bitField0_ |= 0x00000002;
        onChanged();
      }
      return this;
    }
/**
*
*
* <pre>
* Names of fields whose values to overwrite on a tag template. Currently,
* only `display_name` can be overwritten.
*
* In general, if this parameter is absent or empty, all modifiable fields
* are overwritten. If such fields are non-required and omitted in the
* request body, their values are emptied.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2;</code>
*/
public Builder clearUpdateMask() {
bitField0_ = (bitField0_ & ~0x00000002);
updateMask_ = null;
if (updateMaskBuilder_ != null) {
updateMaskBuilder_.dispose();
updateMaskBuilder_ = null;
}
onChanged();
return this;
}
/**
*
*
* <pre>
* Names of fields whose values to overwrite on a tag template. Currently,
* only `display_name` can be overwritten.
*
* In general, if this parameter is absent or empty, all modifiable fields
* are overwritten. If such fields are non-required and omitted in the
* request body, their values are emptied.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2;</code>
*/
public com.google.protobuf.FieldMask.Builder getUpdateMaskBuilder() {
bitField0_ |= 0x00000002;
onChanged();
return getUpdateMaskFieldBuilder().getBuilder();
}
/**
*
*
* <pre>
* Names of fields whose values to overwrite on a tag template. Currently,
* only `display_name` can be overwritten.
*
* In general, if this parameter is absent or empty, all modifiable fields
* are overwritten. If such fields are non-required and omitted in the
* request body, their values are emptied.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2;</code>
*/
public com.google.protobuf.FieldMaskOrBuilder getUpdateMaskOrBuilder() {
if (updateMaskBuilder_ != null) {
return updateMaskBuilder_.getMessageOrBuilder();
} else {
return updateMask_ == null
? com.google.protobuf.FieldMask.getDefaultInstance()
: updateMask_;
}
}
/**
*
*
* <pre>
* Names of fields whose values to overwrite on a tag template. Currently,
* only `display_name` can be overwritten.
*
* In general, if this parameter is absent or empty, all modifiable fields
* are overwritten. If such fields are non-required and omitted in the
* request body, their values are emptied.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2;</code>
*/
private com.google.protobuf.SingleFieldBuilderV3<
com.google.protobuf.FieldMask,
com.google.protobuf.FieldMask.Builder,
com.google.protobuf.FieldMaskOrBuilder>
getUpdateMaskFieldBuilder() {
if (updateMaskBuilder_ == null) {
updateMaskBuilder_ =
new com.google.protobuf.SingleFieldBuilderV3<
com.google.protobuf.FieldMask,
com.google.protobuf.FieldMask.Builder,
com.google.protobuf.FieldMaskOrBuilder>(
getUpdateMask(), getParentForChildren(), isClean());
updateMask_ = null;
}
return updateMaskBuilder_;
}
@java.lang.Override
public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:google.cloud.datacatalog.v1beta1.UpdateTagTemplateRequest)
}
// @@protoc_insertion_point(class_scope:google.cloud.datacatalog.v1beta1.UpdateTagTemplateRequest)
private static final com.google.cloud.datacatalog.v1beta1.UpdateTagTemplateRequest
DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new com.google.cloud.datacatalog.v1beta1.UpdateTagTemplateRequest();
}
public static com.google.cloud.datacatalog.v1beta1.UpdateTagTemplateRequest getDefaultInstance() {
return DEFAULT_INSTANCE;
}
  // Wire-format parser. On any failure the partially-built message is attached to the thrown
  // InvalidProtocolBufferException so callers can inspect what was decoded before the error.
  private static final com.google.protobuf.Parser<UpdateTagTemplateRequest> PARSER =
      new com.google.protobuf.AbstractParser<UpdateTagTemplateRequest>() {
        @java.lang.Override
        public UpdateTagTemplateRequest parsePartialFrom(
            com.google.protobuf.CodedInputStream input,
            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
            throws com.google.protobuf.InvalidProtocolBufferException {
          Builder builder = newBuilder();
          try {
            builder.mergeFrom(input, extensionRegistry);
          } catch (com.google.protobuf.InvalidProtocolBufferException e) {
            throw e.setUnfinishedMessage(builder.buildPartial());
          } catch (com.google.protobuf.UninitializedMessageException e) {
            throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
          } catch (java.io.IOException e) {
            // Wrap plain I/O failures so callers only ever see protobuf exceptions.
            throw new com.google.protobuf.InvalidProtocolBufferException(e)
                .setUnfinishedMessage(builder.buildPartial());
          }
          return builder.buildPartial();
        }
      };
public static com.google.protobuf.Parser<UpdateTagTemplateRequest> parser() {
return PARSER;
}
@java.lang.Override
public com.google.protobuf.Parser<UpdateTagTemplateRequest> getParserForType() {
return PARSER;
}
@java.lang.Override
public com.google.cloud.datacatalog.v1beta1.UpdateTagTemplateRequest getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
/*
* SPDX-License-Identifier: Apache-2.0
* Copyright Red Hat Inc. and Hibernate Authors
*/
package org.hibernate.dialect.function.json;
import org.checkerframework.checker.nullness.qual.Nullable;
import org.hibernate.QueryException;
import org.hibernate.engine.spi.SessionFactoryImplementor;
import org.hibernate.internal.util.NullnessUtil;
import org.hibernate.metamodel.mapping.JdbcMapping;
import org.hibernate.metamodel.mapping.JdbcMappingContainer;
import org.hibernate.metamodel.mapping.SelectableMapping;
import org.hibernate.metamodel.mapping.SelectablePath;
import org.hibernate.metamodel.mapping.internal.SelectableMappingImpl;
import org.hibernate.metamodel.model.domain.ReturnableType;
import org.hibernate.query.sqm.tuple.internal.AnonymousTupleTableGroupProducer;
import org.hibernate.query.spi.QueryEngine;
import org.hibernate.query.sqm.ComparisonOperator;
import org.hibernate.query.sqm.function.FunctionRenderer;
import org.hibernate.query.sqm.function.SelfRenderingFunctionSqlAstExpression;
import org.hibernate.query.sqm.function.SelfRenderingSqmSetReturningFunction;
import org.hibernate.query.sqm.sql.SqmToSqlAstConverter;
import org.hibernate.query.sqm.tree.SqmTypedNode;
import org.hibernate.query.sqm.tree.expression.SqmExpression;
import org.hibernate.query.sqm.tree.expression.SqmJsonTableFunction;
import org.hibernate.spi.NavigablePath;
import org.hibernate.sql.ast.SqlAstJoinType;
import org.hibernate.sql.ast.SqlAstTranslator;
import org.hibernate.sql.ast.spi.SqlAppender;
import org.hibernate.sql.ast.tree.SqlAstNode;
import org.hibernate.sql.ast.tree.cte.CteContainer;
import org.hibernate.sql.ast.tree.expression.CastTarget;
import org.hibernate.sql.ast.tree.expression.ColumnReference;
import org.hibernate.sql.ast.tree.expression.Expression;
import org.hibernate.sql.ast.tree.expression.JsonPathPassingClause;
import org.hibernate.sql.ast.tree.expression.JsonQueryEmptyBehavior;
import org.hibernate.sql.ast.tree.expression.JsonQueryWrapMode;
import org.hibernate.sql.ast.tree.expression.JsonTableColumnDefinition;
import org.hibernate.sql.ast.tree.expression.JsonTableColumnsClause;
import org.hibernate.sql.ast.tree.expression.JsonTableErrorBehavior;
import org.hibernate.sql.ast.tree.expression.JsonTableExistsColumnDefinition;
import org.hibernate.sql.ast.tree.expression.JsonTableNestedColumnDefinition;
import org.hibernate.sql.ast.tree.expression.JsonTableOrdinalityColumnDefinition;
import org.hibernate.sql.ast.tree.expression.JsonTableQueryColumnDefinition;
import org.hibernate.sql.ast.tree.expression.JsonTableValueColumnDefinition;
import org.hibernate.sql.ast.tree.expression.JsonValueEmptyBehavior;
import org.hibernate.sql.ast.tree.expression.Literal;
import org.hibernate.sql.ast.tree.expression.QueryTransformer;
import org.hibernate.sql.ast.tree.expression.SelfRenderingExpression;
import org.hibernate.sql.ast.tree.from.FunctionTableGroup;
import org.hibernate.sql.ast.tree.from.TableGroup;
import org.hibernate.sql.ast.tree.from.TableGroupJoin;
import org.hibernate.sql.ast.tree.predicate.ComparisonPredicate;
import org.hibernate.sql.ast.tree.predicate.Predicate;
import org.hibernate.sql.ast.tree.predicate.PredicateContainer;
import org.hibernate.sql.ast.tree.select.QuerySpec;
import org.hibernate.type.BasicType;
import org.hibernate.type.SqlTypes;
import org.hibernate.type.spi.TypeConfiguration;
import java.util.ArrayList;
import java.util.List;
import java.util.Set;
import static java.util.Collections.emptyList;
/**
* H2 json_table function.
* <p>
* H2 does not support "lateral" i.e. the use of a from node within another,
* but we can apply the same trick that we already applied everywhere else for H2,
* which is to join a sequence table to emulate array element rows
* and eliminate non-existing array elements by checking the index against array length.
* Finally, we rewrite the selection expressions to access the array by joined sequence index.
*/
public class H2JsonTableFunction extends JsonTableFunction {
private final int maximumArraySize;
	/**
	 * Creates the H2 {@code json_table()} emulation function.
	 *
	 * @param maximumArraySize upper bound for the {@code system_range(1,N)} joins that emulate
	 *                         array flattening, since H2 has no lateral joins
	 * @param typeConfiguration the type configuration used for result type resolution
	 */
	public H2JsonTableFunction(int maximumArraySize, TypeConfiguration typeConfiguration) {
		super( new H2JsonTableSetReturningFunctionTypeResolver(), typeConfiguration );
		this.maximumArraySize = maximumArraySize;
	}
	/**
	 * Creates the SQM node for a {@code json_table()} invocation. The returned node overrides
	 * {@code convertToSqlAst} so that, after the regular SQL AST translation, a
	 * {@link JsonTableQueryTransformer} is registered which applies the
	 * {@code array_length(...) >= index} join predicates required by the H2 emulation.
	 */
	@Override
	protected <T> SelfRenderingSqmSetReturningFunction<T> generateSqmSetReturningFunctionExpression(
			List<? extends SqmTypedNode<?>> sqmArguments,
			QueryEngine queryEngine) {
		//noinspection unchecked
		return new SqmJsonTableFunction<>(
				this,
				this,
				getArgumentsValidator(),
				getSetReturningTypeResolver(),
				queryEngine.getCriteriaBuilder(),
				(SqmExpression<?>) sqmArguments.get( 0 ),
				// The json path argument is optional
				sqmArguments.size() > 1 ? (SqmExpression<String>) sqmArguments.get( 1 ) : null
		) {
			@Override
			public TableGroup convertToSqlAst(
					NavigablePath navigablePath,
					String identifierVariable,
					boolean lateral,
					boolean canUseInnerJoins,
					boolean withOrdinality,
					SqmToSqlAstConverter walker) {
				// Register a transformer that adds a join predicate "array_length(array) <= index"
				final FunctionTableGroup functionTableGroup = (FunctionTableGroup) super.convertToSqlAst(
						navigablePath,
						identifierVariable,
						lateral,
						canUseInnerJoins,
						withOrdinality,
						walker
				);
				final JsonTableArguments arguments = JsonTableArguments.extract(
						functionTableGroup.getPrimaryTableReference().getFunctionExpression().getArguments()
				);
				// Register a query transformer to register a join predicate
				walker.registerQueryTransformer(
						new JsonTableQueryTransformer( functionTableGroup, arguments, maximumArraySize ) );
				return functionTableGroup;
			}
		};
	}
private static class JsonTableQueryTransformer implements QueryTransformer {
		// The translated json_table() table group whose joins this transformer augments.
		private final FunctionTableGroup functionTableGroup;
		// Extracted json_table() call arguments (document, path, passing clause, columns).
		private final JsonTableArguments arguments;
		// Size of the system_range(1,N) used to emulate array flattening.
		private final int maximumArraySize;
		public JsonTableQueryTransformer(FunctionTableGroup functionTableGroup, JsonTableArguments arguments, int maximumArraySize) {
			this.functionTableGroup = functionTableGroup;
			this.arguments = arguments;
			this.maximumArraySize = maximumArraySize;
		}
@Override
public QuerySpec transform(CteContainer cteContainer, QuerySpec querySpec, SqmToSqlAstConverter converter) {
final boolean isArray;
if ( arguments.jsonPath() != null ) {
if ( !( arguments.jsonPath() instanceof Literal literal) ) {
throw new QueryException( "H2 json_table() only supports literal json paths, but got " + arguments.jsonPath() );
}
final String rawJsonPath = (String) literal.getLiteralValue();
isArray = isArrayAccess( rawJsonPath );
}
else {
// We have to assume this is an array
isArray = true;
}
if ( isArray ) {
final TableGroup parentTableGroup = querySpec.getFromClause().queryTableGroups(
tg -> tg.findTableGroupJoin( functionTableGroup ) == null ? null : tg
);
final PredicateContainer predicateContainer;
if ( parentTableGroup != null ) {
predicateContainer = parentTableGroup.findTableGroupJoin( functionTableGroup );
}
else {
predicateContainer = querySpec;
}
final BasicType<Integer> integerType = converter.getSqmCreationContext()
.getNodeBuilder()
.getIntegerType();
final Expression jsonDocument;
if ( arguments.jsonDocument().getColumnReference() == null ) {
jsonDocument = new ColumnReference(
functionTableGroup.getPrimaryTableReference().getIdentificationVariable() + "_",
"d",
false,
null,
arguments.jsonDocument().getExpressionType().getSingleJdbcMapping()
);
}
else {
jsonDocument = arguments.jsonDocument();
}
final Expression lhs = new ArrayLengthExpression( jsonDocument, integerType );
final Expression rhs = new ColumnReference(
functionTableGroup.getPrimaryTableReference().getIdentificationVariable(),
// The default column name for the system_range function
"x",
false,
null,
integerType
);
predicateContainer.applyPredicate(
new ComparisonPredicate( lhs, ComparisonOperator.GREATER_THAN_OR_EQUAL, rhs ) );
}
final int lastArrayIndex = getLastArrayIndex( arguments.columnsClause(), 0 );
if ( lastArrayIndex != 0 ) {
// Create a synthetic function table group which will render system_range() joins
// for every nested path for arrays
final String tableIdentifierVariable = functionTableGroup.getPrimaryTableReference()
.getIdentificationVariable();
final Expression jsonDocument;
if ( arguments.jsonDocument().getColumnReference() == null ) {
jsonDocument = new ColumnReference(
tableIdentifierVariable + "_",
"d",
false,
null,
arguments.jsonDocument().getExpressionType().getSingleJdbcMapping()
);
}
else {
jsonDocument = arguments.jsonDocument();
}
final TableGroup tableGroup = new FunctionTableGroup(
functionTableGroup.getNavigablePath().append( "{synthetic}" ),
null,
new SelfRenderingFunctionSqlAstExpression(
"json_table_emulation",
new NestedPathFunctionRenderer(
tableIdentifierVariable,
arguments,
jsonDocument,
maximumArraySize,
lastArrayIndex
),
emptyList(),
null,
null
),
tableIdentifierVariable + "_synthetic_",
emptyList(),
Set.of( "" ),
false,
false,
true,
converter.getCreationContext().getSessionFactory()
);
final BasicType<Integer> integerType = converter.getSqmCreationContext()
.getNodeBuilder()
.getIntegerType();
// The join predicate compares the length of the last array expression against system_range() index.
// Since a table function expression can't render its own `on` clause, this split of logic is necessary
final Expression lhs = new ArrayLengthExpression(
determineLastArrayExpression( tableIdentifierVariable, arguments, jsonDocument ),
integerType
);
final Expression rhs = new ColumnReference(
tableIdentifierVariable + "_" + lastArrayIndex + "_",
// The default column name for the system_range function
"x",
false,
null,
integerType
);
final Predicate predicate = new ComparisonPredicate( lhs, ComparisonOperator.GREATER_THAN_OR_EQUAL, rhs );
functionTableGroup.addTableGroupJoin(
new TableGroupJoin( tableGroup.getNavigablePath(), SqlAstJoinType.LEFT, tableGroup, predicate )
);
}
return querySpec;
}
private static Expression determineLastArrayExpression(String tableIdentifierVariable, JsonTableArguments arguments, Expression jsonDocument) {
final ArrayExpressionEntry arrayExpressionEntry = determineLastArrayExpression(
tableIdentifierVariable,
determineJsonElement( tableIdentifierVariable, arguments, jsonDocument ),
arguments.columnsClause(),
new ArrayExpressionEntry( 0, null )
);
return NullnessUtil.castNonNull( arrayExpressionEntry.expression() );
}
record ArrayExpressionEntry(int arrayIndex, @Nullable Expression expression) {
}
private static ArrayExpressionEntry determineLastArrayExpression(String tableIdentifierVariable, Expression parentJson, JsonTableColumnsClause jsonTableColumnsClause, ArrayExpressionEntry parentEntry) {
// Depth-first traversal to obtain the last nested path that refers to an array within this tree
ArrayExpressionEntry currentArrayEntry = parentEntry;
for ( JsonTableColumnDefinition columnDefinition : jsonTableColumnsClause.getColumnDefinitions() ) {
if ( columnDefinition instanceof JsonTableNestedColumnDefinition nestedColumnDefinition ) {
final String rawJsonPath = nestedColumnDefinition.jsonPath();
final boolean isArray = isArrayAccess( rawJsonPath );
final String jsonPath = isArray ? rawJsonPath.substring( 0, rawJsonPath.length() - 3 ) : rawJsonPath;
final Expression jsonQueryResult = new JsonValueExpression( parentJson, jsonPath, null );
final Expression jsonElement;
final ArrayExpressionEntry nextArrayExpression;
if ( isArray ) {
final int nextArrayIndex = currentArrayEntry.arrayIndex() + 1;
jsonElement = new ArrayAccessExpression( jsonQueryResult, ordinalityExpression( tableIdentifierVariable, nextArrayIndex ) );
nextArrayExpression = new ArrayExpressionEntry( nextArrayIndex, jsonQueryResult );
}
else {
jsonElement = jsonQueryResult;
nextArrayExpression = currentArrayEntry;
}
currentArrayEntry = determineLastArrayExpression(
tableIdentifierVariable,
jsonElement,
nestedColumnDefinition.columns(),
nextArrayExpression
);
}
}
return currentArrayEntry;
}
		/**
		 * Builds the expression for the "current" processing element: applies the top-level json
		 * path (which must be a literal on H2) to the document and, when the path selects an
		 * array ({@code [*]} suffix, or no path at all), dereferences it by the
		 * {@code system_range} index column {@code <var>.x}.
		 */
		private static Expression determineJsonElement(String tableIdentifierVariable, JsonTableArguments arguments, Expression jsonDocument) {
			// Applies the json path and array index access to obtain the "current" processing element
			final boolean isArray;
			final Expression jsonQueryResult;
			if ( arguments.jsonPath() != null ) {
				if ( !(arguments.jsonPath() instanceof Literal literal) ) {
					throw new QueryException(
							"H2 json_table() only supports literal json paths, but got " + arguments.jsonPath() );
				}
				final String rawJsonPath = (String) literal.getLiteralValue();
				isArray = isArrayAccess( rawJsonPath );
				// Strip the trailing "[*]" so the remaining path addresses the array itself
				final String jsonPath = isArray ? rawJsonPath.substring( 0, rawJsonPath.length() - 3 ) : rawJsonPath;
				jsonQueryResult = "$".equals( jsonPath )
						? jsonDocument
						: new JsonValueExpression( jsonDocument, arguments.isJsonType(), jsonPath, arguments.passingClause() );
			}
			else {
				// We have to assume this is an array
				isArray = true;
				jsonQueryResult = jsonDocument;
			}
			final Expression jsonElement;
			if ( isArray ) {
				jsonElement = new ArrayAccessExpression( jsonQueryResult, tableIdentifierVariable + ".x" );
			}
			else {
				jsonElement = jsonQueryResult;
			}
			return jsonElement;
		}
private static class NestedPathFunctionRenderer implements FunctionRenderer {
private final String tableIdentifierVariable;
private final JsonTableArguments arguments;
private final Expression jsonDocument;
private final int maximumArraySize;
private final int lastArrayIndex;
public NestedPathFunctionRenderer(String tableIdentifierVariable, JsonTableArguments arguments, Expression jsonDocument, int maximumArraySize, int lastArrayIndex) {
this.tableIdentifierVariable = tableIdentifierVariable;
this.arguments = arguments;
this.jsonDocument = jsonDocument;
this.maximumArraySize = maximumArraySize;
this.lastArrayIndex = lastArrayIndex;
}
@Override
public void render(SqlAppender sqlAppender, List<? extends SqlAstNode> sqlAstArguments, ReturnableType<?> returnType, SqlAstTranslator<?> walker) {
final Expression jsonElement = determineJsonElement( tableIdentifierVariable, arguments, jsonDocument );
renderNestedColumnJoins( sqlAppender, tableIdentifierVariable, jsonElement, arguments.columnsClause(), 0, lastArrayIndex, walker );
}
private int renderNestedColumnJoins(SqlAppender sqlAppender, String tableIdentifierVariable, Expression parentJson, JsonTableColumnsClause jsonTableColumnsClause, int arrayIndex, int lastArrayIndex, SqlAstTranslator<?> walker) {
// H2 doesn't support lateral joins, so we have to emulate array flattening by joining against a
// system_range() with a condition that checks if the array index is still within bounds
int currentArrayIndex = arrayIndex;
for ( JsonTableColumnDefinition columnDefinition : jsonTableColumnsClause.getColumnDefinitions() ) {
if ( columnDefinition instanceof JsonTableNestedColumnDefinition nestedColumnDefinition ) {
final String rawJsonPath = nestedColumnDefinition.jsonPath();
final boolean isArray = isArrayAccess( rawJsonPath );
final String jsonPath = isArray ? rawJsonPath.substring( 0, rawJsonPath.length() - 3 ) : rawJsonPath;
final int nextArrayIndex = currentArrayIndex + ( isArray ? 1 : 0 );
// The left join for the first element was already rendered via TableGroupJoin
if ( isArray && currentArrayIndex != 0 ) {
sqlAppender.appendSql( " left join " );
}
final Expression jsonQueryResult = new JsonValueExpression( parentJson, jsonPath, null );
final Expression jsonElement;
if ( isArray ) {
// Only render system ranges for arrays
sqlAppender.append( "system_range(1," );
sqlAppender.append( Integer.toString( maximumArraySize ) );
sqlAppender.append( ") " );
sqlAppender.appendSql( tableIdentifierVariable );
sqlAppender.appendSql( '_' );
sqlAppender.appendSql( nextArrayIndex );
sqlAppender.appendSql( '_' );
final String ordinalityExpression = ordinalityExpression( tableIdentifierVariable, nextArrayIndex );
// The join condition for the last array will be rendered via TableGroupJoin
if ( nextArrayIndex != lastArrayIndex ) {
sqlAppender.appendSql( " on coalesce(array_length(" );
jsonQueryResult.accept( walker );
sqlAppender.append( "),0)>=" );
sqlAppender.appendSql( ordinalityExpression );
}
jsonElement = new ArrayAccessExpression( jsonQueryResult, ordinalityExpression );
}
else {
jsonElement = jsonQueryResult;
}
currentArrayIndex = renderNestedColumnJoins(
sqlAppender,
tableIdentifierVariable,
jsonElement,
nestedColumnDefinition.columns(),
nextArrayIndex,
lastArrayIndex,
walker
);
}
}
return currentArrayIndex;
}
}
}
	/**
	 * Always {@code true}: this function renders the table alias itself, which keeps the
	 * rendering uniform whether the JSON document argument is a column or an arbitrary expression
	 * (the latter is wrapped in a {@code values()} join that reuses the alias).
	 */
	@Override
	public boolean rendersIdentifierVariable(List<SqlAstNode> arguments, SessionFactoryImplementor sessionFactory) {
		// To make our lives simpler when supporting non-column JSON document arguments
		return true;
	}
	/**
	 * Renders the {@code from}-clause fragment of the emulation: a {@code system_range(1,N)}
	 * producing one row per potential array element (or a single row for non-array paths).
	 * When the document is not a plain column it is additionally wrapped as
	 * {@code (system_range(...) t join (values (doc)) t_(d) on 1=1)} so the document can be
	 * referenced by alias {@code t_.d} from the column read expressions.
	 */
	@Override
	protected void renderJsonTable(
			SqlAppender sqlAppender,
			JsonTableArguments arguments,
			AnonymousTupleTableGroupProducer tupleType,
			String tableIdentifierVariable,
			SqlAstTranslator<?> walker) {
		if ( arguments.errorBehavior() == JsonTableErrorBehavior.NULL ) {
			throw new QueryException( "Can't emulate null on error clause on H2" );
		}
		final Expression jsonPathExpression = arguments.jsonPath();
		final boolean isArray = isArrayAccess( jsonPathExpression, walker );
		if ( arguments.jsonDocument().getColumnReference() == null ) {
			// Non-column document: open the wrapping parenthesis for the values() join below
			sqlAppender.append( '(' );
		}
		if ( isArray ) {
			sqlAppender.append( "system_range(1," );
			sqlAppender.append( Integer.toString( maximumArraySize ) );
			sqlAppender.append( ") " );
		}
		else {
			// Non-array path: exactly one row
			sqlAppender.append( "system_range(1,1) " );
		}
		sqlAppender.append( tableIdentifierVariable );
		if ( arguments.jsonDocument().getColumnReference() == null ) {
			sqlAppender.append( " join (values (" );
			arguments.jsonDocument().accept( walker );
			if ( !arguments.isJsonType() ) {
				// Coerce plain string documents to JSON
				sqlAppender.append( " format json" );
			}
			sqlAppender.append( ")) " );
			sqlAppender.append( tableIdentifierVariable );
			sqlAppender.append( "_(d) on 1=1)" );
		}
	}
	/**
	 * Determines whether the json path expression selects array elements. Falls back to
	 * {@code true} when the path is absent or its literal value cannot be obtained, since a
	 * missing path defaults to iterating the document as an array.
	 */
	private static boolean isArrayAccess(@Nullable Expression jsonPath, SqlAstTranslator<?> walker) {
		if ( jsonPath != null ) {
			try {
				return isArrayAccess( walker.getLiteralValue( jsonPath ) );
			}
			catch (Exception ex) {
				// Ignore — non-literal paths fall through to the array default
			}
		}
		// Assume array by default
		return true;
	}
private static boolean isArrayAccess(String jsonPath) {
return jsonPath.endsWith( "[*]" );
}
private static int getLastArrayIndex(JsonTableColumnsClause jsonTableColumnsClause, int arrayIndex) {
int currentArrayIndex = arrayIndex;
for ( JsonTableColumnDefinition columnDefinition : jsonTableColumnsClause.getColumnDefinitions() ) {
if ( columnDefinition instanceof JsonTableNestedColumnDefinition nestedColumnDefinition ) {
currentArrayIndex = getLastArrayIndex(
nestedColumnDefinition.columns(),
arrayIndex + (isArrayAccess( nestedColumnDefinition.jsonPath() ) ? 1 : 0 )
);
}
}
return currentArrayIndex;
}
	/**
	 * Expression that dereferences a JSON document by a (literal) json path, rendered via
	 * {@link H2JsonValueFunction#renderJsonPath}.
	 */
	private static class JsonValueExpression implements SelfRenderingExpression {
		private final Expression jsonDocument;
		// Whether the document expression is already of JSON type; controls whether parentheses
		// are placed around the document on dereference.
		private final boolean isJsonType;
		private final String jsonPath;
		private final @Nullable JsonPathPassingClause passingClause;
		public JsonValueExpression(Expression jsonDocument, String jsonPath, @Nullable JsonPathPassingClause passingClause) {
			this.jsonDocument = jsonDocument;
			// This controls whether we put parenthesis around the document on dereference
			this.isJsonType = jsonDocument instanceof JsonValueExpression
					|| jsonDocument instanceof ArrayAccessExpression;
			this.jsonPath = jsonPath;
			this.passingClause = passingClause;
		}
		public JsonValueExpression(Expression jsonDocument, boolean isJsonType, String jsonPath, @Nullable JsonPathPassingClause passingClause) {
			this.jsonDocument = jsonDocument;
			this.isJsonType = isJsonType;
			this.jsonPath = jsonPath;
			this.passingClause = passingClause;
		}
		@Override
		public void renderToSql(SqlAppender sqlAppender, SqlAstTranslator<?> walker, SessionFactoryImplementor sessionFactory) {
			H2JsonValueFunction.renderJsonPath(
					sqlAppender,
					jsonDocument,
					isJsonType,
					walker,
					jsonPath,
					passingClause
			);
		}
		@Override
		public JdbcMappingContainer getExpressionType() {
			// Only used for rendering; no mapped type
			return null;
		}
	}
	/**
	 * Expression rendering H2's {@code array_get(array, index)} where the index is a raw SQL
	 * fragment (typically the {@code system_range} column {@code <var>.x}).
	 */
	private static class ArrayAccessExpression implements SelfRenderingExpression {
		private final Expression array;
		// Raw SQL fragment used as the (1-based) array index
		private final String indexFragment;
		public ArrayAccessExpression(Expression array, String indexFragment) {
			this.array = array;
			this.indexFragment = indexFragment;
		}
		@Override
		public void renderToSql(SqlAppender sqlAppender, SqlAstTranslator<?> walker, SessionFactoryImplementor sessionFactory) {
			sqlAppender.appendSql( "array_get(" );
			array.accept( walker );
			sqlAppender.appendSql( ',' );
			sqlAppender.appendSql( indexFragment );
			sqlAppender.appendSql( ')' );
		}
		@Override
		public JdbcMappingContainer getExpressionType() {
			// Only used for rendering; no mapped type
			return null;
		}
	}
	/**
	 * Expression rendering {@code coalesce(array_length(array),0)}; the {@code coalesce} maps a
	 * null (absent) array to length 0 so the join predicate eliminates its rows.
	 */
	private static class ArrayLengthExpression implements SelfRenderingExpression {
		private final Expression arrayExpression;
		private final BasicType<Integer> integerType;
		public ArrayLengthExpression(Expression arrayExpression, BasicType<Integer> integerType) {
			this.arrayExpression = arrayExpression;
			this.integerType = integerType;
		}
		@Override
		public void renderToSql(
				SqlAppender sqlAppender,
				SqlAstTranslator<?> walker,
				SessionFactoryImplementor sessionFactory) {
			sqlAppender.append( "coalesce(array_length(" );
			arrayExpression.accept( walker );
			sqlAppender.append( "),0)" );
		}
		@Override
		public JdbcMappingContainer getExpressionType() {
			return integerType;
		}
	}
private static String ordinalityExpression(String tableIdentifierVariable, int clauseLevel) {
if ( clauseLevel == 0 ) {
return tableIdentifierVariable + ".x";
}
return tableIdentifierVariable + "_" + clauseLevel + "_.x";
}
/**
* This type resolver essentially implements all the JSON path handling and casting via column read expressions
* instead of rendering to the {@code from} clause like other {@code json_table()} implementations.
* This is necessary because H2 does not support lateral joins.
* The rendering is tightly coupled to the {@code system_range()} joins that are rendered for nested paths
* that refer to arrays.
*/
private static class H2JsonTableSetReturningFunctionTypeResolver extends JsonTableSetReturningFunctionTypeResolver {
public H2JsonTableSetReturningFunctionTypeResolver() {
}
		/**
		 * Resolves the selectable mappings for the json_table() columns. Instead of relying on
		 * the from-clause (impossible without lateral joins), each column is given a read
		 * expression that dereferences the document/array in place, anchored on
		 * {@code parentReadExpression} computed here from the top-level json path.
		 */
		@Override
		public SelectableMapping[] resolveFunctionReturnType(
				List<? extends SqlAstNode> sqlAstNodes,
				String tableIdentifierVariable,
				boolean lateral,
				boolean withOrdinality,
				SqmToSqlAstConverter converter) {
			final JsonTableArguments arguments = JsonTableArguments.extract( sqlAstNodes );
			final Expression jsonDocument = arguments.jsonDocument();
			final String documentPath;
			final ColumnReference columnReference = jsonDocument.getColumnReference();
			if ( columnReference != null ) {
				documentPath = columnReference.getExpressionText();
			}
			else {
				// Non-column documents are exposed via the values() join alias "<var>_.d"
				documentPath = tableIdentifierVariable + "_." + "d";
			}
			final String parentPath;
			final boolean isArray;
			if ( arguments.jsonPath() != null ) {
				if ( !( arguments.jsonPath() instanceof Literal literal) ) {
					throw new QueryException( "H2 json_table() only supports literal json paths, but got " + arguments.jsonPath() );
				}
				final String rawJsonPath = (String) literal.getLiteralValue();
				isArray = isArrayAccess( rawJsonPath );
				// Strip the trailing "[*]" so the remaining path addresses the array itself
				final String jsonPath = isArray ? rawJsonPath.substring( 0, rawJsonPath.length() - 3 ) : rawJsonPath;
				parentPath = H2JsonValueFunction.applyJsonPath( documentPath, true, arguments.isJsonType(), jsonPath, arguments.passingClause() );
			}
			else {
				// We have to assume this is an array
				isArray = true;
				parentPath = documentPath;
			}
			final String parentReadExpression;
			if ( isArray ) {
				// Dereference the array by the system_range index column
				parentReadExpression = "array_get(" + parentPath + "," + tableIdentifierVariable + ".x)";
			}
			else {
				parentReadExpression = '(' + parentPath + ')';
			}
			final List<JsonTableColumnDefinition> columnDefinitions = arguments.columnsClause().getColumnDefinitions();
			final List<SelectableMapping> selectableMappings = new ArrayList<>( columnDefinitions.size() );
			addSelectableMappings( selectableMappings, tableIdentifierVariable, arguments.columnsClause(), 0, parentReadExpression, converter );
			return selectableMappings.toArray( new SelectableMapping[0] );
		}
protected int addSelectableMappings(List<SelectableMapping> selectableMappings, String tableIdentifierVariable, JsonTableColumnsClause columnsClause, int clauseLevel, String parentReadExpression, SqmToSqlAstConverter converter) {
int currentClauseLevel = clauseLevel;
for ( JsonTableColumnDefinition columnDefinition : columnsClause.getColumnDefinitions() ) {
if ( columnDefinition instanceof JsonTableExistsColumnDefinition definition ) {
addSelectableMappings( selectableMappings, definition, parentReadExpression, converter );
}
else if ( columnDefinition instanceof JsonTableQueryColumnDefinition definition ) {
addSelectableMappings( selectableMappings, definition, parentReadExpression, converter );
}
else if ( columnDefinition instanceof JsonTableValueColumnDefinition definition ) {
addSelectableMappings( selectableMappings, definition, parentReadExpression, converter );
}
else if ( columnDefinition instanceof JsonTableOrdinalityColumnDefinition definition ) {
addSelectableMappings( selectableMappings, tableIdentifierVariable, definition, clauseLevel, converter );
}
else {
final JsonTableNestedColumnDefinition definition = (JsonTableNestedColumnDefinition) columnDefinition;
currentClauseLevel = addSelectableMappings(
selectableMappings,
tableIdentifierVariable,
definition,
currentClauseLevel,
parentReadExpression,
converter
);
}
}
return currentClauseLevel;
}
protected int addSelectableMappings(List<SelectableMapping> selectableMappings, String tableIdentifierVariable, JsonTableNestedColumnDefinition columnDefinition, int clauseLevel, String parentReadExpression, SqmToSqlAstConverter converter) {
final String rawJsonPath = columnDefinition.jsonPath();
final boolean isArray = isArrayAccess( rawJsonPath );
final String jsonPath = isArray ? rawJsonPath.substring( 0, rawJsonPath.length() - 3 ) : rawJsonPath;
final String parentPath = H2JsonValueFunction.applyJsonPath( parentReadExpression, false, true, jsonPath, null );
final int nextClauseLevel;
final String readExpression;
if ( isArray ) {
nextClauseLevel = clauseLevel + 1;
readExpression = "array_get(" + parentPath + "," + ordinalityExpression( tableIdentifierVariable, nextClauseLevel ) + ")";
}
else {
nextClauseLevel = clauseLevel;
readExpression = parentPath;
}
return addSelectableMappings( selectableMappings, tableIdentifierVariable, columnDefinition.columns(), nextClauseLevel, readExpression, converter );
}
protected void addSelectableMappings(List<SelectableMapping> selectableMappings, String tableIdentifierVariable, JsonTableOrdinalityColumnDefinition definition, int clauseLevel, SqmToSqlAstConverter converter) {
addSelectableMapping(
selectableMappings,
definition.name(),
ordinalityExpression( tableIdentifierVariable, clauseLevel ),
converter.getCreationContext().getTypeConfiguration().getBasicTypeForJavaType( Long.class )
);
}
protected void addSelectableMappings(List<SelectableMapping> selectableMappings, JsonTableValueColumnDefinition definition, String parentReadExpression, SqmToSqlAstConverter converter) {
final JsonValueEmptyBehavior emptyBehavior = definition.emptyBehavior();
final Literal defaultExpression;
if ( emptyBehavior != null && emptyBehavior.getDefaultExpression() != null ) {
if ( !( emptyBehavior.getDefaultExpression() instanceof Literal literal ) ) {
throw new QueryException( "H2 json_table() only supports literal default expressions, but got " + emptyBehavior.getDefaultExpression() );
}
defaultExpression = literal;
}
else {
defaultExpression = null;
}
final String baseReadExpression = determineElementReadExpression( definition.name(), definition.jsonPath(), parentReadExpression );
final String elementReadExpression = castValueExpression( baseReadExpression, definition.type(), defaultExpression, converter );
addSelectableMapping(
selectableMappings,
definition.name(),
elementReadExpression,
definition.type().getJdbcMapping()
);
}
private String castValueExpression(String baseReadExpression, CastTarget castTarget, @Nullable Literal defaultExpression, SqmToSqlAstConverter converter) {
final StringBuilder sb = new StringBuilder( baseReadExpression.length() + 200 );
if ( defaultExpression != null ) {
sb.append( "coalesce(" );
}
final boolean hexDecoding = H2JsonValueFunction.needsHexDecoding( castTarget.getJdbcMapping() );
sb.append( "cast(" );
if ( hexDecoding ) {
// We encode binary data as hex, so we have to decode here
sb.append( "hextoraw(regexp_replace(" );
}
sb.append( "stringdecode(regexp_replace(nullif(" );
sb.append( baseReadExpression );
sb.append( ",JSON'null'),'^\"(.*)\"$','$1'))" );
if ( hexDecoding ) {
sb.append( ",'([0-9a-f][0-9a-f])','00$1'))" );
}
sb.append( " as " );
sb.append( determineColumnType( castTarget, converter.getCreationContext().getTypeConfiguration() ) );
sb.append( ')' );
if ( defaultExpression != null ) {
sb.append( ',' );
//noinspection unchecked
final String sqlLiteral = defaultExpression.getJdbcMapping().getJdbcLiteralFormatter().toJdbcLiteral(
defaultExpression.getLiteralValue(),
converter.getCreationContext().getDialect(),
converter.getCreationContext().getWrapperOptions()
);
sb.append( sqlLiteral );
sb.append( ')' );
}
return sb.toString();
}
protected void addSelectableMappings(List<SelectableMapping> selectableMappings, JsonTableQueryColumnDefinition definition, String parentReadExpression, SqmToSqlAstConverter converter) {
final String baseReadExpression = determineElementReadExpression( definition.name(), definition.jsonPath(), parentReadExpression );
final String elementReadExpression = castQueryExpression( baseReadExpression, definition.emptyBehavior(), definition.wrapMode(), converter );
addSelectableMapping(
selectableMappings,
definition.name(),
elementReadExpression,
converter.getCreationContext().getTypeConfiguration().getBasicTypeRegistry()
.resolve( String.class, SqlTypes.JSON )
);
}
private String castQueryExpression(String baseReadExpression, JsonQueryEmptyBehavior emptyBehavior, JsonQueryWrapMode wrapMode, SqmToSqlAstConverter converter) {
final StringBuilder sb = new StringBuilder( baseReadExpression.length() + 200 );
if ( emptyBehavior == JsonQueryEmptyBehavior.EMPTY_ARRAY || emptyBehavior == JsonQueryEmptyBehavior.EMPTY_OBJECT ) {
sb.append( "coalesce(" );
}
if ( wrapMode == JsonQueryWrapMode.WITH_WRAPPER ) {
sb.append( "'['||" );
}
sb.append( "stringdecode(regexp_replace(nullif(" );
sb.append( baseReadExpression );
sb.append( ",JSON'null'),'^\"(.*)\"$','$1'))");
if ( wrapMode == JsonQueryWrapMode.WITH_WRAPPER ) {
sb.append( "||']'" );
}
if ( emptyBehavior == JsonQueryEmptyBehavior.EMPTY_ARRAY ) {
sb.append( ",'[]')" );
}
else if ( emptyBehavior == JsonQueryEmptyBehavior.EMPTY_OBJECT ) {
sb.append( ",'{}')" );
}
return sb.toString();
}
protected void addSelectableMappings(List<SelectableMapping> selectableMappings, JsonTableExistsColumnDefinition definition, String parentReadExpression, SqmToSqlAstConverter converter) {
final String baseReadExpression = determineElementReadExpression( definition.name(), definition.jsonPath(), parentReadExpression );
final String elementReadExpression = parentReadExpression + " is not null and " + baseReadExpression + " is not null";
addSelectableMapping(
selectableMappings,
definition.name(),
elementReadExpression,
converter.getCreationContext().getTypeConfiguration().getBasicTypeForJavaType( Boolean.class )
);
}
protected String determineElementReadExpression(String name, @Nullable String jsonPath, String parentReadExpression) {
return jsonPath == null
? H2JsonValueFunction.applyJsonPath( parentReadExpression, false, true, "$." + name, null )
: H2JsonValueFunction.applyJsonPath( parentReadExpression, false, true, jsonPath, null );
}
protected void addSelectableMapping(List<SelectableMapping> selectableMappings, String name, String elementReadExpression, JdbcMapping type) {
selectableMappings.add( new SelectableMappingImpl(
"",
name,
new SelectablePath( name ),
elementReadExpression,
null,
null,
null,
null,
null,
null,
null,
false,
false,
false,
false,
false,
false,
type
));
}
}
}
|
apache/hadoop | 36,955 | hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/capacity/TestCapacitySchedulerQueues.java | /**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.yarn.server.resourcemanager.scheduler.capacity;
import java.io.IOException;
import java.util.Map;
import org.apache.hadoop.yarn.api.records.QueueState;
import org.apache.hadoop.yarn.api.records.Resource;
import org.apache.hadoop.yarn.conf.YarnConfiguration;
import org.apache.hadoop.yarn.nodelabels.CommonNodeLabelsManager;
import org.apache.hadoop.yarn.server.resourcemanager.MockRM;
import org.apache.hadoop.yarn.server.resourcemanager.RMContextImpl;
import org.apache.hadoop.yarn.server.resourcemanager.nodelabels.NullRMNodeLabelsManager;
import org.apache.hadoop.yarn.server.resourcemanager.nodelabels.RMNodeLabelsManager;
import org.apache.hadoop.yarn.server.resourcemanager.scheduler.ResourceLimits;
import org.apache.hadoop.yarn.server.resourcemanager.security.ClientToAMTokenSecretManagerInRM;
import org.apache.hadoop.yarn.server.resourcemanager.security.NMTokenSecretManagerInRM;
import org.apache.hadoop.yarn.server.resourcemanager.security.RMContainerTokenSecretManager;
import org.apache.hadoop.yarn.util.resource.ResourceUtils;
import org.apache.hadoop.yarn.util.resource.Resources;
import org.junit.jupiter.api.AfterEach;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.Timeout;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import static org.apache.hadoop.yarn.server.resourcemanager.scheduler.capacity.CapacitySchedulerConfigGeneratorForTest.setMaxAllocMb;
import static org.apache.hadoop.yarn.server.resourcemanager.scheduler.capacity.CapacitySchedulerConfigGeneratorForTest.setMaxAllocVcores;
import static org.apache.hadoop.yarn.server.resourcemanager.scheduler.capacity.CapacitySchedulerConfigGeneratorForTest.setMaxAllocation;
import static org.apache.hadoop.yarn.server.resourcemanager.scheduler.capacity.CapacitySchedulerConfigGeneratorForTest.unsetMaxAllocation;
import static org.apache.hadoop.yarn.server.resourcemanager.scheduler.capacity.CapacitySchedulerQueueHelpers.A;
import static org.apache.hadoop.yarn.server.resourcemanager.scheduler.capacity.CapacitySchedulerQueueHelpers.A1;
import static org.apache.hadoop.yarn.server.resourcemanager.scheduler.capacity.CapacitySchedulerQueueHelpers.A2;
import static org.apache.hadoop.yarn.server.resourcemanager.scheduler.capacity.CapacitySchedulerQueueHelpers.A1_B1;
import static org.apache.hadoop.yarn.server.resourcemanager.scheduler.capacity.CapacitySchedulerQueueHelpers.B;
import static org.apache.hadoop.yarn.server.resourcemanager.scheduler.capacity.CapacitySchedulerQueueHelpers.B1;
import static org.apache.hadoop.yarn.server.resourcemanager.scheduler.capacity.CapacitySchedulerQueueHelpers.B1_CAPACITY;
import static org.apache.hadoop.yarn.server.resourcemanager.scheduler.capacity.CapacitySchedulerQueueHelpers.B2;
import static org.apache.hadoop.yarn.server.resourcemanager.scheduler.capacity.CapacitySchedulerQueueHelpers.B2_CAPACITY;
import static org.apache.hadoop.yarn.server.resourcemanager.scheduler.capacity.CapacitySchedulerQueueHelpers.B3;
import static org.apache.hadoop.yarn.server.resourcemanager.scheduler.capacity.CapacitySchedulerQueueHelpers.B3_CAPACITY;
import static org.apache.hadoop.yarn.server.resourcemanager.scheduler.capacity.CapacitySchedulerQueueHelpers.ROOT;
import static org.apache.hadoop.yarn.server.resourcemanager.scheduler.capacity.CapacitySchedulerQueueHelpers.checkQueueStructureCapacities;
import static org.apache.hadoop.yarn.server.resourcemanager.scheduler.capacity.CapacitySchedulerQueueHelpers.ExpectedCapacities;
import static org.apache.hadoop.yarn.server.resourcemanager.scheduler.capacity.CapacitySchedulerQueueHelpers.findQueue;
import static org.apache.hadoop.yarn.server.resourcemanager.scheduler.capacity.CapacitySchedulerQueueHelpers.getDefaultCapacities;
import static org.apache.hadoop.yarn.server.resourcemanager.scheduler.capacity.CapacitySchedulerQueueHelpers.setupQueueConfWithoutChildrenOfB;
import static org.apache.hadoop.yarn.server.resourcemanager.scheduler.capacity.CapacitySchedulerQueueHelpers.setupQueueConfiguration;
import static org.apache.hadoop.yarn.server.resourcemanager.scheduler.capacity.CapacitySchedulerQueueHelpers.setupQueueConfigurationWithB1AsParentQueue;
import static org.apache.hadoop.yarn.server.resourcemanager.scheduler.capacity.CapacitySchedulerQueueHelpers.setupQueueConfigurationWithoutB;
import static org.apache.hadoop.yarn.server.resourcemanager.scheduler.capacity.CapacitySchedulerQueueHelpers.setupQueueConfigurationWithoutB1;
import static org.apache.hadoop.yarn.server.resourcemanager.scheduler.capacity.CapacitySchedulerTestUtilities.GB;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertNotNull;
import static org.junit.jupiter.api.Assertions.assertNull;
import static org.junit.jupiter.api.Assertions.assertThrows;
import static org.junit.jupiter.api.Assertions.assertTrue;
import static org.junit.jupiter.api.Assertions.fail;
import static org.mockito.Mockito.spy;
import static org.mockito.Mockito.when;
public class TestCapacitySchedulerQueues {
private static final Logger LOG =
LoggerFactory.getLogger(TestCapacitySchedulerQueues.class);
private MockRM rm;
private NullRMNodeLabelsManager mgr;
private CapacitySchedulerConfiguration conf;
@BeforeEach
public void setUp() throws Exception {
conf = new CapacitySchedulerConfiguration();
setupQueueConfiguration(conf);
mgr = new NullRMNodeLabelsManager();
mgr.init(conf);
rm = new MockRM(conf) {
protected RMNodeLabelsManager createNodeLabelManager() {
return mgr;
}
};
CapacityScheduler cs = (CapacityScheduler) rm.getResourceScheduler();
cs.init(conf);
cs.start();
cs.reinitialize(conf, rm.getRMContext());
Resource clusterResource = Resource.newInstance(128 * GB, 128);
mgr.setResourceForLabel(CommonNodeLabelsManager.NO_LABEL, clusterResource);
cs.getRootQueue().updateClusterResource(clusterResource,
new ResourceLimits(clusterResource));
}
@AfterEach
public void tearDown() throws Exception {
if (rm != null) {
rm.stop();
}
if (mgr != null) {
mgr.close();
}
}
/**
* Test that parseQueue throws an exception when two leaf queues have the
* same name.
*
* @throws IOException
*/
@Test
public void testParseQueue() throws IOException {
assertThrows(IOException.class, () -> {
CapacityScheduler cs = new CapacityScheduler();
cs.setConf(new YarnConfiguration());
cs.setRMContext(rm.getRMContext());
cs.init(conf);
cs.start();
conf.setQueues(A1, new String[]{"b1"});
conf.setCapacity(A1_B1, 100.0f);
conf.setUserLimitFactor(A1_B1, 100.0f);
cs.reinitialize(conf, new RMContextImpl(null, null, null, null, null,
null, new RMContainerTokenSecretManager(conf),
new NMTokenSecretManagerInRM(conf),
new ClientToAMTokenSecretManagerInRM(), null));
cs.stop();
});
}
@Test
public void testRefreshQueues() throws Exception {
CapacityScheduler cs = new CapacityScheduler();
setupQueueConfiguration(conf);
cs.setConf(new YarnConfiguration());
cs.setRMContext(rm.getRMContext());
cs.init(conf);
cs.start();
cs.reinitialize(conf, rm.getRMContext());
checkQueueStructureCapacities(cs);
conf.setCapacity(A, 80f);
conf.setCapacity(B, 20f);
cs.reinitialize(conf, rm.getRMContext());
checkQueueStructureCapacities(cs, getDefaultCapacities(80f / 100.0f, 20f / 100.0f));
cs.stop();
}
@Test
public void testRefreshQueuesWithNewQueue() throws Exception {
CapacityScheduler cs = new CapacityScheduler();
cs.setConf(new YarnConfiguration());
cs.setRMContext(rm.getRMContext());
cs.init(conf);
cs.start();
cs.reinitialize(conf, rm.getRMContext());
checkQueueStructureCapacities(cs);
// Add a new queue b4
final String b4Path = B + ".b4";
final QueuePath b4 = new QueuePath(b4Path);
final float b4Capacity = 10;
final float modifiedB3Capacity = B3_CAPACITY - b4Capacity;
try {
conf.setCapacity(A, 80f);
conf.setCapacity(B, 20f);
conf.setQueues(B, new String[]{"b1", "b2", "b3", "b4"});
conf.setCapacity(B1, B1_CAPACITY);
conf.setCapacity(B2, B2_CAPACITY);
conf.setCapacity(B3, modifiedB3Capacity);
conf.setCapacity(b4, b4Capacity);
cs.reinitialize(conf, rm.getRMContext());
final float capA = 80f / 100.0f;
final float capB = 20f / 100.0f;
Map<String, ExpectedCapacities> expectedCapacities =
getDefaultCapacities(capA, capB);
expectedCapacities.put(B3.getFullPath(),
new ExpectedCapacities(modifiedB3Capacity / 100.0f, capB));
expectedCapacities.put(b4Path, new ExpectedCapacities(b4Capacity / 100.0f, capB));
checkQueueStructureCapacities(cs, expectedCapacities);
// Verify parent for B4
CSQueue rootQueue = cs.getRootQueue();
CSQueue queueB = findQueue(rootQueue, B.getFullPath());
CSQueue queueB4 = findQueue(queueB, b4Path);
assertEquals(queueB, queueB4.getParent());
} finally {
cs.stop();
}
}
@Test
public void testRefreshQueuesMaxAllocationRefresh() throws Exception {
// queue refresh should not allow changing the maximum allocation setting
// per queue to be smaller than previous setting
CapacityScheduler cs = new CapacityScheduler();
cs.setConf(new YarnConfiguration());
cs.setRMContext(rm.getRMContext());
cs.init(conf);
cs.start();
cs.reinitialize(conf, rm.getRMContext());
checkQueueStructureCapacities(cs);
assertEquals(YarnConfiguration.DEFAULT_RM_SCHEDULER_MAXIMUM_ALLOCATION_MB,
cs.getMaximumResourceCapability().getMemorySize(), "max allocation in CS");
assertEquals(Resources.none(),
conf.getQueueMaximumAllocation(A1), "max allocation for A1");
assertEquals(YarnConfiguration.DEFAULT_RM_SCHEDULER_MAXIMUM_ALLOCATION_MB,
ResourceUtils.fetchMaximumAllocationFromConfig(conf).getMemorySize(),
"max allocation");
CSQueue rootQueue = cs.getRootQueue();
CSQueue queueA = findQueue(rootQueue, A.getFullPath());
CSQueue queueA1 = findQueue(queueA, A1.getFullPath());
assertEquals(((LeafQueue) queueA1)
.getMaximumAllocation().getMemorySize(), 8192, "queue max allocation");
setMaxAllocMb(conf, A1, 4096);
try {
cs.reinitialize(conf, rm.getRMContext());
fail("should have thrown exception");
} catch (IOException e) {
assertTrue(e.getCause().toString().contains("not be decreased"),
"max allocation exception");
}
setMaxAllocMb(conf, A1, 8192);
cs.reinitialize(conf, rm.getRMContext());
setMaxAllocVcores(conf, A1,
YarnConfiguration.DEFAULT_RM_SCHEDULER_MAXIMUM_ALLOCATION_VCORES - 1);
try {
cs.reinitialize(conf, rm.getRMContext());
fail("should have thrown exception");
} catch (IOException e) {
assertTrue(e.getCause().toString().contains("not be decreased"),
"max allocation exception");
}
cs.stop();
}
@Test
public void testRefreshQueuesMaxAllocationPerQueueLarge() throws Exception {
// verify we can't set the allocation per queue larger then cluster setting
CapacityScheduler cs = new CapacityScheduler();
cs.setConf(new YarnConfiguration());
cs.setRMContext(rm.getRMContext());
cs.init(conf);
cs.start();
// change max allocation for B3 queue to be larger then cluster max
setMaxAllocMb(conf, B3,
YarnConfiguration.DEFAULT_RM_SCHEDULER_MAXIMUM_ALLOCATION_MB + 2048);
try {
cs.reinitialize(conf, rm.getRMContext());
fail("should have thrown exception");
} catch (IOException e) {
assertTrue(e.getCause().getMessage().contains("maximum allocation"),
"maximum allocation exception");
}
setMaxAllocMb(conf, B3,
YarnConfiguration.DEFAULT_RM_SCHEDULER_MAXIMUM_ALLOCATION_MB);
cs.reinitialize(conf, rm.getRMContext());
setMaxAllocVcores(conf, B3,
YarnConfiguration.DEFAULT_RM_SCHEDULER_MAXIMUM_ALLOCATION_VCORES + 1);
try {
cs.reinitialize(conf, rm.getRMContext());
fail("should have thrown exception");
} catch (IOException e) {
assertTrue(e.getCause().getMessage().contains("maximum allocation"),
"maximum allocation exception");
}
cs.stop();
}
@Test
public void testRefreshQueuesMaxAllocationRefreshLarger() throws Exception {
// queue refresh should allow max allocation per queue to go larger
CapacityScheduler cs = new CapacityScheduler();
cs.setConf(new YarnConfiguration());
cs.setRMContext(rm.getRMContext());
setMaxAllocMb(conf,
YarnConfiguration.DEFAULT_RM_SCHEDULER_MAXIMUM_ALLOCATION_MB);
setMaxAllocVcores(conf,
YarnConfiguration.DEFAULT_RM_SCHEDULER_MAXIMUM_ALLOCATION_VCORES);
setMaxAllocMb(conf, A1, 4096);
setMaxAllocVcores(conf, A1, 2);
cs.init(conf);
cs.start();
cs.reinitialize(conf, rm.getRMContext());
checkQueueStructureCapacities(cs);
CSQueue rootQueue = cs.getRootQueue();
CSQueue queueA = findQueue(rootQueue, A.getFullPath());
CSQueue queueA1 = findQueue(queueA, A1.getFullPath());
assertEquals(YarnConfiguration.DEFAULT_RM_SCHEDULER_MAXIMUM_ALLOCATION_MB,
cs.getMaximumResourceCapability().getMemorySize(),
"max capability MB in CS");
assertEquals(YarnConfiguration.DEFAULT_RM_SCHEDULER_MAXIMUM_ALLOCATION_VCORES,
cs.getMaximumResourceCapability().getVirtualCores(),
"max capability vcores in CS");
assertEquals(4096, queueA1.getMaximumAllocation().getMemorySize(),
"max allocation MB A1");
assertEquals(2, queueA1.getMaximumAllocation().getVirtualCores(),
"max allocation vcores A1");
assertEquals(YarnConfiguration.DEFAULT_RM_SCHEDULER_MAXIMUM_ALLOCATION_MB,
ResourceUtils.fetchMaximumAllocationFromConfig(conf).getMemorySize(),
"cluster max allocation MB");
assertEquals(YarnConfiguration.DEFAULT_RM_SCHEDULER_MAXIMUM_ALLOCATION_VCORES,
ResourceUtils.fetchMaximumAllocationFromConfig(conf).getVirtualCores(),
"cluster max allocation vcores");
assertEquals(4096, queueA1.getMaximumAllocation().getMemorySize(),
"queue max allocation");
setMaxAllocMb(conf, A1, 6144);
setMaxAllocVcores(conf, A1, 3);
cs.reinitialize(conf, null);
// conf will have changed but we shouldn't be able to change max allocation
// for the actual queue
assertEquals(6144, queueA1.getMaximumAllocation().getMemorySize(),
"max allocation MB A1");
assertEquals(3, queueA1.getMaximumAllocation().getVirtualCores(),
"max allocation vcores A1");
assertEquals(YarnConfiguration.DEFAULT_RM_SCHEDULER_MAXIMUM_ALLOCATION_MB,
ResourceUtils.fetchMaximumAllocationFromConfig(conf).getMemorySize(),
"max allocation MB cluster");
assertEquals(YarnConfiguration.DEFAULT_RM_SCHEDULER_MAXIMUM_ALLOCATION_VCORES,
ResourceUtils.fetchMaximumAllocationFromConfig(conf).getVirtualCores(),
"max allocation vcores cluster");
assertEquals(6144, queueA1.getMaximumAllocation().getMemorySize(),
"queue max allocation MB");
assertEquals(3, queueA1.getMaximumAllocation().getVirtualCores(),
"queue max allocation vcores");
assertEquals(YarnConfiguration.DEFAULT_RM_SCHEDULER_MAXIMUM_ALLOCATION_MB,
cs.getMaximumResourceCapability().getMemorySize(),
"max capability MB cluster");
assertEquals(YarnConfiguration.DEFAULT_RM_SCHEDULER_MAXIMUM_ALLOCATION_VCORES,
cs.getMaximumResourceCapability().getVirtualCores(),
"cluster max capability vcores");
cs.stop();
}
@Test
public void testRefreshQueuesMaxAllocationCSError() throws Exception {
// Try to refresh the cluster level max allocation size to be smaller
// and it should error out
CapacityScheduler cs = new CapacityScheduler();
cs.setConf(new YarnConfiguration());
cs.setRMContext(rm.getRMContext());
setMaxAllocMb(conf, 10240);
setMaxAllocVcores(conf, 10);
setMaxAllocMb(conf, A1, 4096);
setMaxAllocVcores(conf, A1, 4);
cs.init(conf);
cs.start();
cs.reinitialize(conf, rm.getRMContext());
checkQueueStructureCapacities(cs);
assertEquals(10240, cs.getMaximumResourceCapability().getMemorySize(),
"max allocation MB in CS");
assertEquals(10, cs.getMaximumResourceCapability().getVirtualCores(),
"max allocation vcores in CS");
setMaxAllocMb(conf, 6144);
try {
cs.reinitialize(conf, rm.getRMContext());
fail("should have thrown exception");
} catch (IOException e) {
assertTrue(e.getCause().toString().contains("not be decreased"),
"max allocation exception");
}
setMaxAllocMb(conf, 10240);
cs.reinitialize(conf, rm.getRMContext());
setMaxAllocVcores(conf, 8);
try {
cs.reinitialize(conf, rm.getRMContext());
fail("should have thrown exception");
} catch (IOException e) {
assertTrue(e.getCause().toString().contains("not be decreased"),
"max allocation exception");
}
cs.stop();
}
@Test
public void testRefreshQueuesMaxAllocationCSLarger() throws Exception {
// Try to refresh the cluster level max allocation size to be larger
// and verify that if there is no setting per queue it uses the
// cluster level setting.
CapacityScheduler cs = new CapacityScheduler();
cs.setConf(new YarnConfiguration());
cs.setRMContext(rm.getRMContext());
setMaxAllocMb(conf, 10240);
setMaxAllocVcores(conf, 10);
setMaxAllocMb(conf, A1, 4096);
setMaxAllocVcores(conf, A1, 4);
cs.init(conf);
cs.start();
cs.reinitialize(conf, rm.getRMContext());
checkQueueStructureCapacities(cs);
assertEquals(10240, cs.getMaximumResourceCapability().getMemorySize(),
"max allocation MB in CS");
assertEquals(10, cs.getMaximumResourceCapability().getVirtualCores(),
"max allocation vcores in CS");
CSQueue rootQueue = cs.getRootQueue();
CSQueue queueA = findQueue(rootQueue, A.getFullPath());
CSQueue queueB = findQueue(rootQueue, B.getFullPath());
CSQueue queueA1 = findQueue(queueA, A1.getFullPath());
CSQueue queueA2 = findQueue(queueA, A2.getFullPath());
CSQueue queueB2 = findQueue(queueB, B2.getFullPath());
assertEquals(4096, queueA1.getMaximumAllocation().getMemorySize(),
"queue A1 max allocation MB");
assertEquals(4, queueA1.getMaximumAllocation().getVirtualCores(),
"queue A1 max allocation vcores");
assertEquals(10240, queueA2.getMaximumAllocation().getMemorySize(),
"queue A2 max allocation MB");
assertEquals(10, queueA2.getMaximumAllocation().getVirtualCores(),
"queue A2 max allocation vcores");
assertEquals(10240, queueB2.getMaximumAllocation().getMemorySize(),
"queue B2 max allocation MB");
assertEquals(10, queueB2.getMaximumAllocation().getVirtualCores(),
"queue B2 max allocation vcores");
setMaxAllocMb(conf, 12288);
setMaxAllocVcores(conf, 12);
cs.reinitialize(conf, null);
// cluster level setting should change and any queues without
// per queue setting
assertEquals(12288, cs.getMaximumResourceCapability().getMemorySize(),
"max allocation MB in CS");
assertEquals(12, cs.getMaximumResourceCapability().getVirtualCores(),
"max allocation vcores in CS");
assertEquals(4096, queueA1.getMaximumAllocation().getMemorySize(),
"queue A1 max MB allocation");
assertEquals(4, queueA1.getMaximumAllocation().getVirtualCores(),
"queue A1 max vcores allocation");
assertEquals(12288, queueA2.getMaximumAllocation().getMemorySize(),
"queue A2 max MB allocation");
assertEquals(12, queueA2.getMaximumAllocation().getVirtualCores(),
"queue A2 max vcores allocation");
assertEquals(12288, queueB2.getMaximumAllocation().getMemorySize(),
"queue B2 max MB allocation");
assertEquals(12, queueB2.getMaximumAllocation().getVirtualCores(),
"queue B2 max vcores allocation");
cs.stop();
}
  /**
   * Test for queue deletion.
   *
   * Exercises four refresh scenarios in sequence: deleting a leaf queue that
   * still has running apps (must fail), deleting it once drained (must
   * succeed), deleting a parent queue with running apps in a child (must
   * fail), and deleting the parent once all children are stopped (must
   * succeed). The spies below stub getState() with ordered return values so
   * that consecutive reinitialize attempts observe different queue states.
   *
   * @throws Exception
   */
  @Test
  public void testRefreshQueuesWithQueueDelete() throws Exception {
    CapacityScheduler cs = new CapacityScheduler();
    cs.setConf(new YarnConfiguration());
    cs.setRMContext(rm.getRMContext());
    cs.init(conf);
    cs.start();
    cs.reinitialize(conf, rm.getRMContext());
    checkQueueStructureCapacities(cs);
    // test delete leaf queue when there is application running.
    Map<String, CSQueue> queues =
        cs.getCapacitySchedulerQueueManager().getShortNameQueues();
    String b1QTobeDeleted = "b1";
    LeafQueue csB1Queue = spy((LeafQueue) queues.get(b1QTobeDeleted));
    // First reinitialize sees DRAINING (apps still running -> delete fails),
    // the second sees STOPPED (delete succeeds).
    when(csB1Queue.getState()).thenReturn(QueueState.DRAINING)
        .thenReturn(QueueState.STOPPED);
    cs.getCapacitySchedulerQueueManager().addQueue(b1QTobeDeleted, csB1Queue);
    conf = new CapacitySchedulerConfiguration();
    setupQueueConfigurationWithoutB1(conf);
    try {
      cs.reinitialize(conf, rm.getRMContext());
      fail("Expected to throw exception when refresh queue tries to delete a"
          + " queue with running apps");
    } catch (IOException e) {
      // ignore
    }
    // test delete leaf queue(root.b.b1) when there is no application running.
    conf = new CapacitySchedulerConfiguration();
    setupQueueConfigurationWithoutB1(conf);
    try {
      cs.reinitialize(conf, rm.getRMContext());
    } catch (IOException e) {
      LOG.error(
          "Expected to NOT throw exception when refresh queue tries to delete"
              + " a queue WITHOUT running apps",
          e);
      fail("Expected to NOT throw exception when refresh queue tries to delete"
          + " a queue WITHOUT running apps");
    }
    CSQueue rootQueue = cs.getRootQueue();
    CSQueue queueB = findQueue(rootQueue, B.getFullPath());
    // NOTE(review): the variable is named queueB3 but it looks up B1, the
    // queue that was just deleted — the name appears to be historical.
    CSQueue queueB3 = findQueue(queueB, B1.getFullPath());
    assertNull(queueB3, "Refresh needs to support delete of leaf queue ");
    // reset back to default configuration for testing parent queue delete
    conf = new CapacitySchedulerConfiguration();
    setupQueueConfiguration(conf);
    cs.reinitialize(conf, rm.getRMContext());
    checkQueueStructureCapacities(cs);
    // set the configurations such that it fails once but should be successful
    // next time
    queues = cs.getCapacitySchedulerQueueManager().getShortNameQueues();
    // Parent "b" reports DRAINING on the first attempt, STOPPED afterwards;
    // all of its children report STOPPED immediately.
    CSQueue bQueue = spy((ParentQueue) queues.get("b"));
    when(bQueue.getState()).thenReturn(QueueState.DRAINING)
        .thenReturn(QueueState.STOPPED);
    cs.getCapacitySchedulerQueueManager().addQueue("b", bQueue);
    bQueue = spy((LeafQueue) queues.get("b1"));
    when(bQueue.getState()).thenReturn(QueueState.STOPPED);
    cs.getCapacitySchedulerQueueManager().addQueue("b1", bQueue);
    bQueue = spy((LeafQueue) queues.get("b2"));
    when(bQueue.getState()).thenReturn(QueueState.STOPPED);
    cs.getCapacitySchedulerQueueManager().addQueue("b2", bQueue);
    bQueue = spy((LeafQueue) queues.get("b3"));
    when(bQueue.getState()).thenReturn(QueueState.STOPPED);
    cs.getCapacitySchedulerQueueManager().addQueue("b3", bQueue);
    // test delete Parent queue when there is application running.
    conf = new CapacitySchedulerConfiguration();
    setupQueueConfigurationWithoutB(conf);
    try {
      cs.reinitialize(conf, rm.getRMContext());
      fail("Expected to throw exception when refresh queue tries to delete a"
          + " parent queue with running apps in children queue");
    } catch (IOException e) {
      // ignore
    }
    // test delete Parent queue when there is no application running.
    conf = new CapacitySchedulerConfiguration();
    setupQueueConfigurationWithoutB(conf);
    try {
      cs.reinitialize(conf, rm.getRMContext());
    } catch (IOException e) {
      fail("Expected to not throw exception when refresh queue tries to delete"
          + " a queue without running apps");
    }
    // The parent and all of its children must be gone from the queue manager.
    rootQueue = cs.getRootQueue();
    queueB = findQueue(rootQueue, B.getFullPath());
    String message =
        "Refresh needs to support delete of Parent queue and its children.";
    assertNull(queueB, message);
    assertNull(cs.getCapacitySchedulerQueueManager().getQueues().get("b"),
        message);
    assertNull(cs.getCapacitySchedulerQueueManager().getQueues().get("b1"),
        message);
    assertNull(cs.getCapacitySchedulerQueueManager().getQueues().get("b2"),
        message);
    cs.stop();
  }
  /**
   * Test for deletion of all child queues of a parent, which converts the
   * parent queue into a leaf queue on refresh.
   *
   * @throws Exception if scheduler initialization or refresh fails unexpectedly
   */
  @Test
  public void testRefreshQueuesWithAllChildQueuesDeleted() throws Exception {
    CapacityScheduler cs = new CapacityScheduler();
    cs.setConf(new YarnConfiguration());
    cs.setRMContext(rm.getRMContext());
    cs.init(conf);
    cs.start();
    cs.reinitialize(conf, rm.getRMContext());
    checkQueueStructureCapacities(cs);
    // Stub the states of b's children: b1 reports RUNNING on the first call
    // (so the first refresh must fail) and STOPPED afterwards; b2 and b3
    // always report STOPPED.
    Map<String, CSQueue> queues =
        cs.getCapacitySchedulerQueueManager().getShortNameQueues();
    CSQueue bQueue = spy((LeafQueue) queues.get("b1"));
    when(bQueue.getState()).thenReturn(QueueState.RUNNING)
        .thenReturn(QueueState.STOPPED);
    cs.getCapacitySchedulerQueueManager().addQueue("b1", bQueue);
    bQueue = spy((LeafQueue) queues.get("b2"));
    when(bQueue.getState()).thenReturn(QueueState.STOPPED);
    cs.getCapacitySchedulerQueueManager().addQueue("b2", bQueue);
    bQueue = spy((LeafQueue) queues.get("b3"));
    when(bQueue.getState()).thenReturn(QueueState.STOPPED);
    cs.getCapacitySchedulerQueueManager().addQueue("b3", bQueue);
    conf = new CapacitySchedulerConfiguration();
    setupQueueConfWithoutChildrenOfB(conf);
    // First refresh: b1 still reports RUNNING, so converting the parent
    // queue root.b into a leaf queue must be rejected.
    try {
      cs.reinitialize(conf, rm.getRMContext());
      fail("Expected to throw exception when refresh queue tries to make parent"
          + " queue a child queue when one of its children is still running.");
    } catch (IOException e) {
      //do not do anything, expected exception
    }
    // Second refresh: all children now report STOPPED, so deleting
    // root.b.b1, b2 and b3 must succeed.
    try {
      cs.reinitialize(conf, rm.getRMContext());
    } catch (IOException e) {
      e.printStackTrace();
      fail("Expected to NOT throw exception when refresh queue tries to delete"
          + " all children of a parent queue(without running apps).");
    }
    // root.b must still exist, but as a leaf queue now that its children
    // are gone.
    CSQueue rootQueue = cs.getRootQueue();
    CSQueue queueB = findQueue(rootQueue, B.getFullPath());
    assertNotNull(queueB, "Parent Queue B should not be deleted");
    assertTrue(queueB instanceof LeafQueue,
        "As Queue'B children are not deleted");
    String message =
        "Refresh needs to support delete of all children of Parent queue.";
    assertNull(cs.getCapacitySchedulerQueueManager().getQueues().get("b3"),
        message);
    assertNull(cs.getCapacitySchedulerQueueManager().getQueues().get("b1"),
        message);
    assertNull(cs.getCapacitySchedulerQueueManager().getQueues().get("b2"),
        message);
    cs.stop();
  }
  /**
   * Test if we can convert a leaf queue to a parent queue: the conversion
   * must be rejected while the leaf is RUNNING and accepted once it has
   * been STOPPED.
   *
   * @throws Exception if scheduler initialization or refresh fails unexpectedly
   */
  @Test
  @Timeout(value = 10)
  public void testConvertLeafQueueToParentQueue() throws Exception {
    CapacityScheduler cs = new CapacityScheduler();
    cs.setConf(new YarnConfiguration());
    cs.setRMContext(rm.getRMContext());
    cs.init(conf);
    cs.start();
    cs.reinitialize(conf, rm.getRMContext());
    checkQueueStructureCapacities(cs);
    String targetQueue = "b1";
    CSQueue b1 = cs.getQueue(targetQueue);
    assertEquals(QueueState.RUNNING, b1.getState());
    // test if we can convert a leaf queue which is in RUNNING state;
    // this refresh is expected to be rejected
    conf = new CapacitySchedulerConfiguration();
    setupQueueConfigurationWithB1AsParentQueue(conf);
    try {
      cs.reinitialize(conf, rm.getRMContext());
      fail("Expected to throw exception when refresh queue tries to convert"
          + " a child queue to a parent queue.");
    } catch (IOException e) {
      // ignore
    }
    // now set queue state for b1 to STOPPED via a config-only refresh
    conf = new CapacitySchedulerConfiguration();
    setupQueueConfiguration(conf);
    conf.set("yarn.scheduler.capacity.root.b.b1.state", "STOPPED");
    cs.reinitialize(conf, rm.getRMContext());
    assertEquals(QueueState.STOPPED, b1.getState());
    // test if we can convert a leaf queue which is in STOPPED state;
    // this refresh is expected to succeed
    conf = new CapacitySchedulerConfiguration();
    setupQueueConfigurationWithB1AsParentQueue(conf);
    try {
      cs.reinitialize(conf, rm.getRMContext());
    } catch (IOException e) {
      fail("Expected to NOT throw exception when refresh queue tries"
          + " to convert a leaf queue WITHOUT running apps");
    }
    // b1 is now a parent queue: RUNNING again and with children attached
    b1 = cs.getQueue(targetQueue);
    assertTrue(b1 instanceof AbstractParentQueue);
    assertEquals(QueueState.RUNNING, b1.getState());
    assertTrue(!b1.getChildQueues().isEmpty());
    cs.stop();
  }
  /**
   * Verifies that a queue's maximum allocation is taken from its own
   * configuration when set explicitly, inherited from the nearest configured
   * ancestor otherwise, and falls back to the scheduler defaults when no
   * queue in the chain configures it.
   */
  @Test
  public void testQueuesMaxAllocationInheritance() throws Exception {
    // queue level max allocation is set by the queue configuration explicitly
    // or inherits from the parent.
    CapacityScheduler cs = new CapacityScheduler();
    cs.setConf(new YarnConfiguration());
    cs.setRMContext(rm.getRMContext());
    setMaxAllocMb(conf,
        YarnConfiguration.DEFAULT_RM_SCHEDULER_MAXIMUM_ALLOCATION_MB);
    setMaxAllocVcores(conf,
        YarnConfiguration.DEFAULT_RM_SCHEDULER_MAXIMUM_ALLOCATION_VCORES);
    // Test the child queue overrides
    setMaxAllocation(conf, ROOT,
        "memory-mb=4096,vcores=2");
    setMaxAllocation(conf, A1, "memory-mb=6144,vcores=2");
    setMaxAllocation(conf, B, "memory-mb=5120, vcores=2");
    setMaxAllocation(conf, B2, "memory-mb=1024, vcores=2");
    cs.init(conf);
    cs.start();
    cs.reinitialize(conf, rm.getRMContext());
    checkQueueStructureCapacities(cs);
    CSQueue rootQueue = cs.getRootQueue();
    CSQueue queueA = findQueue(rootQueue, A.getFullPath());
    CSQueue queueB = findQueue(rootQueue, B.getFullPath());
    CSQueue queueA1 = findQueue(queueA, A1.getFullPath());
    CSQueue queueA2 = findQueue(queueA, A2.getFullPath());
    CSQueue queueB1 = findQueue(queueB, B1.getFullPath());
    CSQueue queueB2 = findQueue(queueB, B2.getFullPath());
    // Explicit settings win (A1, B, B2); unset queues (A2, B1) inherit from
    // the nearest configured ancestor (root for A2, B for B1).
    assertEquals(YarnConfiguration.DEFAULT_RM_SCHEDULER_MAXIMUM_ALLOCATION_MB,
        cs.getMaximumResourceCapability().getMemorySize(),
        "max capability MB in CS");
    assertEquals(YarnConfiguration.DEFAULT_RM_SCHEDULER_MAXIMUM_ALLOCATION_VCORES,
        cs.getMaximumResourceCapability().getVirtualCores(),
        "max capability vcores in CS");
    assertEquals(6144, queueA1.getMaximumAllocation().getMemorySize(),
        "max allocation MB A1");
    assertEquals(2, queueA1.getMaximumAllocation().getVirtualCores(),
        "max allocation vcores A1");
    assertEquals(4096, queueA2.getMaximumAllocation().getMemorySize(),
        "max allocation MB A2");
    assertEquals(2, queueA2.getMaximumAllocation().getVirtualCores(),
        "max allocation vcores A2");
    assertEquals(5120, queueB.getMaximumAllocation().getMemorySize(),
        "max allocation MB B");
    assertEquals(5120, queueB1.getMaximumAllocation().getMemorySize(),
        "max allocation MB B1");
    assertEquals(1024, queueB2.getMaximumAllocation().getMemorySize(),
        "max allocation MB B2");
    // Test get the max-allocation from different parent: after unsetting
    // A1/B/B1, A1 inherits from A and B1 from root.
    unsetMaxAllocation(conf, A1);
    unsetMaxAllocation(conf, B);
    unsetMaxAllocation(conf, B1);
    setMaxAllocation(conf, ROOT,
        "memory-mb=6144,vcores=2");
    setMaxAllocation(conf, A, "memory-mb=8192,vcores=2");
    cs.reinitialize(conf, rm.getRMContext());
    assertEquals(YarnConfiguration.DEFAULT_RM_SCHEDULER_MAXIMUM_ALLOCATION_MB,
        cs.getMaximumResourceCapability().getMemorySize(),
        "max capability MB in CS");
    assertEquals(YarnConfiguration.DEFAULT_RM_SCHEDULER_MAXIMUM_ALLOCATION_VCORES,
        cs.getMaximumResourceCapability().getVirtualCores(),
        "max capability vcores in CS");
    assertEquals(8192, queueA1.getMaximumAllocation().getMemorySize(),
        "max allocation MB A1");
    assertEquals(2, queueA1.getMaximumAllocation().getVirtualCores(),
        "max allocation vcores A1");
    assertEquals(6144, queueB1.getMaximumAllocation().getMemorySize(),
        "max allocation MB B1");
    assertEquals(2, queueB1.getMaximumAllocation().getVirtualCores(),
        "max allocation vcores B1");
    // Test the default: with nothing configured anywhere, every queue falls
    // back to the scheduler-wide defaults.
    unsetMaxAllocation(conf, ROOT);
    unsetMaxAllocation(conf, A);
    unsetMaxAllocation(conf, A1);
    cs.reinitialize(conf, rm.getRMContext());
    assertEquals(YarnConfiguration.DEFAULT_RM_SCHEDULER_MAXIMUM_ALLOCATION_MB,
        cs.getMaximumResourceCapability().getMemorySize(),
        "max capability MB in CS");
    assertEquals(YarnConfiguration.DEFAULT_RM_SCHEDULER_MAXIMUM_ALLOCATION_VCORES,
        cs.getMaximumResourceCapability().getVirtualCores(),
        "max capability vcores in CS");
    assertEquals(YarnConfiguration.DEFAULT_RM_SCHEDULER_MAXIMUM_ALLOCATION_MB,
        queueA1.getMaximumAllocation().getMemorySize(), "max allocation MB A1");
    assertEquals(YarnConfiguration.DEFAULT_RM_SCHEDULER_MAXIMUM_ALLOCATION_VCORES,
        queueA1.getMaximumAllocation().getVirtualCores(),
        "max allocation vcores A1");
    assertEquals(YarnConfiguration.DEFAULT_RM_SCHEDULER_MAXIMUM_ALLOCATION_MB,
        queueA2.getMaximumAllocation().getMemorySize(), "max allocation MB A2");
    assertEquals(YarnConfiguration.DEFAULT_RM_SCHEDULER_MAXIMUM_ALLOCATION_VCORES,
        queueA2.getMaximumAllocation().getVirtualCores(),
        "max allocation vcores A2");
    cs.stop();
  }
@Test
public void testVerifyQueuesMaxAllocationConf() throws Exception {
// queue level max allocation can't exceed the cluster setting
CapacityScheduler cs = new CapacityScheduler();
cs.setConf(new YarnConfiguration());
cs.setRMContext(rm.getRMContext());
setMaxAllocMb(conf,
YarnConfiguration.DEFAULT_RM_SCHEDULER_MAXIMUM_ALLOCATION_MB);
setMaxAllocVcores(conf,
YarnConfiguration.DEFAULT_RM_SCHEDULER_MAXIMUM_ALLOCATION_VCORES);
long largerMem =
YarnConfiguration.DEFAULT_RM_SCHEDULER_MAXIMUM_ALLOCATION_MB + 1024;
long largerVcores =
YarnConfiguration.DEFAULT_RM_SCHEDULER_MAXIMUM_ALLOCATION_VCORES + 10;
cs.init(conf);
cs.start();
cs.reinitialize(conf, rm.getRMContext());
checkQueueStructureCapacities(cs);
setMaxAllocation(conf, ROOT,
"memory-mb=" + largerMem + ",vcores=2");
try {
cs.reinitialize(conf, rm.getRMContext());
fail("Queue Root maximum allocation can't exceed the cluster setting");
} catch (Exception e) {
assertTrue(e.getCause().getMessage().contains("maximum allocation"),
"maximum allocation exception");
}
setMaxAllocation(conf, ROOT,
"memory-mb=4096,vcores=2");
setMaxAllocation(conf, A, "memory-mb=6144,vcores=2");
setMaxAllocation(conf, A1, "memory-mb=" + largerMem + ",vcores=2");
try {
cs.reinitialize(conf, rm.getRMContext());
fail("Queue A1 maximum allocation can't exceed the cluster setting");
} catch (Exception e) {
assertTrue(e.getCause().getMessage().contains("maximum allocation"),
"maximum allocation exception");
}
setMaxAllocation(conf, A1, "memory-mb=8192" + ",vcores=" + largerVcores);
try {
cs.reinitialize(conf, rm.getRMContext());
fail("Queue A1 maximum allocation can't exceed the cluster setting");
} catch (Exception e) {
assertTrue(e.getCause().getMessage().contains("maximum allocation"),
"maximum allocation exception");
}
cs.stop();
}
}
|
apache/orc | 37,168 | java/core/src/java/org/apache/orc/impl/WriterImpl.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.orc.impl;
import com.github.luben.zstd.util.Native;
import com.google.protobuf.ByteString;
import io.airlift.compress.lz4.Lz4Compressor;
import io.airlift.compress.lz4.Lz4Decompressor;
import io.airlift.compress.lzo.LzoCompressor;
import io.airlift.compress.lzo.LzoDecompressor;
import io.airlift.compress.zstd.ZstdCompressor;
import io.airlift.compress.zstd.ZstdDecompressor;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch;
import org.apache.orc.ColumnStatistics;
import org.apache.orc.CompressionCodec;
import org.apache.orc.CompressionKind;
import org.apache.orc.DataMask;
import org.apache.orc.MemoryManager;
import org.apache.orc.OrcConf;
import org.apache.orc.OrcFile;
import org.apache.orc.OrcProto;
import org.apache.orc.OrcUtils;
import org.apache.orc.PhysicalWriter;
import org.apache.orc.StripeInformation;
import org.apache.orc.StripeStatistics;
import org.apache.orc.TypeDescription;
import org.apache.orc.impl.writer.StreamOptions;
import org.apache.orc.impl.writer.TreeWriter;
import org.apache.orc.impl.writer.WriterContext;
import org.apache.orc.impl.writer.WriterEncryptionKey;
import org.apache.orc.impl.writer.WriterEncryptionVariant;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.IOException;
import java.nio.ByteBuffer;
import java.security.SecureRandom;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.Map;
import java.util.SortedMap;
import java.util.TimeZone;
import java.util.TreeMap;
/**
* An ORC file writer. The file is divided into stripes, which is the natural
* unit of work when reading. Each stripe is buffered in memory until the
* memory reaches the stripe size and then it is written out broken down by
* columns. Each column is written by a TreeWriter that is specific to that
* type of column. TreeWriters may have children TreeWriters that handle the
* sub-types. Each of the TreeWriters writes the column's data as a set of
* streams.
* <p>
* This class is unsynchronized like most Stream objects, so from the creation
* of an OrcFile and all access to a single instance has to be from a single
* thread.
* <p>
* There are no known cases where these happen between different threads today.
* <p>
* Caveat: the MemoryManager is created during WriterOptions create, that has
* to be confined to a single thread as well.
*
*/
public class WriterImpl implements WriterInternal, MemoryManager.Callback {
  private static final Logger LOG = LoggerFactory.getLogger(WriterImpl.class);
  // Smallest row index stride accepted when index building is enabled.
  private static final int MIN_ROW_INDEX_STRIDE = 1000;
  // File layout and configuration, fixed at construction time.
  private final Path path;
  private final long stripeSize;
  private final long stripeRowCount;
  private final int rowIndexStride;
  private final TypeDescription schema;
  private final PhysicalWriter physicalWriter;
  private final OrcFile.WriterVersion writerVersion;
  // Stream options without any encryption applied; shared by all
  // unencrypted streams.
  private final StreamOptions unencryptedOptions;
  // Counters for rows written so far (file / current stripe / current
  // index entry).
  private long rowCount = 0;
  private long rowsInStripe = 0;
  // Raw (uncompressed, unencoded) data size; computed when the footer is
  // written.
  private long rawDataSize = 0;
  private int rowsInIndex = 0;
  private long lastFlushOffset = 0;
  private int stripesAtLastFlush = -1;
  // Directory entries of the stripes already flushed to the file.
  private final List<OrcProto.StripeInformation> stripes =
      new ArrayList<>();
  // User-supplied key/value metadata, written into the footer.
  private final Map<String, ByteString> userMetadata =
      new TreeMap<>();
  private final TreeWriter treeWriter;
  private final boolean buildIndex;
  private final MemoryManager memoryManager;
  private long previousAllocation = -1;
  // Current memory budget; rescaled by the memory manager via checkMemory.
  private long memoryLimit;
  // Triggers for the periodic memory check: every rowsPerCheck rows, or
  // when the estimated size passes stripeSizePerCheck (0 disables it).
  private final long rowsPerCheck;
  private final double stripeSizePerCheck;
  private long rowsSinceCheck = 0;
  private final OrcFile.Version version;
  private final Configuration conf;
  private final OrcFile.WriterCallback callback;
  private final OrcFile.WriterContext callbackContext;
  private final OrcFile.EncodingStrategy encodingStrategy;
  private final OrcFile.CompressionStrategy compressionStrategy;
  private final boolean[] bloomFilterColumns;
  private final double bloomFilterFpp;
  private final OrcFile.BloomFilterVersion bloomFilterVersion;
  private final boolean writeTimeZone;
  private final boolean useUTCTimeZone;
  private final double dictionaryKeySizeThreshold;
  // Columns forced to direct (non-dictionary) encoding.
  private final boolean[] directEncodingColumns;
  // Column encodings for the current stripe's unencrypted data; cleared
  // when each stripe is flushed.
  private final List<OrcProto.ColumnEncoding> unencryptedEncodings =
      new ArrayList<>();
  // the list of maskDescriptions, keys, and variants
  private SortedMap<String, MaskDescriptionImpl> maskDescriptions = new TreeMap<>();
  private SortedMap<String, WriterEncryptionKey> keys = new TreeMap<>();
  private final WriterEncryptionVariant[] encryption;
  // the mapping of columns to maskDescriptions
  private final MaskDescriptionImpl[] columnMaskDescriptions;
  // the mapping of columns to EncryptionVariants
  private final WriterEncryptionVariant[] columnEncryption;
  private KeyProvider keyProvider;
  // do we need to include the current encryption keys in the next stripe
  // information
  private boolean needKeyFlush;
  private final boolean useProlepticGregorian;
  private boolean isClose = false;
  /**
   * Builds a writer for the given path and options and writes the ORC file
   * header. The schema from the options is cloned so that encryption
   * annotations added here do not leak back to the caller.
   *
   * @param fs the file system to create the file on; only used when the
   *           options do not supply a custom PhysicalWriter
   * @param path the path of the file to write
   * @param opts writer options controlling layout, compression, indexing,
   *             encryption, and memory management
   * @throws IOException if the physical writer cannot be created or the
   *         header cannot be written
   */
  public WriterImpl(FileSystem fs,
                    Path path,
                    OrcFile.WriterOptions opts) throws IOException {
    this.path = path;
    this.conf = opts.getConfiguration();
    // clone it so that we can annotate it with encryption
    this.schema = opts.getSchema().clone();
    int numColumns = schema.getMaximumId() + 1;
    // Unless the caller pinned the buffer size, derive one from the stripe
    // size and column count so each stream gets reasonably sized buffers.
    if (!opts.isEnforceBufferSize()) {
      opts.bufferSize(getEstimatedBufferSize(opts.getStripeSize(), numColumns,
          opts.getBufferSize()));
    }
    // Annotate the schema with the column encryption
    schema.annotateEncryption(opts.getEncryption(), opts.getMasks());
    columnEncryption = new WriterEncryptionVariant[numColumns];
    columnMaskDescriptions = new MaskDescriptionImpl[numColumns];
    encryption = setupEncryption(opts.getKeyProvider(), schema,
        opts.getKeyOverrides());
    // When any column is encrypted, the keys must go out with the first
    // stripe information (see flushStripe).
    needKeyFlush = encryption.length > 0;
    this.directEncodingColumns = OrcUtils.includeColumns(
        opts.getDirectEncodingColumns(), opts.getSchema());
    dictionaryKeySizeThreshold =
        OrcConf.DICTIONARY_KEY_SIZE_THRESHOLD.getDouble(conf);
    this.callback = opts.getCallback();
    if (callback != null) {
      callbackContext = () -> WriterImpl.this;
    } else {
      callbackContext = null;
    }
    this.useProlepticGregorian = opts.getProlepticGregorian();
    this.writeTimeZone = hasTimestamp(schema);
    this.useUTCTimeZone = opts.getUseUTCTimestamp();
    this.encodingStrategy = opts.getEncodingStrategy();
    this.compressionStrategy = opts.getCompressionStrategy();
    // ORC-1362: if isBuildIndex=false, then rowIndexStride will be set to 0.
    if (opts.getRowIndexStride() >= 0 && opts.isBuildIndex()) {
      this.rowIndexStride = opts.getRowIndexStride();
    } else {
      this.rowIndexStride = 0;
    }
    this.buildIndex = rowIndexStride > 0;
    if (buildIndex && rowIndexStride < MIN_ROW_INDEX_STRIDE) {
      throw new IllegalArgumentException("Row stride must be at least " +
          MIN_ROW_INDEX_STRIDE);
    }
    this.writerVersion = opts.getWriterVersion();
    this.version = opts.getVersion();
    if (version == OrcFile.Version.FUTURE) {
      throw new IllegalArgumentException("Can not write in a unknown version.");
    } else if (version == OrcFile.Version.UNSTABLE_PRE_2_0) {
      LOG.warn("ORC files written in " + version.getName() + " will not be" +
          " readable by other versions of the software. It is only for" +
          " developer testing.");
    }
    this.bloomFilterVersion = opts.getBloomFilterVersion();
    this.bloomFilterFpp = opts.getBloomFilterFpp();
    /* do not write bloom filters for ORC v11 */
    if (!buildIndex || version == OrcFile.Version.V_0_11) {
      this.bloomFilterColumns = new boolean[schema.getMaximumId() + 1];
    } else {
      this.bloomFilterColumns =
          OrcUtils.includeColumns(opts.getBloomFilterColumns(), schema);
    }
    // ensure that we are able to handle callbacks before we register ourselves
    rowsPerCheck = Math.min(opts.getStripeRowCountValue(),
        OrcConf.ROWS_BETWEEN_CHECKS.getLong(conf));
    this.stripeRowCount= opts.getStripeRowCountValue();
    this.stripeSize = opts.getStripeSize();
    memoryLimit = stripeSize;
    double stripeSizeCheckRatio = OrcConf.STRIPE_SIZE_CHECKRATIO.getDouble(conf);
    // A ratio <= 0 disables the size-based trigger of the memory check.
    stripeSizePerCheck = stripeSizeCheckRatio <= 0 ? 0 : stripeSizeCheckRatio * stripeSize;
    memoryManager = opts.getMemoryManager();
    memoryManager.addWriter(path, stripeSize, this);
    // Set up the physical writer
    this.physicalWriter = opts.getPhysicalWriter() == null ?
        new PhysicalFsWriter(fs, path, opts, encryption) :
        opts.getPhysicalWriter();
    physicalWriter.writeHeader();
    unencryptedOptions = physicalWriter.getStreamOptions();
    OutStream.assertBufferSizeValid(unencryptedOptions.getBufferSize());
    treeWriter = TreeWriter.Factory.create(schema, null, new StreamFactory());
    LOG.debug("ORC writer created for path: {} with stripeSize: {} options: {}",
        path, stripeSize, unencryptedOptions);
  }
//@VisibleForTesting
public static int getEstimatedBufferSize(long stripeSize, int numColumns,
int bs) {
// The worst case is that there are 2 big streams per a column and
// we want to guarantee that each stream gets ~10 buffers.
// This keeps buffers small enough that we don't get really small stripe
// sizes.
int estBufferSize = (int) (stripeSize / (20L * numColumns));
estBufferSize = getClosestBufferSize(estBufferSize);
return Math.min(estBufferSize, bs);
}
@Override
public void increaseCompressionSize(int newSize) {
if (newSize > unencryptedOptions.getBufferSize()) {
unencryptedOptions.bufferSize(newSize);
}
}
/**
* Given a buffer size, return the nearest superior power of 2. Min value is
* 4Kib, Max value is 256Kib.
*
* @param size Proposed buffer size
* @return the suggested buffer size
*/
private static int getClosestBufferSize(int size) {
final int kb4 = 4 * 1024;
final int kb256 = 256 * 1024;
final int pow2 = size == 1 ? 1 : Integer.highestOneBit(size - 1) * 2;
return Math.min(kb256, Math.max(kb4, pow2));
}
  static {
    // Eagerly load the zstd-jni native library unless the pure-java zstd
    // implementation was requested via the orc.compression.zstd.impl system
    // property. A load failure is not fatal: createCodec() checks
    // Native.isLoaded() and falls back to the aircompressor codec.
    try {
      if (!"java".equalsIgnoreCase(System.getProperty("orc.compression.zstd.impl"))) {
        Native.load();
      }
    } catch (UnsatisfiedLinkError | ExceptionInInitializerError e) {
      LOG.warn("Unable to load zstd-jni library for your platform. " +
          "Using builtin-java classes where applicable");
    }
  }
public static CompressionCodec createCodec(CompressionKind kind) {
switch (kind) {
case NONE:
return null;
case ZLIB:
return new ZlibCodec();
case SNAPPY:
return new SnappyCodec();
case LZO:
return new AircompressorCodec(kind, new LzoCompressor(),
new LzoDecompressor());
case LZ4:
return new AircompressorCodec(kind, new Lz4Compressor(),
new Lz4Decompressor());
case ZSTD:
if ("java".equalsIgnoreCase(System.getProperty("orc.compression.zstd.impl"))) {
return new AircompressorCodec(kind, new ZstdCompressor(),
new ZstdDecompressor());
}
if (Native.isLoaded()) {
return new ZstdCodec();
} else {
return new AircompressorCodec(kind, new ZstdCompressor(),
new ZstdDecompressor());
}
case BROTLI:
return new BrotliCodec();
default:
throw new IllegalArgumentException("Unknown compression codec: " +
kind);
}
}
  /**
   * Callback from the memory manager supplying a new memory scale factor.
   * Rescales this writer's memory limit and then runs the regular check,
   * which may flush the current stripe.
   *
   * @param newScale the fraction of the stripe size this writer may use
   * @return true if the current stripe was flushed
   */
  @Override
  public boolean checkMemory(double newScale) throws IOException {
    memoryLimit = Math.round(stripeSize * newScale);
    return checkMemory();
  }
private boolean checkMemory() throws IOException {
long size = rowsSinceCheck < rowsPerCheck && stripeSizePerCheck == 0
? 0 : treeWriter.estimateMemory();
if (rowsSinceCheck >= rowsPerCheck || size > stripeSizePerCheck) {
rowsSinceCheck = 0;
if (LOG.isDebugEnabled()) {
LOG.debug("ORC writer " + physicalWriter + " size = " + size +
" memoryLimit = " + memoryLimit + " rowsInStripe = " + rowsInStripe +
" stripeRowCountLimit = " + stripeRowCount);
}
if (size > memoryLimit || rowsInStripe >= stripeRowCount) {
flushStripe();
return true;
}
}
return false;
}
  /**
   * Interface from the Writer to the TreeWriters. This limits the visibility
   * that the TreeWriters have into the Writer.
   */
  private class StreamFactory implements WriterContext {
    /**
     * Create a stream to store part of a column.
     * @param name the name for the stream
     * @return The output outStream that the section needs to be written to.
     */
    @Override
    public OutStream createStream(StreamName name) throws IOException {
      StreamOptions options = SerializationUtils.getCustomizedCodec(
          unencryptedOptions, compressionStrategy, name.getKind());
      WriterEncryptionVariant encryption =
          (WriterEncryptionVariant) name.getEncryption();
      if (encryption != null) {
        // Copy before mutating: the shared unencryptedOptions instance must
        // never have encryption settings applied to it.
        if (options == unencryptedOptions) {
          options = new StreamOptions(options);
        }
        options.withEncryption(encryption.getKeyDescription().getAlgorithm(),
                encryption.getFileFooterKey())
            .modifyIv(CryptoUtils.modifyIvForStream(name, 1));
      }
      return new OutStream(name, options, physicalWriter.createDataStream(name));
    }
    /**
     * Get the stride rate of the row index.
     */
    @Override
    public int getRowIndexStride() {
      return rowIndexStride;
    }
    /**
     * Should be building the row index.
     * @return true if we are building the index
     */
    @Override
    public boolean buildIndex() {
      return buildIndex;
    }
    /**
     * Is the ORC file compressed?
     * @return are the streams compressed
     */
    @Override
    public boolean isCompressed() {
      return unencryptedOptions.getCodec() != null;
    }
    /**
     * Get the encoding strategy to use.
     * @return encoding strategy
     */
    @Override
    public OrcFile.EncodingStrategy getEncodingStrategy() {
      return encodingStrategy;
    }
    /**
     * Get the bloom filter columns
     * @return bloom filter columns
     */
    @Override
    public boolean[] getBloomFilterColumns() {
      return bloomFilterColumns;
    }
    /**
     * Get bloom filter false positive percentage.
     * @return fpp
     */
    @Override
    public double getBloomFilterFPP() {
      return bloomFilterFpp;
    }
    /**
     * Get the writer's configuration.
     * @return configuration
     */
    @Override
    public Configuration getConfiguration() {
      return conf;
    }
    /**
     * Get the version of the file to write.
     */
    @Override
    public OrcFile.Version getVersion() {
      return version;
    }
    /**
     * Get the PhysicalWriter.
     *
     * @return the file's physical writer.
     */
    @Override
    public PhysicalWriter getPhysicalWriter() {
      return physicalWriter;
    }
    @Override
    @Deprecated
    public OrcFile.BloomFilterVersion getBloomFilterVersion() {
      return bloomFilterVersion;
    }
    @Override
    public void writeIndex(StreamName name,
                           OrcProto.RowIndex.Builder index) throws IOException {
      physicalWriter.writeIndex(name, index);
    }
    @Override
    public void writeBloomFilter(StreamName name,
                                 OrcProto.BloomFilterIndex.Builder bloom
                                 ) throws IOException {
      physicalWriter.writeBloomFilter(name, bloom);
    }
    /**
     * Get the encryption variant for a column, or null when the column is
     * not encrypted (or the id is out of range).
     */
    @Override
    public WriterEncryptionVariant getEncryption(int columnId) {
      return columnId < columnEncryption.length ?
          columnEncryption[columnId] : null;
    }
    /**
     * Build the data mask used for the unencrypted copy of a masked column,
     * or return null when the column has no mask.
     */
    @Override
    public DataMask getUnencryptedMask(int columnId) {
      if (columnMaskDescriptions != null) {
        MaskDescriptionImpl descr = columnMaskDescriptions[columnId];
        if (descr != null) {
          return DataMask.Factory.build(descr, schema.findSubtype(columnId),
              (type) -> columnMaskDescriptions[type.getId()]);
        }
      }
      return null;
    }
    /**
     * Record a column's encoding for the current stripe, routed either to
     * the shared unencrypted list or to its encryption variant.
     */
    @Override
    public void setEncoding(int column, WriterEncryptionVariant encryption,
                            OrcProto.ColumnEncoding encoding) {
      if (encryption == null) {
        unencryptedEncodings.add(encoding);
      } else {
        encryption.addEncoding(encoding);
      }
    }
    @Override
    public void writeStatistics(StreamName name,
                                OrcProto.ColumnStatistics.Builder stats
                                ) throws IOException {
      physicalWriter.writeStatistics(name, stats);
    }
    @Override
    public boolean getUseUTCTimestamp() {
      return useUTCTimeZone;
    }
    /**
     * Columns forced to direct encoding get a threshold of 0.0, which
     * disables dictionary encoding for them.
     */
    @Override
    public double getDictionaryKeySizeThreshold(int columnId) {
      return directEncodingColumns[columnId] ? 0.0 : dictionaryKeySizeThreshold;
    }
    @Override
    public boolean getProlepticGregorian() {
      return useProlepticGregorian;
    }
  }
  /**
   * Serialize the schema into the footer's type list.
   *
   * @param builder the footer builder to add the types to
   * @param schema the file's schema
   */
  private static void writeTypes(OrcProto.Footer.Builder builder,
                                 TypeDescription schema) {
    builder.addAllTypes(OrcUtils.getOrcTypes(schema));
  }
  /**
   * Create a row index entry at the current point in the file and reset the
   * count of rows gathered toward the next entry.
   */
  private void createRowIndexEntry() throws IOException {
    treeWriter.createRowIndexEntry();
    rowsInIndex = 0;
  }
/**
* Write the encrypted keys into the StripeInformation along with the
* stripe id, so that the readers can decrypt the data.
* @param dirEntry the entry to modify
*/
private void addEncryptedKeys(OrcProto.StripeInformation.Builder dirEntry) {
for(WriterEncryptionVariant variant: encryption) {
dirEntry.addEncryptedLocalKeys(ByteString.copyFrom(
variant.getMaterial().getEncryptedKey()));
}
dirEntry.setEncryptStripeId(1 + stripes.size());
}
  /**
   * Flush the rows buffered for the current stripe (if any) to the physical
   * writer, record its directory entry, and reset the per-stripe state.
   * Does nothing when no rows are buffered.
   */
  private void flushStripe() throws IOException {
    // Close out a partially filled index entry before the stripe is written.
    if (buildIndex && rowsInIndex != 0) {
      createRowIndexEntry();
    }
    if (rowsInStripe != 0) {
      if (callback != null) {
        callback.preStripeWrite(callbackContext);
      }
      // finalize the data for the stripe
      int requiredIndexEntries = rowIndexStride == 0 ? 0 :
          (int) ((rowsInStripe + rowIndexStride - 1) / rowIndexStride);
      OrcProto.StripeFooter.Builder builder =
          OrcProto.StripeFooter.newBuilder();
      if (writeTimeZone) {
        if (useUTCTimeZone) {
          builder.setWriterTimezone("UTC");
        } else {
          builder.setWriterTimezone(TimeZone.getDefault().getID());
        }
      }
      treeWriter.flushStreams();
      treeWriter.writeStripe(requiredIndexEntries);
      // update the encodings; the per-stripe lists are cleared so the next
      // stripe starts fresh
      builder.addAllColumns(unencryptedEncodings);
      unencryptedEncodings.clear();
      for (WriterEncryptionVariant writerEncryptionVariant : encryption) {
        OrcProto.StripeEncryptionVariant.Builder encrypt =
            OrcProto.StripeEncryptionVariant.newBuilder();
        encrypt.addAllEncoding(writerEncryptionVariant.getEncodings());
        writerEncryptionVariant.clearEncodings();
        builder.addEncryption(encrypt);
      }
      OrcProto.StripeInformation.Builder dirEntry =
          OrcProto.StripeInformation.newBuilder()
              .setNumberOfRows(rowsInStripe);
      // Attach the encrypted local keys the first time (and whenever a key
      // change marked them for re-flush).
      if (encryption.length > 0 && needKeyFlush) {
        addEncryptedKeys(dirEntry);
        needKeyFlush = false;
      }
      physicalWriter.finalizeStripe(builder, dirEntry);
      stripes.add(dirEntry.build());
      rowCount += rowsInStripe;
      rowsInStripe = 0;
    }
  }
  /**
   * Compute the raw (unencoded) data size of everything written so far, as
   * reported by the root tree writer.
   */
  private long computeRawDataSize() {
    return treeWriter.getRawDataSize();
  }
private OrcProto.CompressionKind writeCompressionKind(CompressionKind kind) {
switch (kind) {
case NONE: return OrcProto.CompressionKind.NONE;
case ZLIB: return OrcProto.CompressionKind.ZLIB;
case SNAPPY: return OrcProto.CompressionKind.SNAPPY;
case LZO: return OrcProto.CompressionKind.LZO;
case LZ4: return OrcProto.CompressionKind.LZ4;
case ZSTD: return OrcProto.CompressionKind.ZSTD;
case BROTLI: return OrcProto.CompressionKind.BROTLI;
default:
throw new IllegalArgumentException("Unknown compression " + kind);
}
}
  /**
   * Ask the physical writer to emit the file metadata section (the stripe
   * statistics it has been collecting).
   */
  private void writeMetadata() throws IOException {
    // The physical writer now has the stripe statistics, so we pass a
    // new builder in here.
    physicalWriter.writeFileMetadata(OrcProto.Metadata.newBuilder());
  }
private long writePostScript() throws IOException {
OrcProto.PostScript.Builder builder =
OrcProto.PostScript.newBuilder()
.setMagic(OrcFile.MAGIC)
.addVersion(version.getMajor())
.addVersion(version.getMinor())
.setWriterVersion(writerVersion.getId());
CompressionCodec codec = unencryptedOptions.getCodec();
if (codec == null) {
builder.setCompression(OrcProto.CompressionKind.NONE);
} else {
builder.setCompression(writeCompressionKind(codec.getKind()))
.setCompressionBlockSize(unencryptedOptions.getBufferSize());
}
return physicalWriter.writePostScript(builder);
}
private OrcProto.EncryptionKey.Builder writeEncryptionKey(WriterEncryptionKey key) {
OrcProto.EncryptionKey.Builder result = OrcProto.EncryptionKey.newBuilder();
HadoopShims.KeyMetadata meta = key.getMetadata();
result.setKeyName(meta.getKeyName());
result.setKeyVersion(meta.getVersion());
result.setAlgorithm(OrcProto.EncryptionAlgorithm.forNumber(
meta.getAlgorithm().getSerialization()));
return result;
}
private OrcProto.EncryptionVariant.Builder
writeEncryptionVariant(WriterEncryptionVariant variant) {
OrcProto.EncryptionVariant.Builder result =
OrcProto.EncryptionVariant.newBuilder();
result.setRoot(variant.getRoot().getId());
result.setKey(variant.getKeyDescription().getId());
result.setEncryptedKey(ByteString.copyFrom(variant.getMaterial().getEncryptedKey()));
return result;
}
private OrcProto.Encryption.Builder writeEncryptionFooter() {
OrcProto.Encryption.Builder encrypt = OrcProto.Encryption.newBuilder();
for(MaskDescriptionImpl mask: maskDescriptions.values()) {
OrcProto.DataMask.Builder maskBuilder = OrcProto.DataMask.newBuilder();
maskBuilder.setName(mask.getName());
for(String param: mask.getParameters()) {
maskBuilder.addMaskParameters(param);
}
for(TypeDescription column: mask.getColumns()) {
maskBuilder.addColumns(column.getId());
}
encrypt.addMask(maskBuilder);
}
for(WriterEncryptionKey key: keys.values()) {
encrypt.addKey(writeEncryptionKey(key));
}
for(WriterEncryptionVariant variant: encryption) {
encrypt.addVariants(writeEncryptionVariant(variant));
}
encrypt.setKeyProvider(OrcProto.KeyProviderKind.forNumber(
keyProvider.getKind().getValue()));
return encrypt;
}
  /**
   * Finish the file: write the metadata section, then the footer (types,
   * stripes, statistics, user metadata, encryption), then the postscript.
   * The write order matters — the metadata must precede the footer and the
   * footer must precede the postscript.
   *
   * @return the value returned by the physical writer's postscript write
   */
  private long writeFooter() throws IOException {
    writeMetadata();
    OrcProto.Footer.Builder builder = OrcProto.Footer.newBuilder();
    builder.setNumberOfRows(rowCount);
    builder.setRowIndexStride(rowIndexStride);
    rawDataSize = computeRawDataSize();
    // serialize the types
    writeTypes(builder, schema);
    builder.setCalendar(useProlepticGregorian
        ? OrcProto.CalendarKind.PROLEPTIC_GREGORIAN
        : OrcProto.CalendarKind.JULIAN_GREGORIAN);
    // add the stripe information
    for(OrcProto.StripeInformation stripe: stripes) {
      builder.addStripes(stripe);
    }
    // add the column statistics
    treeWriter.writeFileStatistics();
    // add all of the user metadata
    for(Map.Entry<String, ByteString> entry: userMetadata.entrySet()) {
      builder.addMetadata(OrcProto.UserMetadataItem.newBuilder()
          .setName(entry.getKey()).setValue(entry.getValue()));
    }
    // the encryption section is only present when columns are encrypted
    if (encryption.length > 0) {
      builder.setEncryption(writeEncryptionFooter());
    }
    builder.setWriter(OrcFile.WriterImplementation.ORC_JAVA.getId());
    builder.setSoftwareVersion(OrcUtils.getOrcVersion());
    physicalWriter.writeFileFooter(builder);
    return writePostScript();
  }
/** @return the schema this writer serializes rows against. */
@Override
public TypeDescription getSchema() {
  return schema;
}
/**
 * Attach a user metadata key/value pair to be written in the file footer.
 * A later call with the same name replaces the earlier value.
 */
@Override
public void addUserMetadata(String name, ByteBuffer value) {
  userMetadata.put(name, ByteString.copyFrom(value));
}
/**
 * Add a batch of rows to the file. When index building is enabled the
 * batch is written in chunks of at most rowIndexStride rows so that row
 * group index entries stay aligned; otherwise the batch is written in the
 * largest contiguous runs possible. With a selection vector in use, only
 * maximal runs of consecutively selected rows are passed down. On any
 * failure the writer is closed before the exception is rethrown.
 * @param batch the rows to write; the batch's selected vector is honored
 * @throws IOException if writing fails (the writer is closed first)
 */
@Override
public void addRowBatch(VectorizedRowBatch batch) throws IOException {
  try {
    // If this is the first set of rows in this stripe, tell the tree writers
    // to prepare the stripe.
    if (batch.size != 0 && rowsInStripe == 0) {
      treeWriter.prepareStripe(stripes.size() + 1);
    }
    if (buildIndex) {
      // Batch the writes up to the rowIndexStride so that we can get the
      // right size indexes.
      int posn = 0;
      while (posn < batch.size) {
        // never cross the current row-group boundary in one write
        int chunkSize = Math.min(batch.size - posn,
            rowIndexStride - rowsInIndex);
        if (batch.isSelectedInUse()) {
          // find the longest chunk that is continuously selected from posn
          for (int len = 1; len < chunkSize; ++len) {
            if (batch.selected[posn + len] - batch.selected[posn] != len) {
              chunkSize = len;
              break;
            }
          }
          treeWriter.writeRootBatch(batch, batch.selected[posn], chunkSize);
        } else {
          treeWriter.writeRootBatch(batch, posn, chunkSize);
        }
        posn += chunkSize;
        rowsInIndex += chunkSize;
        rowsInStripe += chunkSize;
        // close out the row group once the stride is reached
        if (rowsInIndex >= rowIndexStride) {
          createRowIndexEntry();
        }
      }
    } else {
      if (batch.isSelectedInUse()) {
        int posn = 0;
        while (posn < batch.size) {
          int chunkSize = 1;
          while (posn + chunkSize < batch.size) {
            // find the longest chunk that is continuously selected from posn
            if (batch.selected[posn + chunkSize] - batch.selected[posn] != chunkSize) {
              break;
            }
            ++chunkSize;
          }
          treeWriter.writeRootBatch(batch, batch.selected[posn], chunkSize);
          posn += chunkSize;
        }
      } else {
        treeWriter.writeRootBatch(batch, 0, batch.size);
      }
      rowsInStripe += batch.size;
    }
    rowsSinceCheck += batch.size;
    // report the new size to the memory manager; checkMemory() reacts to
    // the updated allocation (defined elsewhere in this class)
    previousAllocation = memoryManager.checkMemory(previousAllocation, this);
    checkMemory();
  } catch (Throwable t) {
    // best effort close so the file is not left half-open
    try {
      close();
    } catch (Throwable ignore) {
      // ignore
    }
    if (t instanceof IOException) {
      throw (IOException) t;
    } else {
      throw new IOException("Problem adding row to " + path, t);
    }
  }
}
/**
 * Close the writer: flush the final stripe, write the footer and
 * postscript, and close the physical writer. Idempotent — subsequent
 * calls are no-ops.
 * @throws IOException if flushing or footer writing fails
 */
@Override
public void close() throws IOException {
  if (!isClose) {
    try {
      if (callback != null) {
        callback.preFooterWrite(callbackContext);
      }
      // remove us from the memory manager so that we don't get any callbacks
      memoryManager.removeWriter(path);
      // actually close the file
      flushStripe();
      lastFlushOffset = writeFooter();
      physicalWriter.close();
    } finally {
      // mark closed even on failure so close() is never re-entered
      isClose = true;
    }
  }
}
/**
 * Raw data size is computed when writing the file footer. Hence the raw
 * data size value will be available only after closing the writer.
 * @return the raw data size computed during the footer write
 */
@Override
public long getRawDataSize() {
  return rawDataSize;
}
/**
 * Row count gets updated when flushing the stripes. To get an accurate
 * row count, call this method after the writer is closed.
 * @return the number of rows in the stripes written so far
 */
@Override
public long getNumberOfRows() {
  return rowCount;
}
/**
 * Flush the buffered rows and write a footer at the current end of file.
 * The footer write is skipped when no new stripes have been added since
 * the previous flush.
 * @return the offset of the most recently flushed footer
 * @throws IOException if flushing or footer writing fails
 */
@Override
public long writeIntermediateFooter() throws IOException {
  // flush any buffered rows
  flushStripe();
  // write a footer
  if (stripesAtLastFlush != stripes.size()) {
    if (callback != null) {
      callback.preFooterWrite(callbackContext);
    }
    lastFlushOffset = writeFooter();
    stripesAtLastFlush = stripes.size();
    physicalWriter.flush();
  }
  return lastFlushOffset;
}
/**
 * Throw an IllegalArgumentException carrying {@code message} when the
 * precondition does not hold.
 */
private static void checkArgument(boolean expression, String message) {
  if (expression) {
    return;
  }
  throw new IllegalArgumentException(message);
}
/**
 * Append a raw pre-serialized stripe, converting the protobuf stripe
 * statistics into the writer-side representation before delegating to
 * the array overload.
 */
@Override
public void appendStripe(byte[] stripe, int offset, int length,
                         StripeInformation stripeInfo,
                         OrcProto.StripeStatistics stripeStatistics
                         ) throws IOException {
  StripeStatistics converted = new StripeStatisticsImpl(
      schema, stripeStatistics.getColStatsList(), false, false);
  appendStripe(stripe, offset, length, stripeInfo,
      new StripeStatistics[]{converted});
}
/**
 * Append a raw, already-serialized stripe to the file together with its
 * statistics. Any buffered rows are flushed first so stripe order is
 * preserved; the directory entry is rebuilt from the given
 * StripeInformation, copying encryption data when present.
 * @param stripe the serialized stripe bytes
 * @param offset the offset of the stripe within the array
 * @param length the number of stripe bytes
 * @param stripeInfo the directory information for the stripe
 * @param stripeStatistics the per-column statistics for the stripe
 * @throws IOException if the stripe cannot be written
 * @throws IllegalArgumentException if any argument is invalid
 */
@Override
public void appendStripe(byte[] stripe, int offset, int length,
                         StripeInformation stripeInfo,
                         StripeStatistics[] stripeStatistics
                         ) throws IOException {
  checkArgument(stripe != null, "Stripe must not be null");
  checkArgument(length <= stripe.length,
      "Specified length must not be greater than the specified array length");
  checkArgument(stripeInfo != null, "Stripe information must not be null");
  checkArgument(stripeStatistics != null,
      "Stripe statistics must not be null");
  // If we have buffered rows, flush them
  if (rowsInStripe > 0) {
    flushStripe();
  }
  rowsInStripe = stripeInfo.getNumberOfRows();
  // update stripe information
  OrcProto.StripeInformation.Builder dirEntry =
      OrcProto.StripeInformation.newBuilder()
          .setNumberOfRows(rowsInStripe)
          .setIndexLength(stripeInfo.getIndexLength())
          .setDataLength(stripeInfo.getDataLength())
          .setFooterLength(stripeInfo.getFooterLength());
  // If this is the first stripe of the original file, we need to copy the
  // encryption information.
  if (stripeInfo.hasEncryptionStripeId()) {
    dirEntry.setEncryptStripeId(stripeInfo.getEncryptionStripeId());
    for (byte[] key : stripeInfo.getEncryptedLocalKeys()) {
      dirEntry.addEncryptedLocalKeys(ByteString.copyFrom(key));
    }
  }
  physicalWriter.appendRawStripe(ByteBuffer.wrap(stripe, offset, length),
      dirEntry);
  // since we have already written the stripe, just update stripe statistics
  treeWriter.addStripeStatistics(stripeStatistics);
  stripes.add(dirEntry.build());
  // reset it after writing the stripe
  rowCount += rowsInStripe;
  rowsInStripe = 0;
  needKeyFlush = encryption.length > 0;
}
/**
 * Merge a list of user metadata items into this writer's metadata map;
 * a null list is silently ignored.
 */
@Override
public void appendUserMetadata(List<OrcProto.UserMetadataItem> userMetadata) {
  if (userMetadata == null) {
    return;
  }
  for (OrcProto.UserMetadataItem item : userMetadata) {
    this.userMetadata.put(item.getName(), item.getValue());
  }
}
/**
 * Collect the current file-level column statistics.
 * @return one entry per column id, filled in by the tree writer
 */
@Override
public ColumnStatistics[] getStatistics() {
  // one slot per column id in the schema
  ColumnStatistics[] stats = new ColumnStatistics[schema.getMaximumId() + 1];
  // Get the file statistics, preferring the encrypted one.
  treeWriter.getCurrentStatistics(stats);
  return stats;
}
/**
 * @return an unmodifiable list view of the stripe directory written so far
 */
@Override
public List<StripeInformation> getStripes() throws IOException {
  return Collections.unmodifiableList(OrcUtils.convertProtoStripesToStripes(stripes));
}
/** @return the compression codec from the unencrypted stream options. */
public CompressionCodec getCompressionCodec() {
  return unencryptedOptions.getCodec();
}
/**
 * Recursively check whether the given type tree contains a TIMESTAMP
 * column anywhere in it.
 */
private static boolean hasTimestamp(TypeDescription schema) {
  if (schema.getCategory() == TypeDescription.Category.TIMESTAMP) {
    return true;
  }
  List<TypeDescription> children = schema.getChildren();
  // leaf types have no children; recurse into each subtree otherwise
  return children != null
      && children.stream().anyMatch(child -> hasTimestamp(child));
}
/**
 * Look up (or lazily create and cache) the writer-side key for the given
 * key name, fetching the current key version from the provider on a miss.
 */
private WriterEncryptionKey getKey(String keyName,
                                   KeyProvider provider) throws IOException {
  WriterEncryptionKey cached = keys.get(keyName);
  if (cached != null) {
    return cached;
  }
  WriterEncryptionKey created =
      new WriterEncryptionKey(provider.getCurrentKeyVersion(keyName));
  keys.put(keyName, created);
  return created;
}
/**
 * Return the cached mask description for the given mask string, parsing
 * and caching it on first use so equal strings share one object.
 */
private MaskDescriptionImpl getMask(String maskString) {
  // if it is already there, reuse the earlier object
  return maskDescriptions.computeIfAbsent(maskString,
      ParserUtils::buildMaskDescription);
}
/**
 * Walk the type tree looking for encryption and mask attributes,
 * registering keys and masks as they are found.
 * @param schema the subtree to visit
 * @param encrypted whether an ancestor of this type is already encrypted
 * @param provider the key provider used to create local keys
 * @return the number of encryption variants rooted in this subtree
 * @throws IOException if key material cannot be obtained
 * @throws IllegalArgumentException on nested encryption or when a key is
 *     requested without a provider
 */
private int visitTypeTree(TypeDescription schema,
                          boolean encrypted,
                          KeyProvider provider) throws IOException {
  int result = 0;
  String keyName = schema.getAttributeValue(TypeDescription.ENCRYPT_ATTRIBUTE);
  String maskName = schema.getAttributeValue(TypeDescription.MASK_ATTRIBUTE);
  if (keyName != null) {
    if (provider == null) {
      throw new IllegalArgumentException("Encryption requires a KeyProvider.");
    }
    if (encrypted) {
      throw new IllegalArgumentException("Nested encryption type: " + schema);
    }
    encrypted = true;
    result += 1;
    // each encrypted root becomes a variant under its (de-duplicated) key
    WriterEncryptionKey key = getKey(keyName, provider);
    HadoopShims.KeyMetadata metadata = key.getMetadata();
    WriterEncryptionVariant variant = new WriterEncryptionVariant(key,
        schema, provider.createLocalKey(metadata));
    key.addRoot(variant);
  }
  // masks are only recorded inside an encrypted subtree; the default mask
  // when only a key is given is "nullify"
  if (encrypted && (keyName != null || maskName != null)) {
    MaskDescriptionImpl mask = getMask(maskName == null ? "nullify" : maskName);
    mask.addColumn(schema);
  }
  List<TypeDescription> children = schema.getChildren();
  if (children != null) {
    for (TypeDescription child : children) {
      result += visitTypeTree(child, encrypted, provider);
    }
  }
  return result;
}
/**
 * Iterate through the encryption options given by the user and set up
 * our data structures.
 * @param provider the KeyProvider to use to generate keys; when null, a
 *     provider is created from the configuration
 * @param schema the type tree that we search for annotations
 * @param keyOverrides user specified key overrides
 * @return the encryption variants in variant-id order
 * @throws IOException if the key provider fails
 */
private WriterEncryptionVariant[] setupEncryption(
    KeyProvider provider,
    TypeDescription schema,
    Map<String, HadoopShims.KeyMetadata> keyOverrides) throws IOException {
  keyProvider = provider != null ? provider :
      CryptoUtils.getKeyProvider(conf, new SecureRandom());
  // Load the overrides into the cache so that we use the required key versions.
  for (HadoopShims.KeyMetadata key : keyOverrides.values()) {
    keys.put(key.getKeyName(), new WriterEncryptionKey(key));
  }
  int variantCount = visitTypeTree(schema, false, keyProvider);
  // Now that we have de-duped the keys and maskDescriptions, make the arrays
  int nextId = 0;
  if (variantCount > 0) {
    // assign mask ids and record the mask for every masked column
    for (MaskDescriptionImpl mask : maskDescriptions.values()) {
      mask.setId(nextId++);
      for (TypeDescription column : mask.getColumns()) {
        this.columnMaskDescriptions[column.getId()] = mask;
      }
    }
  }
  nextId = 0;
  int nextVariantId = 0;
  WriterEncryptionVariant[] result = new WriterEncryptionVariant[variantCount];
  // assign key ids, then variant ids using each key's sorted roots
  for (WriterEncryptionKey key : keys.values()) {
    key.setId(nextId++);
    key.sortRoots();
    for (WriterEncryptionVariant variant : key.getEncryptionRoots()) {
      result[nextVariantId] = variant;
      columnEncryption[variant.getRoot().getId()] = variant;
      variant.setId(nextVariantId++);
    }
  }
  return result;
}
/** Delegate the current memory estimate to the root tree writer. */
@Override
public long estimateMemory() {
  return this.treeWriter.estimateMemory();
}
}
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hdfs;
import static org.apache.hadoop.hdfs.client.HdfsClientConfigKeys.DFS_CLIENT_MAX_BLOCK_ACQUIRE_FAILURES_KEY;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertTrue;
import static org.junit.jupiter.api.Assertions.fail;
import java.io.EOFException;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.Random;
import java.util.concurrent.Callable;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.Future;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.TimeoutException;
import java.util.concurrent.atomic.AtomicInteger;
import org.apache.commons.lang3.StringUtils;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.ChecksumException;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hdfs.client.HdfsClientConfigKeys;
import org.apache.hadoop.hdfs.protocol.DatanodeInfo;
import org.apache.hadoop.hdfs.protocol.HdfsConstants.DatanodeReportType;
import org.apache.hadoop.hdfs.protocol.LocatedBlock;
import org.apache.hadoop.hdfs.protocol.LocatedBlocks;
import org.apache.hadoop.hdfs.protocol.datatransfer.DataTransferProtocol;
import org.apache.hadoop.hdfs.server.datanode.SimulatedFSDataset;
import org.apache.hadoop.io.IOUtils;
import org.apache.hadoop.test.GenericTestUtils;
import org.junit.jupiter.api.Assertions;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.BeforeAll;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.Timeout;
import org.mockito.Mockito;
import org.mockito.invocation.InvocationOnMock;
import org.mockito.stubbing.Answer;
import java.util.function.Supplier;
import org.slf4j.LoggerFactory;
import org.slf4j.Logger;
import org.slf4j.event.Level;
/**
* This class tests the DFS positional read functionality in a single node
* mini-cluster.
*/
public class TestPread {
// seed for deterministic random file contents
static final long seed = 0xDEADBEEFL;
static final int blockSize = 4096;
static final int numBlocksPerFile = 12;
static final int fileSize = numBlocksPerFile * blockSize;
// per-test flags, reset in setup() and enabled by individual tests
boolean simulatedStorage;
boolean isHedgedRead;
private static final Logger LOG =
    LoggerFactory.getLogger(TestPread.class.getName());
// captures DFSClient log output for the logging-assertion tests
private final GenericTestUtils.LogCapturer dfsClientLog =
    GenericTestUtils.LogCapturer.captureLogs(DFSClient.LOG);
/** Raise the DFSClient logger to WARN for the whole test class. */
@BeforeAll
public static void setLogLevel() {
  GenericTestUtils.setLogLevel(DFSClient.LOG, org.apache.log4j.Level.WARN);
}
/** Reset the per-test flags before each test. */
@BeforeEach
public void setup() {
  simulatedStorage = false;
  isHedgedRead = false;
}
/**
 * Create the test file: first sanity-check that reading beyond the end of
 * an empty file fails, then write {@code fileSize} bytes of seeded data.
 * @param fileSys the file system to write to
 * @param name the path of the file to create
 * @throws IOException if creation or the sanity reads fail unexpectedly
 */
private void writeFile(FileSystem fileSys, Path name) throws IOException {
  int replication = 3;// We need > 1 blocks to test out the hedged reads.
  // test empty file open and read
  DFSTestUtil.createFile(fileSys, name, fileSize, 0,
      blockSize, (short)replication, seed);
  FSDataInputStream in = fileSys.open(name);
  byte[] buffer = new byte[fileSize];
  // a zero-length read of an empty file must succeed
  in.readFully(0, buffer, 0, 0);
  IOException res = null;
  try { // read beyond the end of the file
    in.readFully(0, buffer, 0, 1);
  } catch (IOException e) {
    // should throw an exception
    res = e;
  }
  assertTrue(res != null, "Error reading beyond file boundary.");
  in.close();
  // assert directly on the delete result rather than assertTrue(false)
  assertTrue(fileSys.delete(name, true), "Cannot delete file");
  // now create the real file
  DFSTestUtil.createFile(fileSys, name, fileSize, fileSize,
      blockSize, (short) replication, seed);
}
/**
 * Verify that {@code actual} matches {@code expected} starting at offset
 * {@code from}, zeroing each verified byte so subsequent checks read a
 * clean buffer.
 */
private void checkAndEraseData(byte[] actual, int from, byte[] expected, String message) {
  for (int i = 0; i < actual.length; i++) {
    int pos = from + i;
    assertEquals(actual[i], expected[pos],
        message + " byte " + pos + " differs. expected "
            + expected[pos] + " actual " + actual[i]);
    actual[i] = 0;
  }
}
/**
 * Positional-read {@code length} bytes at {@code position} into
 * {@code buffer} starting at {@code offset}, looping until the request is
 * satisfied, and verify the stream's read statistics were advanced.
 * Hedged reads may read more bytes than requested, so only a lower bound
 * is asserted in that mode.
 */
private void doPread(FSDataInputStream stm, long position, byte[] buffer,
    int offset, int length) throws IOException {
  int nread = 0;
  long totalRead = 0;
  DFSInputStream dfstm = null;
  // read statistics are only available when the wrapped stream is a
  // DFSInputStream
  if (stm.getWrappedStream() instanceof DFSInputStream) {
    dfstm = (DFSInputStream) (stm.getWrappedStream());
    totalRead = dfstm.getReadStatistics().getTotalBytesRead();
  }
  while (nread < length) {
    int nbytes =
        stm.read(position + nread, buffer, offset + nread, length - nread);
    assertTrue(nbytes > 0, "Error in pread");
    nread += nbytes;
  }
  if (dfstm != null) {
    if (isHedgedRead) {
      // hedged mode may read extra bytes, so assert a lower bound only
      assertTrue(length <= dfstm.getReadStatistics().getTotalBytesRead() - totalRead,
          "Expected read statistic to be incremented");
    } else {
      assertEquals(length, dfstm.getReadStatistics().getTotalBytesRead() - totalRead,
          "Expected read statistic to be incremented");
    }
  }
}
/**
 * Exercise positional reads within and across block boundaries and check
 * the data against the expected buffer, interleaving sequential reads to
 * verify that preads do not disturb the stream position.
 */
private void pReadFile(FileSystem fileSys, Path name) throws IOException {
  FSDataInputStream stm = fileSys.open(name);
  byte[] expected = new byte[fileSize];
  if (simulatedStorage) {
    // simulated storage: derive expected bytes from the block locations
    // instead of the random seed
    assert fileSys instanceof DistributedFileSystem;
    DistributedFileSystem dfs = (DistributedFileSystem) fileSys;
    LocatedBlocks lbs = dfs.getClient().getLocatedBlocks(name.toString(),
        0, fileSize);
    DFSTestUtil.fillExpectedBuf(lbs, expected);
  } else {
    Random rand = new Random(seed);
    rand.nextBytes(expected);
  }
  // do a sanity check. Read first 4K bytes
  byte[] actual = new byte[4096];
  stm.readFully(actual);
  checkAndEraseData(actual, 0, expected, "Read Sanity Test");
  // now do a pread for the first 8K bytes
  actual = new byte[8192];
  doPread(stm, 0L, actual, 0, 8192);
  checkAndEraseData(actual, 0, expected, "Pread Test 1");
  // Now check to see if the normal read returns 4K-8K byte range
  actual = new byte[4096];
  stm.readFully(actual);
  checkAndEraseData(actual, 4096, expected, "Pread Test 2");
  // Now see if we can cross a single block boundary successfully
  // read 4K bytes from blockSize - 2K offset
  stm.readFully(blockSize - 2048, actual, 0, 4096);
  checkAndEraseData(actual, (blockSize - 2048), expected, "Pread Test 3");
  // now see if we can cross two block boundaries successfully
  // read blockSize + 4K bytes from blockSize - 2K offset
  actual = new byte[blockSize + 4096];
  stm.readFully(blockSize - 2048, actual);
  checkAndEraseData(actual, (blockSize - 2048), expected, "Pread Test 4");
  // now see if we can cross two block boundaries that are not cached
  // read blockSize + 4K bytes from 10*blockSize - 2K offset
  actual = new byte[blockSize + 4096];
  stm.readFully(10 * blockSize - 2048, actual);
  checkAndEraseData(actual, (10 * blockSize - 2048), expected, "Pread Test 5");
  // now check that even after all these preads, we can still read
  // bytes 8K-12K
  actual = new byte[4096];
  stm.readFully(actual);
  checkAndEraseData(actual, 8192, expected, "Pread Test 6");
  // done
  stm.close();
  // check block location caching
  stm = fileSys.open(name);
  stm.readFully(1, actual, 0, 4096);
  stm.readFully(4*blockSize, actual, 0, 4096);
  stm.readFully(7*blockSize, actual, 0, 4096);
  actual = new byte[3*4096];
  stm.readFully(0*blockSize, actual, 0, 3*4096);
  checkAndEraseData(actual, 0, expected, "Pread Test 7");
  actual = new byte[8*4096];
  stm.readFully(3*blockSize, actual, 0, 8*4096);
  checkAndEraseData(actual, 3*blockSize, expected, "Pread Test 8");
  // read the tail
  stm.readFully(11*blockSize+blockSize/2, actual, 0, blockSize/2);
  IOException res = null;
  try { // read beyond the end of the file
    stm.readFully(11*blockSize+blockSize/2, actual, 0, blockSize);
  } catch (IOException e) {
    // should throw an exception
    res = e;
  }
  assertTrue(res != null, "Error reading beyond file boundary.");
  stm.close();
}
/**
 * Test that a pread survives a full datanode restart: the cached block
 * locations become stale (datanodes come back on different ports) and
 * must be re-fetched from the namenode.
 */
private void datanodeRestartTest(MiniDFSCluster cluster, FileSystem fileSys,
    Path name) throws IOException {
  // skip this test if using simulated storage since simulated blocks
  // don't survive datanode restarts.
  if (simulatedStorage) {
    return;
  }
  int numBlocks = 1;
  assertTrue(numBlocks <= HdfsClientConfigKeys.DFS_CLIENT_MAX_BLOCK_ACQUIRE_FAILURES_DEFAULT);
  byte[] expected = new byte[numBlocks * blockSize];
  Random rand = new Random(seed);
  rand.nextBytes(expected);
  byte[] actual = new byte[numBlocks * blockSize];
  FSDataInputStream stm = fileSys.open(name);
  // read a block and get block locations cached as a result
  stm.readFully(0, actual);
  checkAndEraseData(actual, 0, expected, "Pread Datanode Restart Setup");
  // restart all datanodes. it is expected that they will
  // restart on different ports, hence, cached block locations
  // will no longer work.
  assertTrue(cluster.restartDataNodes());
  cluster.waitActive();
  // verify the block can be read again using the same InputStream
  // (via re-fetching of block locations from namenode). there is a
  // 3 sec sleep in chooseDataNode(), which can be shortened for
  // this test if configurable.
  stm.readFully(0, actual);
  checkAndEraseData(actual, 0, expected, "Pread Datanode Restart Test");
}
/** Delete {@code name}, asserting it existed before and is gone after. */
private void cleanupFile(FileSystem fileSys, Path name) throws IOException {
  assertTrue(fileSys.exists(name));
  assertTrue(fileSys.delete(name, true));
  Assertions.assertFalse(fileSys.exists(name));
}
/**
 * Build a callable that preads the whole file, for submitting to an
 * executor in the concurrent hedged-read tests.
 * @param fileSys the file system to read from
 * @param file the file to pread
 * @return a callable that invokes {@link #pReadFile} once
 */
private Callable<Void> getPReadFileCallable(final FileSystem fileSys,
    final Path file) {
  // lambda replaces the anonymous Callable class; behavior is identical
  return () -> {
    pReadFile(fileSys, file);
    return null;
  };
}
/**
 * Tests positional read in DFS, both with and without the datanode
 * transferTo path.
 */
@Test
public void testPreadDFS() throws IOException {
  Configuration conf = new Configuration();
  dfsPreadTest(conf, false, true); // normal pread
  dfsPreadTest(conf, true, true); // trigger read code path without
  // transferTo.
}
/** Tests positional read with client checksum verification disabled. */
@Test
public void testPreadDFSNoChecksum() throws IOException {
  Configuration conf = new Configuration();
  GenericTestUtils.setLogLevel(DataTransferProtocol.LOG, Level.TRACE);
  dfsPreadTest(conf, false, false);
  dfsPreadTest(conf, true, false);
}
/**
 * Tests positional read in DFS, with hedged reads enabled.
 */
@Test
public void testHedgedPreadDFSBasic() throws IOException {
  isHedgedRead = true;
  Configuration conf = new Configuration();
  // 1ms threshold ensures hedged requests actually fire during the reads
  conf.setInt(HdfsClientConfigKeys.HedgedRead.THREADPOOL_SIZE_KEY, 5);
  conf.setLong(HdfsClientConfigKeys.HedgedRead.THRESHOLD_MILLIS_KEY, 1);
  dfsPreadTest(conf, false, true); // normal pread
  dfsPreadTest(conf, true, true); // trigger read code path without
  // transferTo.
}
/**
 * Regression test for the hedged read loop: with the primary fetch
 * delayed past the hedged threshold (and throwing a ChecksumException
 * once) and every datanode read delayed, the pread should iterate the
 * hedged-read loop the expected number of times and still succeed.
 */
@Test
public void testHedgedReadLoopTooManyTimes() throws IOException {
  Configuration conf = new Configuration();
  int numHedgedReadPoolThreads = 5;
  final int hedgedReadTimeoutMillis = 50;
  conf.setInt(HdfsClientConfigKeys.HedgedRead.THREADPOOL_SIZE_KEY,
      numHedgedReadPoolThreads);
  conf.setLong(HdfsClientConfigKeys.HedgedRead.THRESHOLD_MILLIS_KEY,
      hedgedReadTimeoutMillis);
  conf.setInt(HdfsClientConfigKeys.Retry.WINDOW_BASE_KEY, 0);
  // Set up the InjectionHandler
  DFSClientFaultInjector.set(Mockito.mock(DFSClientFaultInjector.class));
  DFSClientFaultInjector injector = DFSClientFaultInjector.get();
  final int sleepMs = 100;
  // sleep past the hedged-read threshold, then throw exactly one
  // ChecksumException (removed the dead "if (true)" wrapper)
  Mockito.doAnswer(new Answer<Void>() {
    @Override
    public Void answer(InvocationOnMock invocation) throws Throwable {
      Thread.sleep(hedgedReadTimeoutMillis + sleepMs);
      if (DFSClientFaultInjector.exceptionNum.compareAndSet(0, 1)) {
        System.out.println("-------------- throw Checksum Exception");
        throw new ChecksumException("ChecksumException test", 100);
      }
      return null;
    }
  }).when(injector).fetchFromDatanodeException();
  // delay every datanode read so the hedged request also takes time
  Mockito.doAnswer(new Answer<Void>() {
    @Override
    public Void answer(InvocationOnMock invocation) throws Throwable {
      Thread.sleep(sleepMs * 2);
      return null;
    }
  }).when(injector).readFromDatanodeDelay();
  MiniDFSCluster cluster = new MiniDFSCluster.Builder(conf).numDataNodes(2)
      .format(true).build();
  DistributedFileSystem fileSys = cluster.getFileSystem();
  DFSClient dfsClient = fileSys.getClient();
  FSDataOutputStream output = null;
  DFSInputStream input = null;
  String filename = "/hedgedReadMaxOut.dat";
  try {
    Path file = new Path(filename);
    output = fileSys.create(file, (short) 2);
    byte[] data = new byte[64 * 1024];
    output.write(data);
    output.flush();
    output.write(data);
    output.flush();
    output.write(data);
    output.flush();
    output.close();
    byte[] buffer = new byte[64 * 1024];
    input = dfsClient.open(filename);
    input.read(0, buffer, 0, 1024);
    input.close();
    assertEquals(3, input.getHedgedReadOpsLoopNumForTesting());
  } catch (BlockMissingException e) {
    // fail() replaces the assertTrue(false) anti-pattern
    fail("unexpected BlockMissingException");
  } finally {
    Mockito.reset(injector);
    IOUtils.cleanupWithLogger(null, input);
    IOUtils.cleanupWithLogger(null, output);
    fileSys.close();
    cluster.shutdown();
  }
}
/**
 * Verify hedged read metrics across three phases: (1) reads finish within
 * the (huge) threshold so no hedged reads occur; (2) the threshold is
 * lowered below the injected 50ms delay so hedged reads occur but the
 * pool keeps up; (3) many concurrent readers saturate the pool so some
 * hedged reads must run in the calling thread.
 */
@Test
public void testMaxOutHedgedReadPool() throws IOException,
    InterruptedException, ExecutionException {
  isHedgedRead = true;
  Configuration conf = new Configuration();
  int numHedgedReadPoolThreads = 5;
  final int initialHedgedReadTimeoutMillis = 50000;
  final int fixedSleepIntervalMillis = 50;
  conf.setInt(HdfsClientConfigKeys.HedgedRead.THREADPOOL_SIZE_KEY,
      numHedgedReadPoolThreads);
  conf.setLong(HdfsClientConfigKeys.HedgedRead.THRESHOLD_MILLIS_KEY,
      initialHedgedReadTimeoutMillis);
  // Set up the InjectionHandler
  DFSClientFaultInjector.set(Mockito.mock(DFSClientFaultInjector.class));
  DFSClientFaultInjector injector = DFSClientFaultInjector.get();
  // make preads sleep for 50ms
  Mockito.doAnswer(new Answer<Void>() {
    @Override
    public Void answer(InvocationOnMock invocation) throws Throwable {
      Thread.sleep(fixedSleepIntervalMillis);
      return null;
    }
  }).when(injector).startFetchFromDatanode();
  MiniDFSCluster cluster = new MiniDFSCluster.Builder(conf).numDataNodes(3)
      .format(true).build();
  DistributedFileSystem fileSys = cluster.getFileSystem();
  DFSClient dfsClient = fileSys.getClient();
  DFSHedgedReadMetrics metrics = dfsClient.getHedgedReadMetrics();
  // Metrics instance is static, so we need to reset counts from prior tests.
  metrics.hedgedReadOps.reset();
  metrics.hedgedReadOpsWin.reset();
  metrics.hedgedReadOpsInCurThread.reset();
  try {
    Path file1 = new Path("hedgedReadMaxOut.dat");
    writeFile(fileSys, file1);
    // Basic test. Reads complete within timeout. Assert that there were no
    // hedged reads.
    pReadFile(fileSys, file1);
    // assert that there were no hedged reads. 50ms + delta < 500ms
    assertTrue(metrics.getHedgedReadOps() == 0);
    assertTrue(metrics.getHedgedReadOpsInCurThread() == 0);
    /*
     * Reads take longer than timeout. But, only one thread reading. Assert
     * that there were hedged reads. But, none of the reads had to run in the
     * current thread.
     */
    {
      // re-open the file system with a 50ms hedged threshold (cache
      // disabled so the new configuration actually takes effect)
      Configuration conf2 = new Configuration(cluster.getConfiguration(0));
      conf2.setBoolean("fs.hdfs.impl.disable.cache", true);
      conf2.setLong(HdfsClientConfigKeys.HedgedRead.THRESHOLD_MILLIS_KEY, 50);
      fileSys.close();
      fileSys = (DistributedFileSystem)FileSystem.get(cluster.getURI(0), conf2);
      metrics = fileSys.getClient().getHedgedReadMetrics();
    }
    pReadFile(fileSys, file1);
    // assert that there were hedged reads
    assertTrue(metrics.getHedgedReadOps() > 0);
    assertTrue(metrics.getHedgedReadOpsInCurThread() == 0);
    /*
     * Multiple threads reading. Reads take longer than timeout. Assert that
     * there were hedged reads. And that reads had to run in the current
     * thread.
     */
    int factor = 10;
    int numHedgedReads = numHedgedReadPoolThreads * factor;
    long initialReadOpsValue = metrics.getHedgedReadOps();
    ExecutorService executor = Executors.newFixedThreadPool(numHedgedReads);
    ArrayList<Future<Void>> futures = new ArrayList<Future<Void>>();
    for (int i = 0; i < numHedgedReads; i++) {
      futures.add(executor.submit(getPReadFileCallable(fileSys, file1)));
    }
    for (int i = 0; i < numHedgedReads; i++) {
      futures.get(i).get();
    }
    assertTrue(metrics.getHedgedReadOps() > initialReadOpsValue);
    assertTrue(metrics.getHedgedReadOpsInCurThread() > 0);
    cleanupFile(fileSys, file1);
    executor.shutdown();
  } finally {
    fileSys.close();
    cluster.shutdown();
    Mockito.reset(injector);
  }
}
/**
 * Core pread scenario: write a multi-block file on a 3-datanode cluster,
 * pread it, restart the datanodes and pread again, then clean up.
 * @param conf the cluster configuration (mutated with test settings)
 * @param disableTransferTo when true, disable the datanode transferTo
 *     path to exercise the non-transferTo read code
 * @param verifyChecksum whether client-side checksum verification is on
 */
private void dfsPreadTest(Configuration conf, boolean disableTransferTo, boolean verifyChecksum)
    throws IOException {
  conf.setLong(DFSConfigKeys.DFS_BLOCK_SIZE_KEY, 4096);
  conf.setLong(HdfsClientConfigKeys.Read.PREFETCH_SIZE_KEY, 4096);
  // Set short retry timeouts so this test runs faster
  conf.setInt(HdfsClientConfigKeys.Retry.WINDOW_BASE_KEY, 0);
  if (simulatedStorage) {
    SimulatedFSDataset.setFactory(conf);
  }
  if (disableTransferTo) {
    conf.setBoolean("dfs.datanode.transferTo.allowed", false);
  }
  MiniDFSCluster cluster = new MiniDFSCluster.Builder(conf).numDataNodes(3).build();
  FileSystem fileSys = cluster.getFileSystem();
  fileSys.setVerifyChecksum(verifyChecksum);
  try {
    Path file1 = new Path("/preadtest.dat");
    writeFile(fileSys, file1);
    pReadFile(fileSys, file1);
    datanodeRestartTest(cluster, fileSys, file1);
    cleanupFile(fileSys, file1);
  } finally {
    fileSys.close();
    cluster.shutdown();
  }
}
/** Re-run testPreadDFS on top of the simulated datanode storage. */
@Test
public void testPreadDFSSimulated() throws IOException {
  simulatedStorage = true;
  testPreadDFS();
}
/**
 * Tests positional read in LocalFS.
 */
@Test
public void testPreadLocalFS() throws IOException {
  Configuration conf = new HdfsConfiguration();
  FileSystem fileSys = FileSystem.getLocal(conf);
  try {
    Path file1 = new Path(GenericTestUtils.getTempPath("preadtest.dat"));
    writeFile(fileSys, file1);
    pReadFile(fileSys, file1);
    cleanupFile(fileSys, file1);
  } finally {
    fileSys.close();
  }
}
/**
 * Verify that truncating a file while a reader holds stale block
 * locations makes a read past the new EOF fail promptly with an
 * EOFException instead of looping forever re-querying locations.
 */
@Test
public void testTruncateWhileReading() throws Exception {
  Path path = new Path("/testfile");
  final int blockSize = 512;
  // prevent initial pre-fetch of multiple block locations
  Configuration conf = new Configuration();
  conf.setLong(HdfsClientConfigKeys.Read.PREFETCH_SIZE_KEY, blockSize);
  MiniDFSCluster cluster =
      new MiniDFSCluster.Builder(conf).numDataNodes(1).build();
  try {
    DistributedFileSystem fs = cluster.getFileSystem();
    // create multi-block file
    FSDataOutputStream dos =
        fs.create(path, true, blockSize, (short)1, blockSize);
    dos.write(new byte[blockSize*3]);
    dos.close();
    // truncate a file while it's open
    final FSDataInputStream dis = fs.open(path);
    try {
      while (!fs.truncate(path, 10)) {
        Thread.sleep(10);
      }
      // verify that reading bytes outside the initial pre-fetch do
      // not send the client into an infinite loop querying locations.
      ExecutorService executor = Executors.newFixedThreadPool(1);
      Future<?> future = executor.submit(new Callable<Void>() {
        @Override
        public Void call() throws IOException {
          // read from 2nd block.
          dis.readFully(blockSize, new byte[4]);
          return null;
        }
      });
      try {
        future.get(4, TimeUnit.SECONDS);
        Assertions.fail();
      } catch (ExecutionException ee) {
        assertTrue(ee.getCause() instanceof EOFException, ee.toString());
      } finally {
        future.cancel(true);
        executor.shutdown();
      }
    } finally {
      // close the input stream that the original test leaked
      dis.close();
    }
  } finally {
    cluster.shutdown();
  }
}
/**
 * Test logging in getFromOneDataNode when the number of IOExceptions can be recovered by
 * retrying on a different datanode or by refreshing data nodes and retrying each data node one
 * more time.
 */
@Test
@Timeout(value = 120)
public void testGetFromOneDataNodeExceptionLogging() throws IOException {
  // With maxBlockAcquireFailures = 0, we would try on each datanode only once and if
  // we fail on all three datanodes, we fail the read request.
  testGetFromOneDataNodeExceptionLogging(0, 0);
  testGetFromOneDataNodeExceptionLogging(1, 0);
  testGetFromOneDataNodeExceptionLogging(2, 0);
  // With maxBlockAcquireFailures = 1, we will re-try each datanode a second time.
  // So, we can tolerate up to 5 datanode fetch failures.
  testGetFromOneDataNodeExceptionLogging(3, 1);
  testGetFromOneDataNodeExceptionLogging(4, 1);
  testGetFromOneDataNodeExceptionLogging(5, 1);
}
/**
 * Each failed IOException would result in a WARN log of "Failed to connect to XXX. Retry with
 * the next available datanode.". We verify the number of such log lines match the number of
 * failed DNs.
 * <p>
 * @param ioExceptions number of IOExceptions to throw during a test.
 * @param maxBlockAcquireFailures number of refreshLocation we would perform once we mark
 *                                all current data nodes as dead.
 */
private void testGetFromOneDataNodeExceptionLogging(final int ioExceptions,
    int maxBlockAcquireFailures)
    throws IOException {
  dfsClientLog.clearOutput();
  // skip unrecoverable combinations: with 3 datanodes the client can
  // tolerate at most 3 * (maxBlockAcquireFailures + 1) - 1 failures
  if (ioExceptions < 0 || ioExceptions >= 3 * (maxBlockAcquireFailures+1)) {
    return;
  }
  Configuration conf = new Configuration();
  conf.setInt(DFS_CLIENT_MAX_BLOCK_ACQUIRE_FAILURES_KEY, maxBlockAcquireFailures);
  final int[] count = {0};
  // Set up the InjectionHandler
  DFSClientFaultInjector.set(Mockito.mock(DFSClientFaultInjector.class));
  DFSClientFaultInjector injector = DFSClientFaultInjector.get();
  // fail the first `ioExceptions` datanode fetches, then succeed
  Mockito.doAnswer(new Answer<Void>() {
    @Override
    public Void answer(InvocationOnMock invocation) throws Throwable {
      if (count[0] < ioExceptions) {
        LOG.info("-------------- throw IOException " + count[0]);
        count[0]++;
        throw new IOException("IOException test");
      }
      return null;
    }
  }).when(injector).fetchFromDatanodeException();
  MiniDFSCluster cluster = new MiniDFSCluster.Builder(conf).numDataNodes(3).format(true).build();
  DistributedFileSystem fileSys = cluster.getFileSystem();
  DFSClient dfsClient = fileSys.getClient();
  DFSInputStream input = null;
  Path file = new Path("/testfile.dat");
  try {
    DFSTestUtil.createFile(fileSys, file, fileSize, fileSize, blockSize, (short) 3, seed);
    byte[] buffer = new byte[fileSize];
    input = dfsClient.open(file.toString());
    input.read(0, buffer, 0, fileSize);
    // one retry WARN line is expected per injected failure
    assertEquals(ioExceptions, StringUtils.countMatches(dfsClientLog.getOutput(),
        "Retry with the next available datanode."));
  } finally {
    Mockito.reset(injector);
    IOUtils.cleanupWithLogger(LOG, input);
    fileSys.close();
    cluster.shutdown();
    dfsClientLog.clearOutput();
  }
}
/**
 * Test the case where we always hit IOExceptions, causing the read request to fail.
 */
@Test
@Timeout(value = 60)
public void testFetchFromDataNodeExceptionLoggingFailedRequest()
    throws IOException {
  // run both without retries and with a single extra round of retries
  testFetchFromDataNodeExceptionLoggingFailedRequest(0);
  testFetchFromDataNodeExceptionLoggingFailedRequest(1);
}
/**
 * Verifies that BlockMissingException is thrown and that there is one ERROR
 * log line of "Failed to read from all available datanodes for file"
 * and 3 * (maxBlockAcquireFailures+1) ERROR log lines of
 * "Exception when fetching file /testfile.dat at position".
 * <p>
 * maxBlockAcquireFailures determines how many times we can retry when we fail to read from
 * all three data nodes.
 * <ul>
 * <li>maxBlockAcquireFailures = 0: no retry. We will only read from each of the three
 * data nodes only once. We expect to see 3 ERROR log lines of "Exception when fetching file
 * /testfile.dat at position".
 * </li>
 * <li>maxBlockAcquireFailures = 1: 1 retry. We will read from each of the three data
 * nodes twice. We expect to see 6 ERROR log lines of "Exception when fetching file
 * /testfile.dat at position".
 * </li>
 * </ul>
 */
private void testFetchFromDataNodeExceptionLoggingFailedRequest(int maxBlockAcquireFailures)
    throws IOException {
  dfsClientLog.clearOutput();
  Configuration conf = new Configuration();
  conf.setInt(DFS_CLIENT_MAX_BLOCK_ACQUIRE_FAILURES_KEY, maxBlockAcquireFailures);
  // Set up the InjectionHandler: unlike the bounded variant above, every
  // datanode fetch throws, so the read can never succeed however many
  // retries are allowed.
  DFSClientFaultInjector.set(Mockito.mock(DFSClientFaultInjector.class));
  DFSClientFaultInjector injector = DFSClientFaultInjector.get();
  Mockito.doAnswer(new Answer<Void>() {
    @Override
    public Void answer(InvocationOnMock invocation) throws Throwable {
      LOG.info("-------------- throw IOException ");
      throw new IOException("IOException test");
    }
  }).when(injector).fetchFromDatanodeException();
  MiniDFSCluster cluster = new MiniDFSCluster.Builder(conf).numDataNodes(3).format(true).build();
  DistributedFileSystem fileSys = cluster.getFileSystem();
  DFSClient dfsClient = fileSys.getClient();
  DFSInputStream input = null;
  Path file = new Path("/testfile.dat");
  try {
    DFSTestUtil.createFile(fileSys, file, fileSize, fileSize, blockSize, (short) 3, seed);
    byte[] buffer = new byte[fileSize];
    input = dfsClient.open(file.toString());
    input.read(0, buffer, 0, fileSize);
    fail();
  } catch (BlockMissingException expected) {
    // Logging from pread: exactly one summary line for the failed file...
    assertEquals(1, StringUtils.countMatches(dfsClientLog.getOutput(),
        "Failed to read from all available datanodes for file"));
    // ...and one fetch-failure line per datanode attempt.
    assertEquals(3 * (maxBlockAcquireFailures + 1),
        StringUtils.countMatches(dfsClientLog.getOutput(),
            "Exception when fetching file /testfile.dat at position"));
    // Logging from actualGetFromOneDataNode: one retry line per attempt.
    assertEquals(3 * (maxBlockAcquireFailures + 1),
        StringUtils.countMatches(dfsClientLog.getOutput(),
            "Retry with the next available datanode."));
  } finally {
    // Undo fault injection and release cluster resources unconditionally.
    Mockito.reset(injector);
    IOUtils.cleanupWithLogger(LOG, input);
    fileSys.close();
    cluster.shutdown();
    dfsClientLog.clearOutput();
  }
}
/**
 * Hedged read where every datanode fetch fails with a ChecksumException:
 * the read must surface a BlockMissingException, the hedged-read loop must
 * iterate the expected number of times, and no hedged read op may be
 * counted as successful.
 */
@Test
@Timeout(value = 30)
public void testHedgedReadFromAllDNFailed() throws IOException {
  Configuration conf = new Configuration();
  int numHedgedReadPoolThreads = 5;
  final int hedgedReadTimeoutMillis = 50;
  conf.setInt(HdfsClientConfigKeys.HedgedRead.THREADPOOL_SIZE_KEY,
      numHedgedReadPoolThreads);
  conf.setLong(HdfsClientConfigKeys.HedgedRead.THRESHOLD_MILLIS_KEY,
      hedgedReadTimeoutMillis);
  conf.setInt(HdfsClientConfigKeys.Retry.WINDOW_BASE_KEY, 0);
  // Set up the InjectionHandler: every datanode fetch fails, so no hedged
  // request can ever win.
  DFSClientFaultInjector.set(Mockito.mock(DFSClientFaultInjector.class));
  DFSClientFaultInjector injector = DFSClientFaultInjector.get();
  Mockito.doAnswer(new Answer<Void>() {
    @Override
    public Void answer(InvocationOnMock invocation) throws Throwable {
      // Unconditional failure; the method ends with the throw, so no
      // return statement is needed.
      LOG.info("-------------- throw Checksum Exception");
      throw new ChecksumException("ChecksumException test", 100);
    }
  }).when(injector).fetchFromDatanodeException();
  MiniDFSCluster cluster = new MiniDFSCluster.Builder(conf).numDataNodes(3)
      .format(true).build();
  DistributedFileSystem fileSys = cluster.getFileSystem();
  DFSClient dfsClient = fileSys.getClient();
  FSDataOutputStream output = null;
  DFSInputStream input = null;
  String filename = "/hedgedReadMaxOut.dat";
  DFSHedgedReadMetrics metrics = dfsClient.getHedgedReadMetrics();
  // Metrics instance is static, so we need to reset counts from prior tests.
  metrics.hedgedReadOps.reset();
  try {
    Path file = new Path(filename);
    output = fileSys.create(file, (short) 2);
    byte[] data = new byte[64 * 1024];
    output.write(data);
    output.flush();
    output.close();
    byte[] buffer = new byte[64 * 1024];
    input = dfsClient.open(filename);
    input.read(0, buffer, 0, 1024);
    Assertions.fail("Reading the block should have thrown BlockMissingException");
  } catch (BlockMissingException e) {
    // The result of 9 is due to 2 blocks by 4 iterations plus one because
    // hedgedReadOpsLoopNumForTesting is incremented at start of the loop.
    assertEquals(9, input.getHedgedReadOpsLoopNumForTesting());
    // No hedged read can succeed when every replica fails; assertEquals
    // gives a more useful failure message than assertTrue(x == 0).
    assertEquals(0, metrics.getHedgedReadOps());
  } finally {
    // Undo fault injection and release all cluster resources.
    Mockito.reset(injector);
    IOUtils.cleanupWithLogger(LOG, input);
    IOUtils.cleanupWithLogger(LOG, output);
    fileSys.close();
    cluster.shutdown();
  }
}
/**
 * Non-hedged pread with stale block locations.
 * Steps: write a file with RF=2 on DN1/DN2; open a stream whose cached
 * LocatedBlock lists [DN1, DN2]; move the replica from DN2 to DN3; stop
 * DN1; from then on getBlockLocations() keeps reporting DN3 as the last
 * location while the only live replica is on DN3. The read must still
 * succeed within one failure.
 */
@Test
public void testPreadFailureWithChangedBlockLocations() throws Exception {
  final int maxAllowedFailures = 1;
  doPreadTestWithChangedLocations(maxAllowedFailures);
}
/**
 * Hedged-read variant of the changed-block-locations scenario.
 * Steps: write a file with RF=2 on DN1/DN2; open a stream whose cached
 * LocatedBlock lists [DN1, DN2]; move the replica from DN2 to DN3; stop
 * DN1; getBlockLocations() keeps reporting DN3 as the last location.
 * A sleep is injected before each hedged get so the location change can
 * land while a read is in flight; the read must succeed within two
 * failures.
 */
@Test
@Timeout(value = 60)
public void testPreadHedgedFailureWithChangedBlockLocations()
    throws Exception {
  isHedgedRead = true;
  // Remember the current injector so it can be restored afterwards.
  DFSClientFaultInjector old = DFSClientFaultInjector.get();
  try {
    DFSClientFaultInjector.set(new DFSClientFaultInjector() {
      @Override
      public void sleepBeforeHedgedGet() {
        try {
          Thread.sleep(500);
        } catch (InterruptedException e) {
          // Restore the interrupt status instead of swallowing it so the
          // hedged-read thread can still observe the interruption.
          Thread.currentThread().interrupt();
        }
      }
    });
    doPreadTestWithChangedLocations(2);
  } finally {
    DFSClientFaultInjector.set(old);
  }
}
/**
 * Shared driver for the changed-block-locations pread tests.
 * Writes a small RF=2 file, spies on getLocatedBlocks() so that after the
 * first call the (soon-to-be-stopped) original location is always reported
 * first, moves the replica to a third datanode, stops the first original
 * location, and then verifies a positional read still returns the full data
 * with at most {@code maxFailures} recorded failures.
 *
 * @param maxFailures upper bound on DFSInputStream.failures after the read
 */
private void doPreadTestWithChangedLocations(int maxFailures)
    throws IOException, TimeoutException, InterruptedException {
  GenericTestUtils.setLogLevel(DFSClient.LOG, Level.DEBUG);
  Configuration conf = new HdfsConfiguration();
  conf.setInt(DFSConfigKeys.DFS_REPLICATION_KEY, 2);
  conf.setInt(DFSConfigKeys.DFS_HEARTBEAT_INTERVAL_KEY, 1);
  // Hedged-read settings only apply when the caller enabled hedged mode.
  if (isHedgedRead) {
    conf.setInt(HdfsClientConfigKeys.HedgedRead.THRESHOLD_MILLIS_KEY, 100);
    conf.setInt(HdfsClientConfigKeys.HedgedRead.THREADPOOL_SIZE_KEY, 2);
    conf.setInt(HdfsClientConfigKeys.Retry.WINDOW_BASE_KEY, 1000);
  }
  try (MiniDFSCluster cluster =
      new MiniDFSCluster.Builder(conf).numDataNodes(3).build()) {
    DistributedFileSystem dfs = cluster.getFileSystem();
    Path p = new Path("/test");
    String data = "testingmissingblock";
    DFSTestUtil.writeFile(dfs, p, data);
    DFSTestUtil.waitForReplication(dfs, p, (short) 2, 10000);
    FSDataInputStream in = dfs.open(p);
    List<LocatedBlock> blocks = DFSTestUtil.getAllBlocks(in);
    LocatedBlock lb = blocks.get(0);
    blocks = DFSTestUtil.getAllBlocks(in);
    // After this loop 'locations' holds the locations of the last block;
    // the file is small enough to have a single block.
    DatanodeInfo[] locations = null;
    for (LocatedBlock locatedBlock : blocks) {
      locations = locatedBlock.getLocations();
      DFSClient.LOG
          .info(locatedBlock.getBlock() + " " + Arrays.toString(locations));
    }
    // The first reported location is the one we will later stop.
    final DatanodeInfo validDownLocation = locations[0];
    final DFSClient client = dfs.getClient();
    DFSClient dfsClient = Mockito.spy(client);
    // Keep the valid location as last in the locations list for second
    // requests
    // onwards.
    final AtomicInteger count = new AtomicInteger(0);
    Mockito.doAnswer(new Answer<LocatedBlocks>() {
      @Override
      public LocatedBlocks answer(InvocationOnMock invocation)
          throws Throwable {
        // First call passes through unmodified; later calls reorder the
        // locations so the stopped node comes first.
        if (count.compareAndSet(0, 1)) {
          return (LocatedBlocks) invocation.callRealMethod();
        }
        Object obj = invocation.callRealMethod();
        LocatedBlocks locatedBlocks = (LocatedBlocks) obj;
        LocatedBlock lb = locatedBlocks.get(0);
        DatanodeInfo[] locations = lb.getLocations();
        if (!(locations[0].getName().equals(validDownLocation.getName()))) {
          // Latest location which is currently down, should be first
          DatanodeInfo l = locations[0];
          locations[0] = locations[locations.length - 1];
          locations[locations.length - 1] = l;
        }
        return locatedBlocks;
      }
    }).when(dfsClient).getLocatedBlocks(p.toString(), 0);
    // Findout target node to move the block to: any live node that does
    // not already hold a replica.
    DatanodeInfo[] nodes =
        cluster.getNameNodeRpc().getDatanodeReport(DatanodeReportType.LIVE);
    DatanodeInfo toMove = null;
    List<DatanodeInfo> locationsList = Arrays.asList(locations);
    for (DatanodeInfo node : nodes) {
      if (locationsList.contains(node)) {
        continue;
      }
      toMove = node;
      break;
    }
    // STEP 2: Open stream
    DFSInputStream din = dfsClient.open(p.toString());
    // STEP 3: Move replica from the second location to the chosen target.
    final DatanodeInfo source = locations[1];
    final DatanodeInfo destination = toMove;
    DFSTestUtil.replaceBlock(lb.getBlock(), source, locations[1], toMove);
    // Wait for replica to get deleted from the source and show up on the
    // destination in the NameNode's view.
    GenericTestUtils.waitFor(new Supplier<Boolean>() {
      @Override
      public Boolean get() {
        try {
          LocatedBlocks lbs = dfsClient.getLocatedBlocks(p.toString(), 0);
          LocatedBlock lb = lbs.get(0);
          List<DatanodeInfo> locations = Arrays.asList(lb.getLocations());
          DFSClient.LOG
              .info("Source :" + source + ", destination: " + destination);
          DFSClient.LOG.info("Got updated locations :" + locations);
          return locations.contains(destination)
              && !locations.contains(source);
        } catch (IOException e) {
          DFSClient.LOG.error("Problem in getting block locations", e);
        }
        // NOTE(review): returns null (not false) on IOException; waitFor
        // presumably treats null as "not yet" — confirm against
        // GenericTestUtils before changing.
        return null;
      }
    }, 1000, 10000);
    DFSTestUtil.waitForReplication(cluster, lb.getBlock(), 1, 2, 0);
    // STEP 4: Stop first node in new locations
    cluster.stopDataNode(validDownLocation.getName());
    DFSClient.LOG.info("Starting read");
    byte[] buf = new byte[1024];
    int n = din.read(0, buf, 0, data.length());
    // The full payload must be read back despite the stale first location.
    assertEquals(data.length(), n);
    assertEquals(data, new String(buf, 0, n), "Data should be read");
    assertTrue(din.failures <= maxFailures,
        "Read should complete with maximum " + maxFailures
        + " failures, but completed with " + din.failures);
    DFSClient.LOG.info("Read completed");
  }
}
/** Allows running the basic DFS pread test directly from the command line. */
public static void main(String[] args) throws Exception {
  TestPread test = new TestPread();
  test.testPreadDFS();
}
}
|
apache/ozone | 37,417 | hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/ozone/client/rpc/TestBlockOutputStream.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.ozone.client.rpc;
import static org.apache.hadoop.hdds.protocol.datanode.proto.ContainerProtos.Type.PutBlock;
import static org.apache.hadoop.hdds.protocol.datanode.proto.ContainerProtos.Type.WriteChunk;
import static org.apache.hadoop.hdds.scm.ScmConfigKeys.OZONE_DATANODE_PIPELINE_LIMIT;
import static org.apache.hadoop.hdds.scm.ScmConfigKeys.OZONE_SCM_DEADNODE_INTERVAL;
import static org.apache.hadoop.hdds.scm.ScmConfigKeys.OZONE_SCM_STALENODE_INTERVAL;
import static org.apache.hadoop.ozone.OzoneConfigKeys.OZONE_SCM_BLOCK_SIZE;
import static org.apache.hadoop.ozone.container.TestHelper.validateData;
import static org.assertj.core.api.Assertions.assertThat;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertInstanceOf;
import java.io.IOException;
import java.time.Duration;
import java.util.UUID;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.TimeoutException;
import java.util.stream.Stream;
import org.apache.commons.lang3.RandomUtils;
import org.apache.hadoop.hdds.client.ReplicationFactor;
import org.apache.hadoop.hdds.client.ReplicationType;
import org.apache.hadoop.hdds.conf.ConfigurationSource;
import org.apache.hadoop.hdds.conf.DatanodeRatisServerConfig;
import org.apache.hadoop.hdds.conf.OzoneConfiguration;
import org.apache.hadoop.hdds.conf.StorageUnit;
import org.apache.hadoop.hdds.protocol.datanode.proto.ContainerProtos.ChecksumType;
import org.apache.hadoop.hdds.protocol.proto.HddsProtos;
import org.apache.hadoop.hdds.ratis.conf.RatisClientConfig;
import org.apache.hadoop.hdds.scm.OzoneClientConfig;
import org.apache.hadoop.hdds.scm.XceiverClientManager;
import org.apache.hadoop.hdds.scm.XceiverClientMetrics;
import org.apache.hadoop.hdds.scm.storage.BufferPool;
import org.apache.hadoop.hdds.scm.storage.RatisBlockOutputStream;
import org.apache.hadoop.ozone.ClientConfigForTesting;
import org.apache.hadoop.ozone.MiniOzoneCluster;
import org.apache.hadoop.ozone.OzoneConfigKeys;
import org.apache.hadoop.ozone.client.ObjectStore;
import org.apache.hadoop.ozone.client.OzoneClient;
import org.apache.hadoop.ozone.client.OzoneClientFactory;
import org.apache.hadoop.ozone.client.io.KeyOutputStream;
import org.apache.hadoop.ozone.client.io.OzoneOutputStream;
import org.apache.hadoop.ozone.container.TestHelper;
import org.apache.ozone.test.tag.Flaky;
import org.junit.jupiter.api.AfterAll;
import org.junit.jupiter.api.BeforeAll;
import org.junit.jupiter.api.TestInstance;
import org.junit.jupiter.params.ParameterizedTest;
import org.junit.jupiter.params.provider.Arguments;
import org.junit.jupiter.params.provider.MethodSource;
@TestInstance(TestInstance.Lifecycle.PER_CLASS)
class TestBlockOutputStream {
// Write-pipeline geometry shared by all tests: a chunk is 100 bytes, a
// flush covers 2 chunks (200), the buffered maximum is 2 flushes (400),
// and a block holds 2 max-flush units (800 bytes).
static final int CHUNK_SIZE = 100;
static final int FLUSH_SIZE = 2 * CHUNK_SIZE;
static final int MAX_FLUSH_SIZE = 2 * FLUSH_SIZE;
static final int BLOCK_SIZE = 2 * MAX_FLUSH_SIZE;
// Volume and bucket created once by createCluster() and reused by every test.
static final String VOLUME = "testblockoutputstream";
static final String BUCKET = VOLUME;
// Shared mini cluster, started in init() and torn down in shutdown().
private MiniOzoneCluster cluster;
/** Creates a test cluster with the default of five datanodes. */
static MiniOzoneCluster createCluster() throws IOException,
    InterruptedException, TimeoutException {
  final int defaultDatanodeCount = 5;
  return createCluster(defaultDatanodeCount);
}
/**
 * Builds and starts a MiniOzoneCluster configured for the small
 * chunk/flush/block sizes used by this class, waits for a THREE-factor
 * pipeline, and pre-creates the shared volume and bucket.
 *
 * @param datanodes number of datanodes to start
 * @return the running cluster (caller owns shutdown)
 */
static MiniOzoneCluster createCluster(int datanodes) throws IOException,
    InterruptedException, TimeoutException {
  OzoneConfiguration conf = new OzoneConfiguration();
  // Base client settings; individual tests override flush-delay and
  // piggybacking via newClientConfig().
  OzoneClientConfig clientConfig = conf.getObject(OzoneClientConfig.class);
  clientConfig.setChecksumType(ChecksumType.NONE);
  clientConfig.setStreamBufferFlushDelay(false);
  clientConfig.setEnablePutblockPiggybacking(true);
  conf.setFromObject(clientConfig);
  // Short stale/dead intervals so node-failure tests converge quickly.
  conf.setTimeDuration(OZONE_SCM_STALENODE_INTERVAL, 3, TimeUnit.SECONDS);
  conf.setTimeDuration(OZONE_SCM_DEADNODE_INTERVAL, 6, TimeUnit.SECONDS);
  conf.setQuietMode(false);
  conf.setStorageSize(OZONE_SCM_BLOCK_SIZE, 4, StorageUnit.MB);
  conf.setInt(OZONE_DATANODE_PIPELINE_LIMIT, 3);
  conf.setBoolean(OzoneConfigKeys.OZONE_HBASE_ENHANCEMENTS_ALLOWED, true);
  conf.setBoolean("ozone.client.hbase.enhancements.allowed", true);
  // Aggressive Ratis server timeouts keep failure-injection tests fast.
  DatanodeRatisServerConfig ratisServerConfig =
      conf.getObject(DatanodeRatisServerConfig.class);
  ratisServerConfig.setRequestTimeOut(Duration.ofSeconds(3));
  ratisServerConfig.setWatchTimeOut(Duration.ofSeconds(3));
  conf.setFromObject(ratisServerConfig);
  RatisClientConfig.RaftConfig raftClientConfig =
      conf.getObject(RatisClientConfig.RaftConfig.class);
  raftClientConfig.setRpcRequestTimeout(Duration.ofSeconds(3));
  raftClientConfig.setRpcWatchRequestTimeout(Duration.ofSeconds(5));
  conf.setFromObject(raftClientConfig);
  RatisClientConfig ratisClientConfig =
      conf.getObject(RatisClientConfig.class);
  ratisClientConfig.setWriteRequestTimeout(Duration.ofSeconds(30));
  ratisClientConfig.setWatchRequestTimeout(Duration.ofSeconds(30));
  conf.setFromObject(ratisClientConfig);
  // Apply the tiny buffer geometry defined by the class constants.
  ClientConfigForTesting.newBuilder(StorageUnit.BYTES)
      .setBlockSize(BLOCK_SIZE)
      .setChunkSize(CHUNK_SIZE)
      .setStreamBufferFlushSize(FLUSH_SIZE)
      .setStreamBufferMaxSize(MAX_FLUSH_SIZE)
      .applyTo(conf);
  MiniOzoneCluster cluster = MiniOzoneCluster.newBuilder(conf)
      .setNumDatanodes(datanodes)
      .build();
  cluster.waitForClusterToBeReady();
  cluster.waitForPipelineTobeReady(HddsProtos.ReplicationFactor.THREE,
      180000);
  // Pre-create the volume/bucket that all tests write into.
  try (OzoneClient client = cluster.newClient()) {
    ObjectStore objectStore = client.getObjectStore();
    objectStore.createVolume(VOLUME);
    objectStore.getVolume(VOLUME).createBucket(BUCKET);
  }
  return cluster;
}
@BeforeAll
void init() throws Exception {
  // One shared cluster for the whole class (PER_CLASS lifecycle).
  cluster = createCluster();
}
@AfterAll
void shutdown() {
  // Guard against init() having failed before the cluster was created.
  if (cluster == null) {
    return;
  }
  cluster.shutdown();
}
/**
 * Supplies every combination of (flushDelay, enablePiggybacking) for the
 * parameterized tests, in the order (T,T), (T,F), (F,T), (F,F).
 */
private static Stream<Arguments> clientParameters() {
  return Stream.of(Boolean.TRUE, Boolean.FALSE)
      .flatMap(flushDelay -> Stream.of(Boolean.TRUE, Boolean.FALSE)
          .map(piggybacking -> Arguments.of(flushDelay, piggybacking)));
}
/**
 * Builds a client config with checksums disabled and the supplied
 * flush-delay and putblock-piggybacking settings.
 */
static OzoneClientConfig newClientConfig(ConfigurationSource source,
    boolean flushDelay, boolean enablePiggybacking) {
  final OzoneClientConfig conf = source.getObject(OzoneClientConfig.class);
  conf.setChecksumType(ChecksumType.NONE);
  conf.setStreamBufferFlushDelay(flushDelay);
  conf.setEnablePutblockPiggybacking(enablePiggybacking);
  return conf;
}
/**
 * Opens an RPC client whose configuration is the given cluster conf
 * overlaid with the supplied per-test client settings.
 */
static OzoneClient newClient(OzoneConfiguration conf,
    OzoneClientConfig config) throws IOException {
  final OzoneConfiguration merged = new OzoneConfiguration(conf);
  merged.setFromObject(config);
  return OzoneClientFactory.getRpcClient(merged);
}
/**
 * Writes two half-chunk payloads: data smaller than a chunk stays buffered
 * until flush, and the metric/bookkeeping state of the BlockOutputStream is
 * checked after write, flush, and close.
 */
@ParameterizedTest
@MethodSource("clientParameters")
void testWriteLessThanChunkSize(boolean flushDelay, boolean enablePiggybacking) throws Exception {
  OzoneClientConfig config = newClientConfig(cluster.getConf(), flushDelay, enablePiggybacking);
  try (OzoneClient client = newClient(cluster.getConf(), config)) {
    // Snapshot op counters; metrics are global, so all later assertions are
    // relative to these baselines.
    XceiverClientMetrics metrics =
        XceiverClientManager.getXceiverClientMetrics();
    long writeChunkCount = metrics.getContainerOpCountMetrics(WriteChunk);
    long putBlockCount = metrics.getContainerOpCountMetrics(PutBlock);
    long pendingWriteChunkCount =
        metrics.getPendingContainerOpCountMetrics(WriteChunk);
    long pendingPutBlockCount =
        metrics.getPendingContainerOpCountMetrics(PutBlock);
    long totalOpCount = metrics.getTotalOpCount();
    String keyName = getKeyName();
    OzoneOutputStream key = createKey(client, keyName);
    int dataLength = 50;
    final int totalWriteLength = dataLength * 2;
    byte[] data1 = RandomUtils.secure().randomBytes(dataLength);
    key.write(data1);
    KeyOutputStream keyOutputStream =
        assertInstanceOf(KeyOutputStream.class, key.getOutputStream());
    assertEquals(1, keyOutputStream.getStreamEntries().size());
    RatisBlockOutputStream blockOutputStream =
        assertInstanceOf(RatisBlockOutputStream.class,
            keyOutputStream.getStreamEntries().get(0).getOutputStream());
    // we have written data less than a chunk size, the data will just sit
    // in the buffer, with only one buffer being allocated in the buffer pool
    BufferPool bufferPool = blockOutputStream.getBufferPool();
    assertEquals(1, bufferPool.getSize());
    //Just the writtenDataLength will be updated here
    assertEquals(dataLength, blockOutputStream.getWrittenDataLength());
    // no data will be flushed till now
    assertEquals(0, blockOutputStream.getTotalDataFlushedLength());
    assertEquals(0, blockOutputStream.getTotalAckDataLength());
    assertEquals(pendingWriteChunkCount,
        metrics.getPendingContainerOpCountMetrics(WriteChunk));
    assertEquals(pendingPutBlockCount,
        metrics.getPendingContainerOpCountMetrics(PutBlock));
    // commitIndex2FlushedData Map will be empty here
    assertEquals(0, blockOutputStream.getCommitIndex2flushedDataMap().size());
    // Total write data greater than or equal one chunk
    // size to make sure flush will sync data.
    key.write(data1);
    // This will flush the data and update the flush length and the map.
    key.flush();
    // flush is a sync call, all pending operations will complete
    assertEquals(pendingWriteChunkCount,
        metrics.getPendingContainerOpCountMetrics(WriteChunk));
    assertEquals(pendingPutBlockCount,
        metrics.getPendingContainerOpCountMetrics(PutBlock));
    // we have written data less than a chunk size, the data will just sit
    // in the buffer, with only one buffer being allocated in the buffer pool
    assertEquals(1, bufferPool.getSize());
    assertEquals(totalWriteLength, blockOutputStream.getWrittenDataLength());
    assertEquals(totalWriteLength,
        blockOutputStream.getTotalDataFlushedLength());
    assertEquals(0,
        blockOutputStream.getCommitIndex2flushedDataMap().size());
    // flush ensures watchForCommit updates the total length acknowledged
    assertEquals(totalWriteLength, blockOutputStream.getTotalAckDataLength());
    assertEquals(1, keyOutputStream.getStreamEntries().size());
    // now close the stream, It will update ack length after watchForCommit
    key.close();
    assertEquals(pendingWriteChunkCount,
        metrics.getPendingContainerOpCountMetrics(WriteChunk));
    assertEquals(pendingPutBlockCount,
        metrics.getPendingContainerOpCountMetrics(PutBlock));
    assertEquals(writeChunkCount + 1,
        metrics.getContainerOpCountMetrics(WriteChunk));
    assertEquals(putBlockCount + 2,
        metrics.getContainerOpCountMetrics(PutBlock));
    assertEquals(totalOpCount + 3, metrics.getTotalOpCount());
    // make sure the bufferPool is empty
    assertEquals(0, bufferPool.computeBufferData());
    assertEquals(totalWriteLength, blockOutputStream.getTotalAckDataLength());
    assertEquals(0, blockOutputStream.getCommitIndex2flushedDataMap().size());
    assertEquals(0, keyOutputStream.getStreamEntries().size());
    // NOTE: only data1 (the first 50 bytes pattern) is validated; both
    // writes used the same payload, so the key holds data1 twice.
    validateData(keyName, data1, client.getObjectStore(), VOLUME, BUCKET);
  }
}
/**
 * Writes exactly FLUSH_SIZE (2 chunks) so the automatic flush triggers on
 * write, then verifies BlockOutputStream bookkeeping and XceiverClient op
 * metrics after write, explicit flush, and close, for both flush-delay
 * settings.
 */
@ParameterizedTest
@MethodSource("clientParameters")
@Flaky("HDDS-11564")
void testWriteExactlyFlushSize(boolean flushDelay, boolean enablePiggybacking) throws Exception {
  OzoneClientConfig config = newClientConfig(cluster.getConf(), flushDelay, enablePiggybacking);
  try (OzoneClient client = newClient(cluster.getConf(), config)) {
    // Snapshot op counters; metrics are global, so later assertions are
    // relative to these baselines.
    XceiverClientMetrics metrics =
        XceiverClientManager.getXceiverClientMetrics();
    final long writeChunkCount =
        metrics.getContainerOpCountMetrics(WriteChunk);
    final long putBlockCount =
        metrics.getContainerOpCountMetrics(PutBlock);
    final long pendingWriteChunkCount =
        metrics.getPendingContainerOpCountMetrics(WriteChunk);
    final long pendingPutBlockCount =
        metrics.getPendingContainerOpCountMetrics(PutBlock);
    final long totalOpCount = metrics.getTotalOpCount();
    String keyName = getKeyName();
    OzoneOutputStream key = createKey(client, keyName);
    // write data equal to 2 chunks
    int dataLength = FLUSH_SIZE;
    byte[] data1 = RandomUtils.secure().randomBytes(dataLength);
    key.write(data1);
    assertEquals(writeChunkCount + 2,
        metrics.getContainerOpCountMetrics(WriteChunk));
    assertEquals(putBlockCount + 1,
        metrics.getContainerOpCountMetrics(PutBlock));
    // The WriteChunk and PutBlock can be completed soon.
    assertThat(metrics.getPendingContainerOpCountMetrics(WriteChunk))
        .isLessThanOrEqualTo(pendingWriteChunkCount + 2);
    assertThat(metrics.getPendingContainerOpCountMetrics(PutBlock))
        .isLessThanOrEqualTo(pendingPutBlockCount + 1);
    KeyOutputStream keyOutputStream =
        assertInstanceOf(KeyOutputStream.class, key.getOutputStream());
    assertEquals(1, keyOutputStream.getStreamEntries().size());
    RatisBlockOutputStream blockOutputStream =
        assertInstanceOf(RatisBlockOutputStream.class,
            keyOutputStream.getStreamEntries().get(0).getOutputStream());
    // we have just written data equal flush Size = 2 chunks, at this time
    // buffer pool will have 2 buffers allocated worth of chunk size
    assertEquals(2, blockOutputStream.getBufferPool().getSize());
    // writtenDataLength as well flushedDataLength will be updated here
    assertEquals(dataLength, blockOutputStream.getWrittenDataLength());
    assertEquals(dataLength, blockOutputStream.getTotalDataFlushedLength());
    assertEquals(0, blockOutputStream.getTotalAckDataLength());
    // Before flush, if there was no pending PutBlock which means it is complete.
    // It put a commit index into commitIndexMap.
    assertEquals((metrics.getPendingContainerOpCountMetrics(PutBlock) == pendingPutBlockCount) ? 1 : 0,
        blockOutputStream.getCommitIndex2flushedDataMap().size());
    // Now do a flush.
    key.flush();
    assertEquals(1, keyOutputStream.getStreamEntries().size());
    // The previously written data is equal to flushSize, so no action is
    // triggered when execute flush, if flushDelay is enabled.
    // If flushDelay is disabled, it will call waitOnFlushFutures to wait all
    // putBlocks finished. It was broken because WriteChunk and PutBlock
    // can be complete regardless of whether the flush executed or not.
    if (flushDelay) {
      assertThat(metrics.getPendingContainerOpCountMetrics(WriteChunk))
          .isLessThanOrEqualTo(pendingWriteChunkCount + 2);
      // Fixed: the PutBlock pending count must be compared against the
      // PutBlock baseline (was mistakenly pendingWriteChunkCount).
      assertThat(metrics.getPendingContainerOpCountMetrics(PutBlock))
          .isLessThanOrEqualTo(pendingPutBlockCount + 1);
    } else {
      assertEquals(pendingWriteChunkCount,
          metrics.getPendingContainerOpCountMetrics(WriteChunk));
      assertEquals(pendingPutBlockCount,
          metrics.getPendingContainerOpCountMetrics(PutBlock));
    }
    // Since the data in the buffer is already flushed, flush here will have
    // no impact on the counters and data structures
    assertEquals(2, blockOutputStream.getBufferPool().getSize());
    // No action is triggered when execute flush, BlockOutputStream will not
    // be updated.
    assertEquals(flushDelay ? dataLength : 0,
        blockOutputStream.getBufferPool().computeBufferData());
    assertEquals(dataLength, blockOutputStream.getWrittenDataLength());
    assertEquals(dataLength, blockOutputStream.getTotalDataFlushedLength());
    // If the flushDelay feature is enabled, nothing happens.
    // The assertions will be as same as those before flush.
    // If it flushed, the Commit index will be removed.
    assertEquals((flushDelay &&
        (metrics.getPendingContainerOpCountMetrics(PutBlock) == pendingPutBlockCount)) ? 1 : 0,
        blockOutputStream.getCommitIndex2flushedDataMap().size());
    assertEquals(flushDelay ? 0 : dataLength,
        blockOutputStream.getTotalAckDataLength());
    // now close the stream, It will update ack length after watchForCommit
    key.close();
    assertEquals(dataLength, blockOutputStream.getTotalAckDataLength());
    // make sure the bufferPool is empty
    assertEquals(0, blockOutputStream.getBufferPool().computeBufferData());
    assertEquals(0, blockOutputStream.getCommitIndex2flushedDataMap().size());
    assertEquals(pendingWriteChunkCount,
        metrics.getPendingContainerOpCountMetrics(WriteChunk));
    assertEquals(pendingPutBlockCount,
        metrics.getPendingContainerOpCountMetrics(PutBlock));
    assertEquals(writeChunkCount + 2,
        metrics.getContainerOpCountMetrics(WriteChunk));
    assertEquals(putBlockCount + 2,
        metrics.getContainerOpCountMetrics(PutBlock));
    assertEquals(totalOpCount + 4, metrics.getTotalOpCount());
    assertEquals(0, keyOutputStream.getStreamEntries().size());
    validateData(keyName, data1, client.getObjectStore(), VOLUME, BUCKET);
  }
}
/**
 * Writes CHUNK_SIZE + 50 bytes (more than one chunk but less than the flush
 * size): nothing is flushed until the explicit flush, and PutBlock counts
 * depend on whether piggybacking on WriteChunk is enabled.
 */
@ParameterizedTest
@MethodSource("clientParameters")
void testWriteMoreThanChunkSize(boolean flushDelay, boolean enablePiggybacking) throws Exception {
  OzoneClientConfig config = newClientConfig(cluster.getConf(), flushDelay, enablePiggybacking);
  try (OzoneClient client = newClient(cluster.getConf(), config)) {
    // Snapshot op counters; metrics are global, so later assertions are
    // relative to these baselines.
    XceiverClientMetrics metrics =
        XceiverClientManager.getXceiverClientMetrics();
    long writeChunkCount = metrics.getContainerOpCountMetrics(
        WriteChunk);
    long putBlockCount = metrics.getContainerOpCountMetrics(
        PutBlock);
    long pendingWriteChunkCount = metrics.getPendingContainerOpCountMetrics(
        WriteChunk);
    long pendingPutBlockCount = metrics.getPendingContainerOpCountMetrics(
        PutBlock);
    long totalOpCount = metrics.getTotalOpCount();
    String keyName = getKeyName();
    OzoneOutputStream key = createKey(client, keyName);
    // write data more than 1 chunk
    int dataLength = CHUNK_SIZE + 50;
    byte[] data1 = RandomUtils.secure().randomBytes(dataLength);
    key.write(data1);
    // Only the first full chunk is written out at this point.
    assertEquals(totalOpCount + 1, metrics.getTotalOpCount());
    KeyOutputStream keyOutputStream =
        assertInstanceOf(KeyOutputStream.class, key.getOutputStream());
    assertEquals(1, keyOutputStream.getStreamEntries().size());
    RatisBlockOutputStream blockOutputStream =
        assertInstanceOf(RatisBlockOutputStream.class,
            keyOutputStream.getStreamEntries().get(0).getOutputStream());
    // we have just written data equal flush Size > 1 chunk, at this time
    // buffer pool will have 2 buffers allocated worth of chunk size
    BufferPool bufferPool = blockOutputStream.getBufferPool();
    assertEquals(2, bufferPool.getSize());
    // writtenDataLength as well flushedDataLength will be updated here
    assertEquals(dataLength, blockOutputStream.getWrittenDataLength());
    // since data written is still less than flushLength, flushLength will
    // still be 0.
    assertEquals(0, blockOutputStream.getTotalDataFlushedLength());
    assertEquals(0, blockOutputStream.getTotalAckDataLength());
    assertEquals(0, blockOutputStream.getCommitIndex2flushedDataMap().size());
    // This will flush the data and update the flush length and the map.
    key.flush();
    assertEquals(writeChunkCount + 2,
        metrics.getContainerOpCountMetrics(WriteChunk));
    // With piggybacking the PutBlock rides on the WriteChunk, so no
    // standalone PutBlock op is counted here.
    assertEquals(putBlockCount + ((enablePiggybacking) ? 0 : 1),
        metrics.getContainerOpCountMetrics(PutBlock));
    assertEquals(pendingWriteChunkCount,
        metrics.getPendingContainerOpCountMetrics(WriteChunk));
    assertEquals(pendingPutBlockCount,
        metrics.getPendingContainerOpCountMetrics(PutBlock));
    assertEquals(2, bufferPool.getSize());
    assertEquals(dataLength, blockOutputStream.getWrittenDataLength());
    assertEquals(dataLength, blockOutputStream.getTotalDataFlushedLength());
    assertEquals(0, blockOutputStream.getCommitIndex2flushedDataMap().size());
    // flush ensures watchForCommit updates the total length acknowledged
    assertEquals(dataLength, blockOutputStream.getTotalAckDataLength());
    // now close the stream, It will update ack length after watchForCommit
    key.close();
    assertEquals(dataLength, blockOutputStream.getTotalAckDataLength());
    // make sure the bufferPool is empty
    assertEquals(0, bufferPool.computeBufferData());
    assertEquals(dataLength, blockOutputStream.getTotalAckDataLength());
    assertEquals(0, blockOutputStream.getCommitIndex2flushedDataMap().size());
    assertEquals(pendingWriteChunkCount,
        metrics.getPendingContainerOpCountMetrics(WriteChunk));
    assertEquals(pendingPutBlockCount,
        metrics.getPendingContainerOpCountMetrics(PutBlock));
    assertEquals(writeChunkCount + 2,
        metrics.getContainerOpCountMetrics(WriteChunk));
    assertEquals(putBlockCount + ((enablePiggybacking) ? 1 : 2),
        metrics.getContainerOpCountMetrics(PutBlock));
    assertEquals(totalOpCount + ((enablePiggybacking) ? 3 : 4), metrics.getTotalOpCount());
    assertEquals(0, keyOutputStream.getStreamEntries().size());
    validateData(keyName, data1, client.getObjectStore(), VOLUME, BUCKET);
  }
}
  /**
   * Writes FLUSH_SIZE + 50 bytes (more than the flush size: two full chunks plus
   * a partial one) and verifies buffer-pool size, flushed/acked data lengths and
   * XceiverClient op-count deltas before flush(), after flush() and after
   * close(), for every flushDelay / piggybacking combination.
   */
  @ParameterizedTest
  @MethodSource("clientParameters")
  @Flaky("HDDS-11564")
  void testWriteMoreThanFlushSize(boolean flushDelay, boolean enablePiggybacking) throws Exception {
    OzoneClientConfig config = newClientConfig(cluster.getConf(), flushDelay, enablePiggybacking);
    try (OzoneClient client = newClient(cluster.getConf(), config)) {
      // Snapshot the global client metrics so the assertions below can be
      // expressed as deltas relative to whatever earlier tests recorded.
      XceiverClientMetrics metrics =
          XceiverClientManager.getXceiverClientMetrics();
      long writeChunkCount = metrics.getContainerOpCountMetrics(
          WriteChunk);
      long putBlockCount = metrics.getContainerOpCountMetrics(
          PutBlock);
      long pendingWriteChunkCount = metrics.getPendingContainerOpCountMetrics(
          WriteChunk);
      long pendingPutBlockCount = metrics.getPendingContainerOpCountMetrics(
          PutBlock);
      long totalOpCount = metrics.getTotalOpCount();
      String keyName = getKeyName();
      OzoneOutputStream key = createKey(client, keyName);
      int dataLength = FLUSH_SIZE + 50;
      byte[] data1 = RandomUtils.secure().randomBytes(dataLength);
      key.write(data1);
      // Crossing FLUSH_SIZE triggers 3 new container ops in total.
      assertEquals(totalOpCount + 3, metrics.getTotalOpCount());
      KeyOutputStream keyOutputStream =
          assertInstanceOf(KeyOutputStream.class, key.getOutputStream());
      assertEquals(1, keyOutputStream.getStreamEntries().size());
      RatisBlockOutputStream blockOutputStream =
          assertInstanceOf(RatisBlockOutputStream.class,
              keyOutputStream.getStreamEntries().get(0).getOutputStream());
      // we have just written data more than flush Size(2 chunks), at this time
      // buffer pool will have 3 buffers allocated worth of chunk size
      assertEquals(3, blockOutputStream.getBufferPool().getSize());
      // writtenDataLength as well flushedDataLength will be updated here
      assertEquals(dataLength, blockOutputStream.getWrittenDataLength());
      assertEquals(FLUSH_SIZE, blockOutputStream.getTotalDataFlushedLength());
      assertEquals(0, blockOutputStream.getTotalAckDataLength());
      // Before flush, if there was no pending PutBlock which means it is complete.
      // It put a commit index into commitIndexMap.
      assertEquals((metrics.getPendingContainerOpCountMetrics(PutBlock) == pendingPutBlockCount) ? 1 : 0,
          blockOutputStream.getCommitIndex2flushedDataMap().size());
      key.flush();
      if (flushDelay) {
        // If the flushDelay feature is enabled, nothing happens.
        // The assertions will be as same as those before flush.
        assertEquals(FLUSH_SIZE, blockOutputStream.getTotalDataFlushedLength());
        assertEquals((metrics.getPendingContainerOpCountMetrics(PutBlock) == pendingPutBlockCount) ? 1 : 0,
            blockOutputStream.getCommitIndex2flushedDataMap().size());
        assertEquals(0, blockOutputStream.getTotalAckDataLength());
        assertEquals(1, keyOutputStream.getStreamEntries().size());
      } else {
        // Without flushDelay the remaining partial chunk is flushed and acked,
        // and the commit-index map is drained.
        assertEquals(dataLength, blockOutputStream.getTotalDataFlushedLength());
        assertEquals(0, blockOutputStream.getCommitIndex2flushedDataMap().size());
        assertEquals(dataLength, blockOutputStream.getTotalAckDataLength());
        assertEquals(1, keyOutputStream.getStreamEntries().size());
      }
      key.close();
      // After close, no container ops may remain pending.
      assertEquals(pendingWriteChunkCount,
          metrics.getPendingContainerOpCountMetrics(WriteChunk));
      assertEquals(pendingPutBlockCount,
          metrics.getPendingContainerOpCountMetrics(PutBlock));
      assertEquals(writeChunkCount + 3,
          metrics.getContainerOpCountMetrics(WriteChunk));
      // If the flushDelay was disabled, it sends PutBlock with the data in the buffer.
      assertEquals(putBlockCount + (flushDelay ? 2 : 3) - (enablePiggybacking ? 1 : 0),
          metrics.getContainerOpCountMetrics(PutBlock));
      assertEquals(totalOpCount + (flushDelay ? 5 : 6) - (enablePiggybacking ? 1 : 0),
          metrics.getTotalOpCount());
      assertEquals(dataLength, blockOutputStream.getTotalAckDataLength());
      // make sure the bufferPool is empty
      assertEquals(0, blockOutputStream.getBufferPool().computeBufferData());
      assertEquals(dataLength, blockOutputStream.getTotalAckDataLength());
      assertEquals(0, blockOutputStream.getCommitIndex2flushedDataMap().size());
      assertEquals(0, keyOutputStream.getStreamEntries().size());
      validateData(keyName, data1, client.getObjectStore(), VOLUME, BUCKET);
    }
  }
  /**
   * Writes exactly MAX_FLUSH_SIZE bytes (filling the whole buffer pool) and
   * verifies that at least FLUSH_SIZE worth of data gets acked before the next
   * buffer can be handed out, then checks op-count and buffer accounting after
   * flush() and close().
   */
  @ParameterizedTest
  @MethodSource("clientParameters")
  @Flaky("HDDS-11564")
  void testWriteExactlyMaxFlushSize(boolean flushDelay, boolean enablePiggybacking) throws Exception {
    OzoneClientConfig config = newClientConfig(cluster.getConf(), flushDelay, enablePiggybacking);
    try (OzoneClient client = newClient(cluster.getConf(), config)) {
      // Snapshot global metrics; assertions below are deltas against these.
      XceiverClientMetrics metrics =
          XceiverClientManager.getXceiverClientMetrics();
      long writeChunkCount = metrics.getContainerOpCountMetrics(WriteChunk);
      long putBlockCount = metrics.getContainerOpCountMetrics(PutBlock);
      long pendingWriteChunkCount =
          metrics.getPendingContainerOpCountMetrics(WriteChunk);
      long pendingPutBlockCount =
          metrics.getPendingContainerOpCountMetrics(PutBlock);
      long totalOpCount = metrics.getTotalOpCount();
      String keyName = getKeyName();
      OzoneOutputStream key = createKey(client, keyName);
      int dataLength = MAX_FLUSH_SIZE;
      byte[] data1 = RandomUtils.secure().randomBytes(dataLength);
      key.write(data1);
      KeyOutputStream keyOutputStream =
          assertInstanceOf(KeyOutputStream.class, key.getOutputStream());
      RatisBlockOutputStream blockOutputStream =
          assertInstanceOf(RatisBlockOutputStream.class,
              keyOutputStream.getStreamEntries().get(0).getOutputStream());
      BufferPool bufferPool = blockOutputStream.getBufferPool();
      // since it's hitting the full bufferCondition, it will call watchForCommit
      // however, the outputstream will not wait for watchForCommit, but the next call to
      // write() will need to wait for at least one watchForCommit, indirectly when asking for new buffer allocation.
      bufferPool.waitUntilAvailable();
      assertThat(metrics.getPendingContainerOpCountMetrics(WriteChunk))
          .isLessThanOrEqualTo(pendingWriteChunkCount + 2);
      assertThat(metrics.getPendingContainerOpCountMetrics(PutBlock))
          .isLessThanOrEqualTo(pendingPutBlockCount + 1);
      assertEquals(1, keyOutputStream.getStreamEntries().size());
      assertEquals(4, blockOutputStream.getBufferPool().getSize());
      // writtenDataLength as well flushedDataLength will be updated here
      assertEquals(dataLength, blockOutputStream.getWrittenDataLength());
      assertEquals(MAX_FLUSH_SIZE,
          blockOutputStream.getTotalDataFlushedLength());
      // since data equals to maxBufferSize is written, this will be a blocking
      // call and hence will wait for atleast flushSize worth of data to get
      // ack'd by all servers right here
      assertThat(blockOutputStream.getTotalAckDataLength())
          .isGreaterThanOrEqualTo(FLUSH_SIZE);
      // watchForCommit will clean up atleast one entry from the map where each
      // entry corresponds to flushSize worth of data
      assertThat(blockOutputStream.getCommitIndex2flushedDataMap().size())
          .isLessThanOrEqualTo(1);
      // This will flush the data and update the flush length and the map.
      key.flush();
      assertEquals(1, keyOutputStream.getStreamEntries().size());
      assertEquals(totalOpCount + 6, metrics.getTotalOpCount());
      // Since the data in the buffer is already flushed, flush here will have
      // no impact on the counters and data structures
      assertEquals(4, blockOutputStream.getBufferPool().getSize());
      assertEquals(dataLength, blockOutputStream.getWrittenDataLength());
      assertEquals(dataLength, blockOutputStream.getTotalDataFlushedLength());
      assertThat(blockOutputStream.getCommitIndex2flushedDataMap().size())
          .isLessThanOrEqualTo(1);
      // now close the stream, it will update ack length after watchForCommit
      key.close();
      assertEquals(pendingWriteChunkCount,
          metrics.getPendingContainerOpCountMetrics(WriteChunk));
      assertEquals(pendingPutBlockCount,
          metrics.getPendingContainerOpCountMetrics(PutBlock));
      assertEquals(writeChunkCount + 4,
          metrics.getContainerOpCountMetrics(WriteChunk));
      assertEquals(putBlockCount + 3,
          metrics.getContainerOpCountMetrics(PutBlock));
      assertEquals(totalOpCount + 7, metrics.getTotalOpCount());
      assertEquals(dataLength, blockOutputStream.getTotalAckDataLength());
      // make sure the bufferPool is empty
      assertEquals(0, blockOutputStream.getBufferPool().computeBufferData());
      assertEquals(dataLength, blockOutputStream.getTotalAckDataLength());
      assertEquals(0, blockOutputStream.getCommitIndex2flushedDataMap().size());
      assertEquals(0, keyOutputStream.getStreamEntries().size());
      validateData(keyName, data1, client.getObjectStore(), VOLUME, BUCKET);
    }
  }
  /**
   * Writes MAX_FLUSH_SIZE + 50 bytes (overflowing the buffer pool) and verifies
   * the blocking watchForCommit behavior on the write path, then the metric and
   * buffer accounting after flush() and close(), for every flushDelay /
   * piggybacking combination.
   */
  @ParameterizedTest
  @MethodSource("clientParameters")
  @Flaky("HDDS-11564")
  void testWriteMoreThanMaxFlushSize(boolean flushDelay, boolean enablePiggybacking) throws Exception {
    OzoneClientConfig config = newClientConfig(cluster.getConf(), flushDelay, enablePiggybacking);
    try (OzoneClient client = newClient(cluster.getConf(), config)) {
      // Snapshot global metrics; assertions below are deltas against these.
      XceiverClientMetrics metrics =
          XceiverClientManager.getXceiverClientMetrics();
      long writeChunkCount = metrics.getContainerOpCountMetrics(WriteChunk);
      long putBlockCount = metrics.getContainerOpCountMetrics(PutBlock);
      long pendingWriteChunkCount =
          metrics.getPendingContainerOpCountMetrics(WriteChunk);
      long pendingPutBlockCount =
          metrics.getPendingContainerOpCountMetrics(PutBlock);
      long totalOpCount = metrics.getTotalOpCount();
      String keyName = getKeyName();
      OzoneOutputStream key = createKey(client, keyName);
      int dataLength = MAX_FLUSH_SIZE + 50;
      // write data more than 1 chunk
      byte[] data1 = RandomUtils.secure().randomBytes(dataLength);
      key.write(data1);
      KeyOutputStream keyOutputStream =
          assertInstanceOf(KeyOutputStream.class, key.getOutputStream());
      // since it's hitting full-buffer, it will call watchForCommit
      // and completes putBlock at least for first flushSize worth of data
      assertThat(metrics.getPendingContainerOpCountMetrics(WriteChunk))
          .isLessThanOrEqualTo(pendingWriteChunkCount + 2);
      assertThat(metrics.getPendingContainerOpCountMetrics(PutBlock))
          .isLessThanOrEqualTo(pendingPutBlockCount + 1);
      assertEquals(writeChunkCount + 4,
          metrics.getContainerOpCountMetrics(WriteChunk));
      assertEquals(putBlockCount + 2,
          metrics.getContainerOpCountMetrics(PutBlock));
      assertEquals(totalOpCount + 6, metrics.getTotalOpCount());
      assertEquals(1, keyOutputStream.getStreamEntries().size());
      RatisBlockOutputStream blockOutputStream =
          assertInstanceOf(RatisBlockOutputStream.class,
              keyOutputStream.getStreamEntries().get(0).getOutputStream());
      assertThat(blockOutputStream.getBufferPool().getSize())
          .isLessThanOrEqualTo(4);
      // writtenDataLength as well flushedDataLength will be updated here
      assertEquals(dataLength, blockOutputStream.getWrittenDataLength());
      assertEquals(MAX_FLUSH_SIZE,
          blockOutputStream.getTotalDataFlushedLength());
      // since data equals to maxBufferSize is written, this will be a blocking
      // call and hence will wait for atleast flushSize worth of data to get
      // ack'd by all servers right here
      assertThat(blockOutputStream.getTotalAckDataLength())
          .isGreaterThanOrEqualTo(FLUSH_SIZE);
      // watchForCommit will clean up atleast one entry from the map where each
      // entry corresponds to flushSize worth of data
      assertThat(blockOutputStream.getCommitIndex2flushedDataMap().size())
          .isLessThanOrEqualTo(1);
      // Now do a flush.
      key.flush();
      assertEquals(1, keyOutputStream.getStreamEntries().size());
      assertEquals(pendingWriteChunkCount,
          metrics.getPendingContainerOpCountMetrics(WriteChunk));
      assertEquals(pendingPutBlockCount,
          metrics.getPendingContainerOpCountMetrics(PutBlock));
      // Since the data in the buffer is already flushed, flush here will have
      // no impact on the counters and data structures
      assertThat(blockOutputStream.getBufferPool().getSize())
          .isLessThanOrEqualTo(4);
      assertEquals(dataLength, blockOutputStream.getWrittenDataLength());
      // dataLength > MAX_FLUSH_SIZE
      assertEquals(flushDelay ? MAX_FLUSH_SIZE : dataLength,
          blockOutputStream.getTotalDataFlushedLength());
      assertThat(blockOutputStream.getCommitIndex2flushedDataMap().size())
          .isLessThanOrEqualTo(2);
      // now close the stream, it will update ack length after watchForCommit
      key.close();
      assertEquals(dataLength, blockOutputStream.getTotalAckDataLength());
      // make sure the bufferPool is empty
      assertEquals(0, blockOutputStream.getBufferPool().computeBufferData());
      assertEquals(pendingWriteChunkCount,
          metrics.getPendingContainerOpCountMetrics(WriteChunk));
      assertEquals(pendingPutBlockCount,
          metrics.getPendingContainerOpCountMetrics(PutBlock));
      assertEquals(writeChunkCount + 5,
          metrics.getContainerOpCountMetrics(WriteChunk));
      // The previous flush did not trigger any action with flushDelay enabled
      assertEquals(putBlockCount + (flushDelay ? 2 : 3) + (enablePiggybacking ? 0 : 1),
          metrics.getContainerOpCountMetrics(PutBlock));
      assertEquals(totalOpCount + (flushDelay ? 7 : 8) + ((enablePiggybacking ? 0 : 1)),
          metrics.getTotalOpCount());
      assertEquals(dataLength, blockOutputStream.getTotalAckDataLength());
      assertEquals(0, blockOutputStream.getCommitIndex2flushedDataMap().size());
      assertEquals(0, keyOutputStream.getStreamEntries().size());
      validateData(keyName, data1, client.getObjectStore(), VOLUME, BUCKET);
    }
  }
  /**
   * Creates a zero-size key with RATIS/THREE replication in the test
   * volume/bucket and returns its open output stream.
   */
  static OzoneOutputStream createKey(OzoneClient client, String keyName)
      throws Exception {
    return createKey(client, keyName, 0, ReplicationFactor.THREE);
  }
  /**
   * Creates a RATIS-replicated key of the given size and replication factor in
   * the test volume/bucket via {@code TestHelper} and returns its output stream.
   *
   * @param client  client to create the key with
   * @param keyName name of the key to create
   * @param size    declared key size passed to TestHelper
   * @param factor  replication factor for the RATIS pipeline
   */
  static OzoneOutputStream createKey(OzoneClient client, String keyName,
      long size, ReplicationFactor factor) throws Exception {
    return TestHelper.createKey(keyName, ReplicationType.RATIS, factor, size,
        client.getObjectStore(), VOLUME, BUCKET);
  }
static String getKeyName() {
return UUID.randomUUID().toString();
}
}
|
googleapis/google-cloud-java | 37,029 | java-deploy/proto-google-cloud-deploy-v1/src/main/java/com/google/cloud/deploy/v1/Strategy.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/deploy/v1/cloud_deploy.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.deploy.v1;
/**
*
*
* <pre>
* Strategy contains deployment strategy information.
* </pre>
*
* Protobuf type {@code google.cloud.deploy.v1.Strategy}
*/
public final class Strategy extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.deploy.v1.Strategy)
StrategyOrBuilder {
  private static final long serialVersionUID = 0L;
  // Use Strategy.newBuilder() to construct.
  private Strategy(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
    super(builder);
  }
  private Strategy() {}
  // Not called directly by user code; presumably used by the protobuf runtime
  // to create fresh instances (hence the unused-parameter suppression).
  @java.lang.Override
  @SuppressWarnings({"unused"})
  protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
    return new Strategy();
  }
  // Message descriptor, looked up from the generated file descriptor holder.
  public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
    return com.google.cloud.deploy.v1.CloudDeployProto
        .internal_static_google_cloud_deploy_v1_Strategy_descriptor;
  }
  @java.lang.Override
  protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internalGetFieldAccessorTable() {
    return com.google.cloud.deploy.v1.CloudDeployProto
        .internal_static_google_cloud_deploy_v1_Strategy_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            com.google.cloud.deploy.v1.Strategy.class,
            com.google.cloud.deploy.v1.Strategy.Builder.class);
  }
  // Field number of the currently-set oneof member; 0 means none is set.
  private int deploymentStrategyCase_ = 0;
  // Holds the message object of whichever oneof member is set (Standard or Canary).
  @SuppressWarnings("serial")
  private java.lang.Object deploymentStrategy_;
  /** Identifies which member of the {@code deploymentStrategy} oneof is set. */
  public enum DeploymentStrategyCase
      implements
          com.google.protobuf.Internal.EnumLite,
          com.google.protobuf.AbstractMessage.InternalOneOfEnum {
    STANDARD(1),
    CANARY(2),
    DEPLOYMENTSTRATEGY_NOT_SET(0);
    private final int value;
    private DeploymentStrategyCase(int value) {
      this.value = value;
    }
    /**
     * @param value The number of the enum to look for.
     * @return The enum associated with the given number.
     * @deprecated Use {@link #forNumber(int)} instead.
     */
    @java.lang.Deprecated
    public static DeploymentStrategyCase valueOf(int value) {
      return forNumber(value);
    }
    // Maps a field number to its oneof case; returns null for unknown numbers.
    public static DeploymentStrategyCase forNumber(int value) {
      switch (value) {
        case 1:
          return STANDARD;
        case 2:
          return CANARY;
        case 0:
          return DEPLOYMENTSTRATEGY_NOT_SET;
        default:
          return null;
      }
    }
    public int getNumber() {
      return this.value;
    }
  };
  /** Returns which member of the {@code deploymentStrategy} oneof is currently set. */
  public DeploymentStrategyCase getDeploymentStrategyCase() {
    return DeploymentStrategyCase.forNumber(deploymentStrategyCase_);
  }
  // Proto field number of the "standard" oneof member.
  public static final int STANDARD_FIELD_NUMBER = 1;
  /**
   *
   *
   * <pre>
   * Optional. Standard deployment strategy executes a single deploy and
   * allows verifying the deployment.
   * </pre>
   *
   * <code>.google.cloud.deploy.v1.Standard standard = 1 [(.google.api.field_behavior) = OPTIONAL];
   * </code>
   *
   * @return Whether the standard field is set.
   */
  @java.lang.Override
  public boolean hasStandard() {
    return deploymentStrategyCase_ == 1;
  }
  /**
   *
   *
   * <pre>
   * Optional. Standard deployment strategy executes a single deploy and
   * allows verifying the deployment.
   * </pre>
   *
   * <code>.google.cloud.deploy.v1.Standard standard = 1 [(.google.api.field_behavior) = OPTIONAL];
   * </code>
   *
   * @return The standard.
   */
  @java.lang.Override
  public com.google.cloud.deploy.v1.Standard getStandard() {
    // Falls back to the default instance when this oneof member is not set.
    if (deploymentStrategyCase_ == 1) {
      return (com.google.cloud.deploy.v1.Standard) deploymentStrategy_;
    }
    return com.google.cloud.deploy.v1.Standard.getDefaultInstance();
  }
  /**
   *
   *
   * <pre>
   * Optional. Standard deployment strategy executes a single deploy and
   * allows verifying the deployment.
   * </pre>
   *
   * <code>.google.cloud.deploy.v1.Standard standard = 1 [(.google.api.field_behavior) = OPTIONAL];
   * </code>
   */
  @java.lang.Override
  public com.google.cloud.deploy.v1.StandardOrBuilder getStandardOrBuilder() {
    if (deploymentStrategyCase_ == 1) {
      return (com.google.cloud.deploy.v1.Standard) deploymentStrategy_;
    }
    return com.google.cloud.deploy.v1.Standard.getDefaultInstance();
  }
  // Proto field number of the "canary" oneof member.
  public static final int CANARY_FIELD_NUMBER = 2;
  /**
   *
   *
   * <pre>
   * Optional. Canary deployment strategy provides progressive percentage
   * based deployments to a Target.
   * </pre>
   *
   * <code>.google.cloud.deploy.v1.Canary canary = 2 [(.google.api.field_behavior) = OPTIONAL];
   * </code>
   *
   * @return Whether the canary field is set.
   */
  @java.lang.Override
  public boolean hasCanary() {
    return deploymentStrategyCase_ == 2;
  }
  /**
   *
   *
   * <pre>
   * Optional. Canary deployment strategy provides progressive percentage
   * based deployments to a Target.
   * </pre>
   *
   * <code>.google.cloud.deploy.v1.Canary canary = 2 [(.google.api.field_behavior) = OPTIONAL];
   * </code>
   *
   * @return The canary.
   */
  @java.lang.Override
  public com.google.cloud.deploy.v1.Canary getCanary() {
    // Falls back to the default instance when this oneof member is not set.
    if (deploymentStrategyCase_ == 2) {
      return (com.google.cloud.deploy.v1.Canary) deploymentStrategy_;
    }
    return com.google.cloud.deploy.v1.Canary.getDefaultInstance();
  }
  /**
   *
   *
   * <pre>
   * Optional. Canary deployment strategy provides progressive percentage
   * based deployments to a Target.
   * </pre>
   *
   * <code>.google.cloud.deploy.v1.Canary canary = 2 [(.google.api.field_behavior) = OPTIONAL];
   * </code>
   */
  @java.lang.Override
  public com.google.cloud.deploy.v1.CanaryOrBuilder getCanaryOrBuilder() {
    if (deploymentStrategyCase_ == 2) {
      return (com.google.cloud.deploy.v1.Canary) deploymentStrategy_;
    }
    return com.google.cloud.deploy.v1.Canary.getDefaultInstance();
  }
  // Memoized initialization check: -1 = unknown, 0 = false, 1 = true.
  private byte memoizedIsInitialized = -1;
  @java.lang.Override
  public final boolean isInitialized() {
    byte isInitialized = memoizedIsInitialized;
    if (isInitialized == 1) return true;
    if (isInitialized == 0) return false;
    // No required fields in this message, so it is always initialized.
    memoizedIsInitialized = 1;
    return true;
  }
  // Serializes whichever oneof member is set, then any unknown fields.
  @java.lang.Override
  public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
    if (deploymentStrategyCase_ == 1) {
      output.writeMessage(1, (com.google.cloud.deploy.v1.Standard) deploymentStrategy_);
    }
    if (deploymentStrategyCase_ == 2) {
      output.writeMessage(2, (com.google.cloud.deploy.v1.Canary) deploymentStrategy_);
    }
    getUnknownFields().writeTo(output);
  }
  // Computes and memoizes the serialized byte size (memoizedSize == -1 means stale).
  @java.lang.Override
  public int getSerializedSize() {
    int size = memoizedSize;
    if (size != -1) return size;
    size = 0;
    if (deploymentStrategyCase_ == 1) {
      size +=
          com.google.protobuf.CodedOutputStream.computeMessageSize(
              1, (com.google.cloud.deploy.v1.Standard) deploymentStrategy_);
    }
    if (deploymentStrategyCase_ == 2) {
      size +=
          com.google.protobuf.CodedOutputStream.computeMessageSize(
              2, (com.google.cloud.deploy.v1.Canary) deploymentStrategy_);
    }
    size += getUnknownFields().getSerializedSize();
    memoizedSize = size;
    return size;
  }
  // Value equality: same oneof case, equal member message, equal unknown fields.
  @java.lang.Override
  public boolean equals(final java.lang.Object obj) {
    if (obj == this) {
      return true;
    }
    if (!(obj instanceof com.google.cloud.deploy.v1.Strategy)) {
      return super.equals(obj);
    }
    com.google.cloud.deploy.v1.Strategy other = (com.google.cloud.deploy.v1.Strategy) obj;
    if (!getDeploymentStrategyCase().equals(other.getDeploymentStrategyCase())) return false;
    switch (deploymentStrategyCase_) {
      case 1:
        if (!getStandard().equals(other.getStandard())) return false;
        break;
      case 2:
        if (!getCanary().equals(other.getCanary())) return false;
        break;
      case 0:
      default:
    }
    if (!getUnknownFields().equals(other.getUnknownFields())) return false;
    return true;
  }
  // Hash code consistent with equals(); memoized after first computation
  // (0 is used as the "not computed yet" sentinel).
  @java.lang.Override
  public int hashCode() {
    if (memoizedHashCode != 0) {
      return memoizedHashCode;
    }
    int hash = 41;
    hash = (19 * hash) + getDescriptor().hashCode();
    switch (deploymentStrategyCase_) {
      case 1:
        hash = (37 * hash) + STANDARD_FIELD_NUMBER;
        hash = (53 * hash) + getStandard().hashCode();
        break;
      case 2:
        hash = (37 * hash) + CANARY_FIELD_NUMBER;
        hash = (53 * hash) + getCanary().hashCode();
        break;
      case 0:
      default:
    }
    hash = (29 * hash) + getUnknownFields().hashCode();
    memoizedHashCode = hash;
    return hash;
  }
  // Standard generated parse entry points: all delegate to PARSER (declared
  // elsewhere in this file), differing only in input type and whether an
  // extension registry is supplied.
  public static com.google.cloud.deploy.v1.Strategy parseFrom(java.nio.ByteBuffer data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.cloud.deploy.v1.Strategy parseFrom(
      java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.cloud.deploy.v1.Strategy parseFrom(com.google.protobuf.ByteString data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.cloud.deploy.v1.Strategy parseFrom(
      com.google.protobuf.ByteString data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.cloud.deploy.v1.Strategy parseFrom(byte[] data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.cloud.deploy.v1.Strategy parseFrom(
      byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.cloud.deploy.v1.Strategy parseFrom(java.io.InputStream input)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }
  public static com.google.cloud.deploy.v1.Strategy parseFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }
  // Delimited variants read a length-prefixed message from the stream.
  public static com.google.cloud.deploy.v1.Strategy parseDelimitedFrom(java.io.InputStream input)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
  }
  public static com.google.cloud.deploy.v1.Strategy parseDelimitedFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
        PARSER, input, extensionRegistry);
  }
  public static com.google.cloud.deploy.v1.Strategy parseFrom(
      com.google.protobuf.CodedInputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }
  public static com.google.cloud.deploy.v1.Strategy parseFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }
  @java.lang.Override
  public Builder newBuilderForType() {
    return newBuilder();
  }
  /** Creates a new builder initialized to default values. */
  public static Builder newBuilder() {
    return DEFAULT_INSTANCE.toBuilder();
  }
  /** Creates a new builder pre-populated from {@code prototype}. */
  public static Builder newBuilder(com.google.cloud.deploy.v1.Strategy prototype) {
    return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
  }
  @java.lang.Override
  public Builder toBuilder() {
    // Avoids a needless mergeFrom when this is the default instance.
    return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
  }
  @java.lang.Override
  protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
    Builder builder = new Builder(parent);
    return builder;
  }
/**
*
*
* <pre>
* Strategy contains deployment strategy information.
* </pre>
*
* Protobuf type {@code google.cloud.deploy.v1.Strategy}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.cloud.deploy.v1.Strategy)
com.google.cloud.deploy.v1.StrategyOrBuilder {
    // Same message descriptor as the enclosing Strategy class.
    public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
      return com.google.cloud.deploy.v1.CloudDeployProto
          .internal_static_google_cloud_deploy_v1_Strategy_descriptor;
    }
    @java.lang.Override
    protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return com.google.cloud.deploy.v1.CloudDeployProto
          .internal_static_google_cloud_deploy_v1_Strategy_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              com.google.cloud.deploy.v1.Strategy.class,
              com.google.cloud.deploy.v1.Strategy.Builder.class);
    }
    // Construct using com.google.cloud.deploy.v1.Strategy.newBuilder()
    private Builder() {}
    private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
      super(parent);
    }
    // Resets all fields, the oneof case and any nested sub-builders.
    @java.lang.Override
    public Builder clear() {
      super.clear();
      bitField0_ = 0;
      if (standardBuilder_ != null) {
        standardBuilder_.clear();
      }
      if (canaryBuilder_ != null) {
        canaryBuilder_.clear();
      }
      deploymentStrategyCase_ = 0;
      deploymentStrategy_ = null;
      return this;
    }
    @java.lang.Override
    public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
      return com.google.cloud.deploy.v1.CloudDeployProto
          .internal_static_google_cloud_deploy_v1_Strategy_descriptor;
    }
    @java.lang.Override
    public com.google.cloud.deploy.v1.Strategy getDefaultInstanceForType() {
      return com.google.cloud.deploy.v1.Strategy.getDefaultInstance();
    }
    // build() rejects uninitialized messages; buildPartial() does not.
    @java.lang.Override
    public com.google.cloud.deploy.v1.Strategy build() {
      com.google.cloud.deploy.v1.Strategy result = buildPartial();
      if (!result.isInitialized()) {
        throw newUninitializedMessageException(result);
      }
      return result;
    }
    @java.lang.Override
    public com.google.cloud.deploy.v1.Strategy buildPartial() {
      com.google.cloud.deploy.v1.Strategy result = new com.google.cloud.deploy.v1.Strategy(this);
      if (bitField0_ != 0) {
        buildPartial0(result);
      }
      buildPartialOneofs(result);
      onBuilt();
      return result;
    }
    // No singular non-oneof fields in this message, so this is a no-op shell.
    private void buildPartial0(com.google.cloud.deploy.v1.Strategy result) {
      int from_bitField0_ = bitField0_;
    }
    // Copies the oneof case and value; prefers the sub-builder's built message
    // when one exists for the active case.
    private void buildPartialOneofs(com.google.cloud.deploy.v1.Strategy result) {
      result.deploymentStrategyCase_ = deploymentStrategyCase_;
      result.deploymentStrategy_ = this.deploymentStrategy_;
      if (deploymentStrategyCase_ == 1 && standardBuilder_ != null) {
        result.deploymentStrategy_ = standardBuilder_.build();
      }
      if (deploymentStrategyCase_ == 2 && canaryBuilder_ != null) {
        result.deploymentStrategy_ = canaryBuilder_.build();
      }
    }
    // The following overrides simply delegate to GeneratedMessageV3.Builder.
    @java.lang.Override
    public Builder clone() {
      return super.clone();
    }
    @java.lang.Override
    public Builder setField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.setField(field, value);
    }
    @java.lang.Override
    public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
      return super.clearField(field);
    }
    @java.lang.Override
    public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
      return super.clearOneof(oneof);
    }
    @java.lang.Override
    public Builder setRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
      return super.setRepeatedField(field, index, value);
    }
    @java.lang.Override
    public Builder addRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.addRepeatedField(field, value);
    }
    // Routes to the typed overload when possible.
    @java.lang.Override
    public Builder mergeFrom(com.google.protobuf.Message other) {
      if (other instanceof com.google.cloud.deploy.v1.Strategy) {
        return mergeFrom((com.google.cloud.deploy.v1.Strategy) other);
      } else {
        super.mergeFrom(other);
        return this;
      }
    }
    // Merges the active oneof member and unknown fields from another Strategy.
    public Builder mergeFrom(com.google.cloud.deploy.v1.Strategy other) {
      if (other == com.google.cloud.deploy.v1.Strategy.getDefaultInstance()) return this;
      switch (other.getDeploymentStrategyCase()) {
        case STANDARD:
          {
            mergeStandard(other.getStandard());
            break;
          }
        case CANARY:
          {
            mergeCanary(other.getCanary());
            break;
          }
        case DEPLOYMENTSTRATEGY_NOT_SET:
          {
            break;
          }
      }
      this.mergeUnknownFields(other.getUnknownFields());
      onChanged();
      return this;
    }
    // No required fields, so a builder is always initialized.
    @java.lang.Override
    public final boolean isInitialized() {
      return true;
    }
    // Wire-format parse loop: dispatches on each tag (field number << 3 | wire
    // type), reading known fields into their sub-builders and preserving
    // unrecognized fields.
    @java.lang.Override
    public Builder mergeFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      if (extensionRegistry == null) {
        throw new java.lang.NullPointerException();
      }
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              done = true;
              break;
            case 10:
              {
                input.readMessage(getStandardFieldBuilder().getBuilder(), extensionRegistry);
                deploymentStrategyCase_ = 1;
                break;
              } // case 10
            case 18:
              {
                input.readMessage(getCanaryFieldBuilder().getBuilder(), extensionRegistry);
                deploymentStrategyCase_ = 2;
                break;
              } // case 18
            default:
              {
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
          } // switch (tag)
        } // while (!done)
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.unwrapIOException();
      } finally {
        onChanged();
      } // finally
      return this;
    }
    // Builder-side mirror of the oneof state: active field number (0 = unset)
    // and the currently-held value.
    private int deploymentStrategyCase_ = 0;
    private java.lang.Object deploymentStrategy_;
    public DeploymentStrategyCase getDeploymentStrategyCase() {
      return DeploymentStrategyCase.forNumber(deploymentStrategyCase_);
    }
    /** Clears whichever member of the {@code deploymentStrategy} oneof is set. */
    public Builder clearDeploymentStrategy() {
      deploymentStrategyCase_ = 0;
      deploymentStrategy_ = null;
      onChanged();
      return this;
    }
    private int bitField0_;
private com.google.protobuf.SingleFieldBuilderV3<
com.google.cloud.deploy.v1.Standard,
com.google.cloud.deploy.v1.Standard.Builder,
com.google.cloud.deploy.v1.StandardOrBuilder>
standardBuilder_;
/**
*
*
* <pre>
* Optional. Standard deployment strategy executes a single deploy and
* allows verifying the deployment.
* </pre>
*
* <code>
* .google.cloud.deploy.v1.Standard standard = 1 [(.google.api.field_behavior) = OPTIONAL];
* </code>
*
* @return Whether the standard field is set.
*/
@java.lang.Override
public boolean hasStandard() {
return deploymentStrategyCase_ == 1;
}
/**
*
*
* <pre>
* Optional. Standard deployment strategy executes a single deploy and
* allows verifying the deployment.
* </pre>
*
* <code>
* .google.cloud.deploy.v1.Standard standard = 1 [(.google.api.field_behavior) = OPTIONAL];
* </code>
*
* @return The standard.
*/
@java.lang.Override
public com.google.cloud.deploy.v1.Standard getStandard() {
if (standardBuilder_ == null) {
if (deploymentStrategyCase_ == 1) {
return (com.google.cloud.deploy.v1.Standard) deploymentStrategy_;
}
return com.google.cloud.deploy.v1.Standard.getDefaultInstance();
} else {
if (deploymentStrategyCase_ == 1) {
return standardBuilder_.getMessage();
}
return com.google.cloud.deploy.v1.Standard.getDefaultInstance();
}
}
/**
*
*
* <pre>
* Optional. Standard deployment strategy executes a single deploy and
* allows verifying the deployment.
* </pre>
*
* <code>
* .google.cloud.deploy.v1.Standard standard = 1 [(.google.api.field_behavior) = OPTIONAL];
* </code>
*/
public Builder setStandard(com.google.cloud.deploy.v1.Standard value) {
if (standardBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
deploymentStrategy_ = value;
onChanged();
} else {
standardBuilder_.setMessage(value);
}
deploymentStrategyCase_ = 1;
return this;
}
/**
*
*
* <pre>
* Optional. Standard deployment strategy executes a single deploy and
* allows verifying the deployment.
* </pre>
*
* <code>
* .google.cloud.deploy.v1.Standard standard = 1 [(.google.api.field_behavior) = OPTIONAL];
* </code>
*/
public Builder setStandard(com.google.cloud.deploy.v1.Standard.Builder builderForValue) {
if (standardBuilder_ == null) {
deploymentStrategy_ = builderForValue.build();
onChanged();
} else {
standardBuilder_.setMessage(builderForValue.build());
}
deploymentStrategyCase_ = 1;
return this;
}
/**
*
*
* <pre>
* Optional. Standard deployment strategy executes a single deploy and
* allows verifying the deployment.
* </pre>
*
* <code>
* .google.cloud.deploy.v1.Standard standard = 1 [(.google.api.field_behavior) = OPTIONAL];
* </code>
*/
public Builder mergeStandard(com.google.cloud.deploy.v1.Standard value) {
  if (standardBuilder_ == null) {
    // Merge into the existing 'standard' payload only when the oneof already
    // holds a non-default Standard; otherwise the new value replaces it outright.
    if (deploymentStrategyCase_ == 1
        && deploymentStrategy_ != com.google.cloud.deploy.v1.Standard.getDefaultInstance()) {
      deploymentStrategy_ =
          com.google.cloud.deploy.v1.Standard.newBuilder(
                  (com.google.cloud.deploy.v1.Standard) deploymentStrategy_)
              .mergeFrom(value)
              .buildPartial();
    } else {
      deploymentStrategy_ = value;
    }
    onChanged();
  } else {
    // With a live field builder, merge when the case matches, otherwise replace.
    if (deploymentStrategyCase_ == 1) {
      standardBuilder_.mergeFrom(value);
    } else {
      standardBuilder_.setMessage(value);
    }
  }
  deploymentStrategyCase_ = 1;
  return this;
}
/**
*
*
* <pre>
* Optional. Standard deployment strategy executes a single deploy and
* allows verifying the deployment.
* </pre>
*
* <code>
* .google.cloud.deploy.v1.Standard standard = 1 [(.google.api.field_behavior) = OPTIONAL];
* </code>
*/
public Builder clearStandard() {
  if (standardBuilder_ == null) {
    // Only reset the oneof when it actually holds 'standard' (case 1).
    if (deploymentStrategyCase_ == 1) {
      deploymentStrategyCase_ = 0;
      deploymentStrategy_ = null;
      onChanged();
    }
  } else {
    if (deploymentStrategyCase_ == 1) {
      deploymentStrategyCase_ = 0;
      deploymentStrategy_ = null;
    }
    // The field builder's clear() performs its own change notification.
    standardBuilder_.clear();
  }
  return this;
}
/**
*
*
* <pre>
* Optional. Standard deployment strategy executes a single deploy and
* allows verifying the deployment.
* </pre>
*
* <code>
* .google.cloud.deploy.v1.Standard standard = 1 [(.google.api.field_behavior) = OPTIONAL];
* </code>
*/
public com.google.cloud.deploy.v1.Standard.Builder getStandardBuilder() {
  // Forces creation of the field builder and selects the 'standard' case.
  return getStandardFieldBuilder().getBuilder();
}
/**
*
*
* <pre>
* Optional. Standard deployment strategy executes a single deploy and
* allows verifying the deployment.
* </pre>
*
* <code>
* .google.cloud.deploy.v1.Standard standard = 1 [(.google.api.field_behavior) = OPTIONAL];
* </code>
*/
@java.lang.Override
public com.google.cloud.deploy.v1.StandardOrBuilder getStandardOrBuilder() {
  // Prefers the live builder view when one exists for the active case.
  if ((deploymentStrategyCase_ == 1) && (standardBuilder_ != null)) {
    return standardBuilder_.getMessageOrBuilder();
  } else {
    if (deploymentStrategyCase_ == 1) {
      return (com.google.cloud.deploy.v1.Standard) deploymentStrategy_;
    }
    return com.google.cloud.deploy.v1.Standard.getDefaultInstance();
  }
}
/**
*
*
* <pre>
* Optional. Standard deployment strategy executes a single deploy and
* allows verifying the deployment.
* </pre>
*
* <code>
* .google.cloud.deploy.v1.Standard standard = 1 [(.google.api.field_behavior) = OPTIONAL];
* </code>
*/
private com.google.protobuf.SingleFieldBuilderV3<
        com.google.cloud.deploy.v1.Standard,
        com.google.cloud.deploy.v1.Standard.Builder,
        com.google.cloud.deploy.v1.StandardOrBuilder>
    getStandardFieldBuilder() {
  // Lazily creates the field builder, seeding it from the current oneof payload
  // (or the default instance when the oneof holds a different case). Once the
  // builder owns the value, the raw payload slot is nulled out.
  if (standardBuilder_ == null) {
    if (!(deploymentStrategyCase_ == 1)) {
      deploymentStrategy_ = com.google.cloud.deploy.v1.Standard.getDefaultInstance();
    }
    standardBuilder_ =
        new com.google.protobuf.SingleFieldBuilderV3<
            com.google.cloud.deploy.v1.Standard,
            com.google.cloud.deploy.v1.Standard.Builder,
            com.google.cloud.deploy.v1.StandardOrBuilder>(
            (com.google.cloud.deploy.v1.Standard) deploymentStrategy_,
            getParentForChildren(),
            isClean());
    deploymentStrategy_ = null;
  }
  // Calling this accessor selects the 'standard' case as a side effect.
  deploymentStrategyCase_ = 1;
  onChanged();
  return standardBuilder_;
}
// Lazily-initialized field builder for the 'canary' oneof case; remains null
// until getCanaryFieldBuilder() is first called.
private com.google.protobuf.SingleFieldBuilderV3<
        com.google.cloud.deploy.v1.Canary,
        com.google.cloud.deploy.v1.Canary.Builder,
        com.google.cloud.deploy.v1.CanaryOrBuilder>
    canaryBuilder_;
/**
*
*
* <pre>
* Optional. Canary deployment strategy provides progressive percentage
* based deployments to a Target.
* </pre>
*
* <code>.google.cloud.deploy.v1.Canary canary = 2 [(.google.api.field_behavior) = OPTIONAL];
* </code>
*
* @return Whether the canary field is set.
*/
@java.lang.Override
public boolean hasCanary() {
  // True iff the deployment_strategy oneof currently holds 'canary' (field 2).
  return deploymentStrategyCase_ == 2;
}
/**
*
*
* <pre>
* Optional. Canary deployment strategy provides progressive percentage
* based deployments to a Target.
* </pre>
*
* <code>.google.cloud.deploy.v1.Canary canary = 2 [(.google.api.field_behavior) = OPTIONAL];
* </code>
*
* @return The canary.
*/
@java.lang.Override
public com.google.cloud.deploy.v1.Canary getCanary() {
  // Oneof accessor for case 2 ('canary'); mirrors getStandard(). Returns the
  // default instance when the oneof holds a different case.
  if (canaryBuilder_ == null) {
    if (deploymentStrategyCase_ == 2) {
      return (com.google.cloud.deploy.v1.Canary) deploymentStrategy_;
    }
    return com.google.cloud.deploy.v1.Canary.getDefaultInstance();
  } else {
    if (deploymentStrategyCase_ == 2) {
      return canaryBuilder_.getMessage();
    }
    return com.google.cloud.deploy.v1.Canary.getDefaultInstance();
  }
}
/**
*
*
* <pre>
* Optional. Canary deployment strategy provides progressive percentage
* based deployments to a Target.
* </pre>
*
* <code>.google.cloud.deploy.v1.Canary canary = 2 [(.google.api.field_behavior) = OPTIONAL];
* </code>
*/
public Builder setCanary(com.google.cloud.deploy.v1.Canary value) {
  // Protobuf oneof setters reject null rather than treating it as clear().
  if (canaryBuilder_ == null) {
    if (value == null) {
      throw new NullPointerException();
    }
    deploymentStrategy_ = value;
    onChanged();
  } else {
    canaryBuilder_.setMessage(value);
  }
  // Mark the oneof as holding 'canary' (field number 2).
  deploymentStrategyCase_ = 2;
  return this;
}
/**
*
*
* <pre>
* Optional. Canary deployment strategy provides progressive percentage
* based deployments to a Target.
* </pre>
*
* <code>.google.cloud.deploy.v1.Canary canary = 2 [(.google.api.field_behavior) = OPTIONAL];
* </code>
*/
public Builder setCanary(com.google.cloud.deploy.v1.Canary.Builder builderForValue) {
  // Builder overload: the sub-builder is snapshotted via build() before storing.
  if (canaryBuilder_ == null) {
    deploymentStrategy_ = builderForValue.build();
    onChanged();
  } else {
    canaryBuilder_.setMessage(builderForValue.build());
  }
  deploymentStrategyCase_ = 2;
  return this;
}
/**
*
*
* <pre>
* Optional. Canary deployment strategy provides progressive percentage
* based deployments to a Target.
* </pre>
*
* <code>.google.cloud.deploy.v1.Canary canary = 2 [(.google.api.field_behavior) = OPTIONAL];
* </code>
*/
public Builder mergeCanary(com.google.cloud.deploy.v1.Canary value) {
  if (canaryBuilder_ == null) {
    // Merge into the existing 'canary' payload only when the oneof already
    // holds a non-default Canary; otherwise the new value replaces it outright.
    if (deploymentStrategyCase_ == 2
        && deploymentStrategy_ != com.google.cloud.deploy.v1.Canary.getDefaultInstance()) {
      deploymentStrategy_ =
          com.google.cloud.deploy.v1.Canary.newBuilder(
                  (com.google.cloud.deploy.v1.Canary) deploymentStrategy_)
              .mergeFrom(value)
              .buildPartial();
    } else {
      deploymentStrategy_ = value;
    }
    onChanged();
  } else {
    // With a live field builder, merge when the case matches, otherwise replace.
    if (deploymentStrategyCase_ == 2) {
      canaryBuilder_.mergeFrom(value);
    } else {
      canaryBuilder_.setMessage(value);
    }
  }
  deploymentStrategyCase_ = 2;
  return this;
}
/**
*
*
* <pre>
* Optional. Canary deployment strategy provides progressive percentage
* based deployments to a Target.
* </pre>
*
* <code>.google.cloud.deploy.v1.Canary canary = 2 [(.google.api.field_behavior) = OPTIONAL];
* </code>
*/
public Builder clearCanary() {
  if (canaryBuilder_ == null) {
    // Only reset the oneof when it actually holds 'canary' (case 2).
    if (deploymentStrategyCase_ == 2) {
      deploymentStrategyCase_ = 0;
      deploymentStrategy_ = null;
      onChanged();
    }
  } else {
    if (deploymentStrategyCase_ == 2) {
      deploymentStrategyCase_ = 0;
      deploymentStrategy_ = null;
    }
    // The field builder's clear() performs its own change notification.
    canaryBuilder_.clear();
  }
  return this;
}
/**
*
*
* <pre>
* Optional. Canary deployment strategy provides progressive percentage
* based deployments to a Target.
* </pre>
*
* <code>.google.cloud.deploy.v1.Canary canary = 2 [(.google.api.field_behavior) = OPTIONAL];
* </code>
*/
public com.google.cloud.deploy.v1.Canary.Builder getCanaryBuilder() {
  // Forces creation of the field builder and selects the 'canary' case.
  return getCanaryFieldBuilder().getBuilder();
}
/**
*
*
* <pre>
* Optional. Canary deployment strategy provides progressive percentage
* based deployments to a Target.
* </pre>
*
* <code>.google.cloud.deploy.v1.Canary canary = 2 [(.google.api.field_behavior) = OPTIONAL];
* </code>
*/
@java.lang.Override
public com.google.cloud.deploy.v1.CanaryOrBuilder getCanaryOrBuilder() {
  // Prefers the live builder view when one exists for the active case.
  if ((deploymentStrategyCase_ == 2) && (canaryBuilder_ != null)) {
    return canaryBuilder_.getMessageOrBuilder();
  } else {
    if (deploymentStrategyCase_ == 2) {
      return (com.google.cloud.deploy.v1.Canary) deploymentStrategy_;
    }
    return com.google.cloud.deploy.v1.Canary.getDefaultInstance();
  }
}
/**
*
*
* <pre>
* Optional. Canary deployment strategy provides progressive percentage
* based deployments to a Target.
* </pre>
*
* <code>.google.cloud.deploy.v1.Canary canary = 2 [(.google.api.field_behavior) = OPTIONAL];
* </code>
*/
private com.google.protobuf.SingleFieldBuilderV3<
        com.google.cloud.deploy.v1.Canary,
        com.google.cloud.deploy.v1.Canary.Builder,
        com.google.cloud.deploy.v1.CanaryOrBuilder>
    getCanaryFieldBuilder() {
  // Lazily creates the field builder, seeding it from the current oneof payload
  // (or the default instance when the oneof holds a different case). Once the
  // builder owns the value, the raw payload slot is nulled out.
  if (canaryBuilder_ == null) {
    if (!(deploymentStrategyCase_ == 2)) {
      deploymentStrategy_ = com.google.cloud.deploy.v1.Canary.getDefaultInstance();
    }
    canaryBuilder_ =
        new com.google.protobuf.SingleFieldBuilderV3<
            com.google.cloud.deploy.v1.Canary,
            com.google.cloud.deploy.v1.Canary.Builder,
            com.google.cloud.deploy.v1.CanaryOrBuilder>(
            (com.google.cloud.deploy.v1.Canary) deploymentStrategy_,
            getParentForChildren(),
            isClean());
    deploymentStrategy_ = null;
  }
  // Calling this accessor selects the 'canary' case as a side effect.
  deploymentStrategyCase_ = 2;
  onChanged();
  return canaryBuilder_;
}
// Unknown-field handling is delegated unchanged to GeneratedMessageV3.Builder;
// 'final' prevents generated subclass hierarchies from altering the semantics.
@java.lang.Override
public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
  return super.setUnknownFields(unknownFields);
}

@java.lang.Override
public final Builder mergeUnknownFields(
    final com.google.protobuf.UnknownFieldSet unknownFields) {
  return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:google.cloud.deploy.v1.Strategy)
}
// @@protoc_insertion_point(class_scope:google.cloud.deploy.v1.Strategy)
// Singleton default instance, created eagerly at class-initialization time and
// shared by every accessor that needs an "empty" Strategy.
private static final com.google.cloud.deploy.v1.Strategy DEFAULT_INSTANCE;

static {
  DEFAULT_INSTANCE = new com.google.cloud.deploy.v1.Strategy();
}

public static com.google.cloud.deploy.v1.Strategy getDefaultInstance() {
  return DEFAULT_INSTANCE;
}
// Shared parser for Strategy messages. On any parse failure the partially
// populated message is attached to the exception so callers can inspect
// whatever was decoded before the error.
private static final com.google.protobuf.Parser<Strategy> PARSER =
    new com.google.protobuf.AbstractParser<Strategy>() {
      @java.lang.Override
      public Strategy parsePartialFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        Builder builder = newBuilder();
        try {
          builder.mergeFrom(input, extensionRegistry);
        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
          throw e.setUnfinishedMessage(builder.buildPartial());
        } catch (com.google.protobuf.UninitializedMessageException e) {
          // Missing required fields: surface as InvalidProtocolBufferException.
          throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
        } catch (java.io.IOException e) {
          // I/O problems are wrapped, preserving the cause.
          throw new com.google.protobuf.InvalidProtocolBufferException(e)
              .setUnfinishedMessage(builder.buildPartial());
        }
        return builder.buildPartial();
      }
    };
// Static and instance-level accessors both expose the single shared PARSER.
public static com.google.protobuf.Parser<Strategy> parser() {
  return PARSER;
}

@java.lang.Override
public com.google.protobuf.Parser<Strategy> getParserForType() {
  return PARSER;
}

@java.lang.Override
public com.google.cloud.deploy.v1.Strategy getDefaultInstanceForType() {
  return DEFAULT_INSTANCE;
}
}
|
oracle/graal | 37,417 | sdk/src/org.graalvm.polyglot/src/org/graalvm/polyglot/io/FileSystem.java | /*
* Copyright (c) 2018, 2025, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* The Universal Permissive License (UPL), Version 1.0
*
* Subject to the condition set forth below, permission is hereby granted to any
* person obtaining a copy of this software, associated documentation and/or
* data (collectively the "Software"), free of charge and under any and all
* copyright rights in the Software, and any and all patent rights owned or
* freely licensable by each licensor hereunder covering either (i) the
* unmodified Software as contributed to or provided by such licensor, or (ii)
* the Larger Works (as defined below), to deal in both
*
* (a) the Software, and
*
* (b) any piece of software and/or hardware listed in the lrgrwrks.txt file if
* one is included with the Software each a "Larger Work" to which the Software
* is contributed by such licensors),
*
* without restriction, including without limitation the rights to copy, create
* derivative works of, display, perform, and distribute the Software and make,
* use, sell, offer for sale, import, export, have made, and have sold the
* Software and the Larger Work(s), and to sublicense the foregoing rights on
* either these or other terms.
*
* This license is subject to the following condition:
*
* The above copyright notice and either this complete permission notice or at a
* minimum a reference to the UPL must be included in all copies or substantial
* portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
* SOFTWARE.
*/
package org.graalvm.polyglot.io;
import java.io.File;
import java.io.IOException;
import java.net.URI;
import java.nio.channels.SeekableByteChannel;
import java.nio.charset.Charset;
import java.nio.file.AccessMode;
import java.nio.file.AtomicMoveNotSupportedException;
import java.nio.file.CopyOption;
import java.nio.file.DirectoryNotEmptyException;
import java.nio.file.DirectoryStream;
import java.nio.file.FileAlreadyExistsException;
import java.nio.file.LinkOption;
import java.nio.file.NoSuchFileException;
import java.nio.file.NotDirectoryException;
import java.nio.file.NotLinkException;
import java.nio.file.OpenOption;
import java.nio.file.Path;
import java.nio.file.StandardCopyOption;
import java.nio.file.StandardOpenOption;
import java.nio.file.attribute.FileAttribute;
import java.nio.file.attribute.FileAttributeView;
import java.nio.file.spi.FileSystemProvider;
import java.util.Collection;
import java.util.Map;
import java.util.Objects;
import java.util.Set;
import java.util.function.Predicate;
import org.graalvm.polyglot.Context;
import org.graalvm.polyglot.Engine;
import org.graalvm.polyglot.io.IOAccess.Builder;
/**
* Service-provider for {@code Truffle} files.
*
* @since 19.0
*/
public interface FileSystem {
/**
* Parses a path from an {@link URI}.
*
* @param uri the {@link URI} to be converted to {@link Path}
* @return the {@link Path} representing given {@link URI}
* @throws UnsupportedOperationException when {@link URI} scheme is not supported
* @throws IllegalArgumentException if preconditions on the {@code uri} do not hold. The format
* of the URI is {@link FileSystem} specific.
* @since 19.0
*/
Path parsePath(URI uri); // URI form; unsupported schemes raise UnsupportedOperationException per the contract above
/**
* Parses a path from a {@link String}. This method is called only on the {@link FileSystem}
* with {@code file} scheme.
*
* @param path the string path to be converted to {@link Path}
* @return the {@link Path}
* @throws UnsupportedOperationException when the {@link FileSystem} supports only {@link URI}
* @throws IllegalArgumentException if the {@code path} string cannot be converted to a
* {@link Path}
* @since 19.0
*/
Path parsePath(String path); // string form; invoked only for the 'file' scheme per the contract above
/**
* Checks existence and accessibility of a file.
*
* @param path the path to the file to check
* @param modes the access modes to check, possibly empty to check existence only.
* @param linkOptions options determining how the symbolic links should be handled
* @throws NoSuchFileException if the file denoted by the path does not exist
* @throws IOException in case of IO error
* @throws SecurityException if this {@link FileSystem} denied the operation
* @since 19.0
*/
void checkAccess(Path path, Set<? extends AccessMode> modes, LinkOption... linkOptions) throws IOException; // empty 'modes' means existence-only check
/**
* Creates a directory.
*
* @param dir the directory to create
* @param attrs the optional attributes to set atomically when creating the directory
* @throws FileAlreadyExistsException if a file on given path already exists
* @throws IOException in case of IO error
* @throws UnsupportedOperationException if the attributes contain an attribute which cannot be
* set atomically
* @throws SecurityException if this {@link FileSystem} denied the operation
* @since 19.0
*/
void createDirectory(Path dir, FileAttribute<?>... attrs) throws IOException; // 'attrs' must be applied atomically with creation
/**
* Deletes a file.
*
* @param path the path to the file to delete
* @throws NoSuchFileException if a file on given path does not exist
* @throws DirectoryNotEmptyException if the path denotes a non empty directory
* @throws IOException in case of IO error
* @throws SecurityException if this {@link FileSystem} denied the operation
* @since 19.0
*/
void delete(Path path) throws IOException; // non-empty directories raise DirectoryNotEmptyException
/**
* Opens or creates a file returning a {@link SeekableByteChannel} to access the file content.
*
* @param path the path to the file to open
* @param options the options specifying how the file should be opened
* @param attrs the optional attributes to set atomically when creating the new file
* @return the created {@link SeekableByteChannel}
* @throws FileAlreadyExistsException if {@link StandardOpenOption#CREATE_NEW} option is set and
* a file already exists on given path
* @throws IOException in case of IO error
* @throws UnsupportedOperationException if the attributes contain an attribute which cannot be
* set atomically
* @throws IllegalArgumentException in case of invalid options combination
* @throws SecurityException if this {@link FileSystem} denied the operation
* @since 19.0
*/
SeekableByteChannel newByteChannel(Path path, Set<? extends OpenOption> options, FileAttribute<?>... attrs) throws IOException; // primitive open/create operation; default copy() builds on this
/**
* Returns directory entries.
*
* @param dir the path to the directory to iterate entries for
* @param filter the filter
* @return the new {@link DirectoryStream}
* @throws NotDirectoryException when given path does not denote a directory
* @throws IOException in case of IO error
* @throws SecurityException if this {@link FileSystem} denied the operation
* @since 19.0
*/
DirectoryStream<Path> newDirectoryStream(Path dir, DirectoryStream.Filter<? super Path> filter) throws IOException; // entries of 'dir' accepted by 'filter'
/**
* Resolves given path to an absolute path.
*
* @param path the path to resolve, may be a non normalized path
* @return an absolute {@link Path}
* @throws SecurityException if this {@link FileSystem} denied the operation
* @since 19.0
*/
Path toAbsolutePath(Path path); // resolves relative paths against the current working directory (see setCurrentWorkingDirectory)
/**
* Returns the real (canonical) path of an existing file.
*
* @param path the path to resolve, may be a non normalized path
* @param linkOptions options determining how the symbolic links should be handled
* @return an absolute canonical path
* @throws IOException in case of IO error
* @throws SecurityException if this {@link FileSystem} denied the operation
* @since 19.0
*/
Path toRealPath(Path path, LinkOption... linkOptions) throws IOException; // canonical path of an existing file; 'linkOptions' control symlink handling
/**
* Reads a file's attributes as a bulk operation.
*
* @param path the path to file to read attributes for
* @param attributes the attributes to read. The {@code attributes} parameter has the form:
* {@code [view-name:]attribute-list}. The optional {@code view-name} corresponds to
* {@link FileAttributeView#name()} and determines the set of attributes, the default
* value is {@code "basic"}. The {@code attribute-list} is a comma separated list of
* attributes. If the {@code attribute-list} contains {@code '*'} then all the
* attributes from given view are read.
* @param options the options determining how the symbolic links should be handled
* @return the {@link Map} containing the file attributes. The map's keys are attribute names,
* map's values are the attribute values. The map may contain a subset of required
* attributes in case when the {@code FileSystem} does not support some of the required
* attributes.
* @throws UnsupportedOperationException if the attribute view is not supported. At least the
* {@code "basic"} attribute view has to be supported by the file system.
* @throws IllegalArgumentException if the {@code attribute-list} is empty or contains an
* unknown attribute
* @throws IOException in case of IO error
* @throws SecurityException if this {@link FileSystem} denied the operation
* @since 19.0
*/
Map<String, Object> readAttributes(Path path, String attributes, LinkOption... options) throws IOException; // bulk read; at least the "basic" view must be supported
/**
* Sets a file's attribute.
*
* @param path the path to file to set an attribute to
* @param attribute the attribute to set. The {@code attribute} parameter has the form:
* {@code [view-name:]attribute-name}. The optional {@code view-name} corresponds to
* {@link FileAttributeView#name()} and determines the set of attributes, the default
* value is {@code "basic"}. The {@code attribute-name} is a name of an attribute.
* @param value the attribute value
* @param options the options determining how the symbolic links should be handled
* @throws ClassCastException if {@code value} is not of the expected type or {@code value} is a
* {@link Collection} containing element of a non expected type
* @throws UnsupportedOperationException if the attribute view is not supported.
* @throws IllegalArgumentException if the {@code attribute-name} is an unknown attribute or
* {@code value} has an inappropriate value
* @throws IOException in case of IO error
* @throws SecurityException if this {@link FileSystem} denied the operation
* @since 19.0
*/
default void setAttribute(Path path, String attribute, Object value, LinkOption... options) throws IOException {
    // Optional operation: the interface default rejects all attribute writes;
    // implementations that support mutable attributes must override.
    throw new UnsupportedOperationException("Setting attributes is not supported");
}
/**
* Copies source file to target file.
*
* @param source the path to file to copy
* @param target the path to the target file
* @param options the options specifying how the copy should be performed, see
* {@link StandardCopyOption}
* @throws UnsupportedOperationException if {@code options} contains unsupported option
* @throws FileAlreadyExistsException if the target path already exists and the {@code options}
* don't contain {@link StandardCopyOption#REPLACE_EXISTING} option
* @throws DirectoryNotEmptyException if the {@code options} contain
* {@link StandardCopyOption#REPLACE_EXISTING} but the {@code target} is a non empty
* directory
* @throws IOException in case of IO error
* @throws SecurityException if this {@link FileSystem} denied the operation
* @since 19.0
*/
default void copy(Path source, Path target, CopyOption... options) throws IOException {
    // Delegates to the internal IOHelper, passing 'this' so the copy is
    // presumably performed via this FileSystem's own primitive operations —
    // see IOHelper for the exact algorithm.
    IOHelper.copy(source, target, this, options);
}
/**
* Moves (renames) source file to target file.
*
* @param source the path to file to move
* @param target the path to the target file
* @param options the options specifying how the move should be performed, see
* {@link StandardCopyOption}
* @throws UnsupportedOperationException if {@code options} contains unsupported option
* @throws FileAlreadyExistsException if the target path already exists and the {@code options}
* don't contain {@link StandardCopyOption#REPLACE_EXISTING} option
* @throws DirectoryNotEmptyException if the {@code options} contain
* {@link StandardCopyOption#REPLACE_EXISTING} but the {@code target} is a non empty
* directory
* @throws AtomicMoveNotSupportedException if the {@code options} contain
* {@link StandardCopyOption#ATOMIC_MOVE} but file cannot be moved atomically
* @throws IOException in case of IO error
* @throws SecurityException if this {@link FileSystem} denied the operation
* @since 19.0
*/
default void move(Path source, Path target, CopyOption... options) throws IOException {
    // Delegates to the internal IOHelper, passing 'this' so the move is
    // presumably performed via this FileSystem's own primitive operations —
    // see IOHelper for the exact algorithm.
    IOHelper.move(source, target, this, options);
}
/**
* Creates a new link for an existing file.
*
* @param link the path to link to create
* @param existing the path to existing file
* @throws UnsupportedOperationException if links are not supported by file system
* @throws FileAlreadyExistsException if a file on given link path already exists
* @throws IOException in case of IO error
* @throws SecurityException if this {@link FileSystem} denied the operation
* @since 19.0
*/
default void createLink(Path link, Path existing) throws IOException {
    // Optional operation: hard links unsupported unless overridden.
    throw new UnsupportedOperationException("Links are not supported");
}
/**
* Creates a new symbolic link.
*
* @param link the path to symbolic link to create
* @param target the target path of the symbolic link
* @param attrs the optional attributes to set atomically when creating the new symbolic link
* @throws UnsupportedOperationException if symbolic links are not supported by file system
* @throws FileAlreadyExistsException if a file on given link path already exists
* @throws IOException in case of IO error
* @throws SecurityException if this {@link FileSystem} denied the operation
* @since 19.0
*/
default void createSymbolicLink(Path link, Path target, FileAttribute<?>... attrs) throws IOException {
    // Optional operation: symbolic links unsupported unless overridden.
    throw new UnsupportedOperationException("Links are not supported");
}
/**
* Reads the target of the symbolic link.
*
* @param link the path to symbolic link to read
* @return the {@link Path} representing the symbolic link target
* @throws UnsupportedOperationException if symbolic links are not supported by file system
* @throws NotLinkException if the {@code link} does not denote a symbolic link
* @throws IOException in case of IO error
* @throws SecurityException if this {@link FileSystem} denied the operation
* @since 19.0
*/
default Path readSymbolicLink(Path link) throws IOException {
    // Optional operation: symbolic links unsupported unless overridden.
    throw new UnsupportedOperationException("Links are not supported");
}
/**
* Sets the current working directory. The current working directory is used to resolve non
* absolute paths in {@link FileSystem} operations.
*
* @param currentWorkingDirectory the new current working directory
* @throws UnsupportedOperationException if setting of the current working directory is not
* supported
* @throws IllegalArgumentException if the {@code currentWorkingDirectory} is not a valid
* current working directory
* @throws SecurityException if {@code currentWorkingDirectory} is not readable
* @since 19.0
*/
default void setCurrentWorkingDirectory(Path currentWorkingDirectory) {
    // Optional operation: implementations that resolve relative paths against a
    // mutable working directory must override this.
    throw new UnsupportedOperationException("Setting current working directory is not supported.");
}
/**
* Returns the name separator used to separate names in a path string. The separator is used
* when creating path strings by invoking the {@link Path#toString() toString()} method.
*
* @return the name separator
* @since 19.0
*/
default String getSeparator() {
    // Derives the separator from the Path type this FileSystem produces: parse
    // an empty path and ask its underlying NIO file system.
    return parsePath("").getFileSystem().getSeparator();
}
/**
* Returns the path separator used to separate filenames in a path list. On UNIX the path
* separator is {@code ':'}. On Windows it's {@code ';'}.
*
* @return the path separator
* @since 19.1.0
*/
default String getPathSeparator() {
    // Host platform's path-list separator (':' on UNIX, ';' on Windows).
    return File.pathSeparator;
}
/**
* Returns a MIME type for the given path. An optional operation for {@link FileSystem filesystem}
* implementations which can provide MIME types in an efficient way.
*
* @param path the file to find a MIME type for
* @return the MIME type or {@code null} if the MIME type is not recognized or the
* {@link FileSystem filesystem} does not support MIME type detection
* @since 19.0
*/
default String getMimeType(Path path) {
    Objects.requireNonNull(path); // null path is always an error, even when detection is unsupported
    return null; // null == MIME type unknown / detection unsupported, per the contract above
}
/**
* Returns a file encoding for the given path. An optional operation for {@link FileSystem
* filesystem} implementations which can provide file encodings in an efficient way.
*
* @param path the file to find a file encoding for
* @return the file encoding or {@code null} if the file encoding is not detected or the
* {@link FileSystem filesystem} does not support file encoding detection
* @since 19.0
*/
default Charset getEncoding(Path path) {
    Objects.requireNonNull(path); // null path is always an error, even when detection is unsupported
    return null; // null == encoding not detected / detection unsupported, per the contract above
}
/**
* Returns the default temporary directory.
*
* @since 19.3.0
*/
default Path getTempDirectory() {
    // Optional operation: override to expose a temporary directory.
    throw new UnsupportedOperationException("Temporary directories not supported");
}
/**
* Tests if the given paths refer to the same physical file.
*
* The default implementation firstly converts the paths into absolute paths. If the absolute
* paths are equal it returns {@code true} without checking if the file exists. Otherwise, this
* method converts the paths into canonical representations and tests the canonical paths for
* equality. The {@link FileSystem} may re-implement the method with a more efficient test. When
* re-implemented the method must have the same security privileges as the
* {@link #toAbsolutePath(Path) toAbsolutePath} and {@link #toRealPath(Path, LinkOption...)
* toRealPath}.
*
* @param path1 the path to the file
* @param path2 the other path
* @param options the options determining how the symbolic links should be handled
* @return {@code true} if the given paths refer to the same physical file
* @throws IOException in case of IO error
* @throws SecurityException if this {@link FileSystem} denied the operation
* @since 20.2.0
*/
default boolean isSameFile(Path path1, Path path2, LinkOption... options) throws IOException {
    // Fast path: equal absolute paths necessarily denote the same file, and
    // this comparison does not require the file to exist.
    Path absolute1 = toAbsolutePath(path1);
    Path absolute2 = toAbsolutePath(path2);
    if (absolute1.equals(absolute2)) {
        return true;
    }
    // Slow path: canonicalize both paths (honoring the given link options) and
    // compare the canonical forms.
    Path canonical1 = toRealPath(path1, options);
    Path canonical2 = toRealPath(path2, options);
    return canonical1.equals(canonical2);
}
/**
* Returns the size, in bytes, of the file store that contains the given {@code path}. If the
* file store's size exceeds {@link Long#MAX_VALUE}, {@code Long.MAX_VALUE} is returned.
*
* @param path the path whose file store size is to be determined
* @return the size of the file store in bytes
* @throws UnsupportedOperationException if the file system does not support retrieving file
* store information
* @throws IOException if an I/O error occurs while accessing the file store
* @throws SecurityException if the {@link FileSystem} implementation denied the operation
* @since 25.0.0
*/
default long getFileStoreTotalSpace(Path path) throws IOException {
    // Optional operation: file-store introspection is opt-in for implementations.
    throw new UnsupportedOperationException("GetFileStoreTotalSpace is not supported");
}
/**
* Returns the number of unallocated bytes in the file store that contains the given
* {@code path}. The returned value represents the raw free space on the storage device,
* regardless of access permissions or user quotas. If the number of unallocated bytes exceeds
* {@link Long#MAX_VALUE}, {@code Long.MAX_VALUE} is returned. Note that the value may be
* imprecise, as it can change at any time due to external I/O operations, including those
* performed outside this virtual machine.
*
* @param path the path whose file store is to be queried
* @return the number of unallocated bytes
* @throws UnsupportedOperationException if the file system does not support retrieving file
* store information
* @throws IOException if an I/O error occurs while accessing the file store
* @throws SecurityException if the {@link FileSystem} implementation denied the operation
* @since 25.0.0
*/
default long getFileStoreUnallocatedSpace(Path path) throws IOException {
    // Optional operation: file-store introspection is opt-in for implementations.
    throw new UnsupportedOperationException("GetFileStoreUnallocatedSpace is not supported");
}
/**
* Returns the number of bytes available to this Java virtual machine on the file store that
* contains the given {@code path}. Unlike {@link #getFileStoreUnallocatedSpace(Path)}, this
* method accounts for operating system level restrictions, user quotas, and file system
* permissions, and therefore may return a smaller value. If the available space exceeds
* {@link Long#MAX_VALUE}, {@code Long.MAX_VALUE} is returned. Note that the returned value may
* be imprecise, as it can change at any time due to external I/O activity, including operations
* performed outside this virtual machine.
*
* @param path the path whose file store is to be queried
* @return the number of usable bytes available to this Java virtual machine
* @throws UnsupportedOperationException if the file system does not support retrieving file
* store information
* @throws IOException if an I/O error occurs while accessing the file store
* @throws SecurityException if the {@link FileSystem} implementation denied the operation
* @since 25.0.0
*/
default long getFileStoreUsableSpace(Path path) throws IOException {
    // Optional operation: file-store introspection is opt-in for implementations.
    throw new UnsupportedOperationException("GetFileStoreUsableSpace is not supported");
}
/**
* Returns the number of bytes per block in the file store that contains the given {@code path}.
*
* @param path the path whose file store is to be queried
* @return the block size
* @throws UnsupportedOperationException if the file system does not support retrieving file
* store information
* @throws IOException if an I/O error occurs while accessing the file store
* @throws SecurityException if the {@link FileSystem} implementation denied the operation
* @since 25.0.0
*/
default long getFileStoreBlockSize(Path path) throws IOException {
    // Optional operation: file-store introspection is opt-in for implementations.
    throw new UnsupportedOperationException("GetFileStoreBlockSize is not supported");
}
/**
* Determines whether the file store containing the given {@code path} is read-only.
* <p>
* Note that even if the file store is not read-only, individual write operations may still be
* denied due to restrictions imposed by the {@link FileSystem} implementation, operating system
* level policies, user quotas, or file system permissions.
*
* @param path the path whose file store is to be queried
* @throws UnsupportedOperationException if the file system does not support retrieving file
* store information
* @throws IOException if an I/O error occurs while accessing the file store
* @throws SecurityException if the {@link FileSystem} implementation denied the operation
* @since 25.0.0
*/
default boolean isFileStoreReadOnly(Path path) throws IOException {
    // Optional operation: file-store introspection is opt-in for implementations.
    throw new UnsupportedOperationException("IsFileStoreReadOnly is not supported");
}
    /**
     * Creates a {@link FileSystem} implementation based on the host Java NIO. The returned instance
     * can be used as a delegate by a decorating {@link FileSystem}.
     * <p>
     * For an untrusted code execution, access to the host filesystem should be prevented either by
     * using {@link IOAccess#NONE} or an {@link #newFileSystem(java.nio.file.FileSystem) in-memory
     * filesystem}. For more details on executing untrusted code, see the
     * <a href="https://www.graalvm.org/dev/security-guide/polyglot-sandbox/">Polyglot Sandboxing
     * Security Guide</a>.
     * <p>
     * The following example shows a {@link FileSystem} logging filesystem operations.
     *
     * <pre>
     * class TracingFileSystem implements FileSystem {
     *
     *     private static final Logger LOGGER = Logger.getLogger(TracingFileSystem.class.getName());
     *
     *     private final FileSystem delegate;
     *
     *     TracingFileSystem() {
     *         this.delegate = FileSystem.newDefaultFileSystem();
     *     }
     *
     *     @Override
     *     public Path parsePath(String path) {
     *         return delegate.parsePath(path);
     *     }
     *
     *     @Override
     *     public Path parsePath(URI uri) {
     *         return delegate.parsePath(uri);
     *     }
     *
     *     @Override
     *     public SeekableByteChannel newByteChannel(Path path,
     *                     Set<? extends OpenOption> options,
     *                     FileAttribute<?>... attrs) throws IOException {
     *         boolean success = false;
     *         try {
     *             SeekableByteChannel result = delegate.newByteChannel(path, options, attrs);
     *             success = true;
     *             return result;
     *         } finally {
     *             trace("newByteChannel", path, success);
     *         }
     *     }
     *
     *     ...
     *
     *     private void trace(String operation, Path path, boolean success) {
     *         LOGGER.log(Level.FINE, "The {0} request for the path {1} {2}.",new Object[] {
     *                         operation, path, success ? "was successful" : "failed"
     *         });
     *     }
     * }
     * </pre>
     *
     * @see Builder#fileSystem(FileSystem)
     * @see org.graalvm.polyglot.Context.Builder#allowIO(IOAccess)
     *
     * @since 20.2.0
     */
    static FileSystem newDefaultFileSystem() {
        // Delegates to the SPI implementation; the host's java.io.tmpdir is used as the
        // temporary-directory root of the returned filesystem.
        return IOHelper.ImplHolder.IMPL.newDefaultFileSystem(System.getProperty("java.io.tmpdir"));
    }
    /**
     * Decorates the given {@code fileSystem} by an implementation that forwards access to files in
     * the language home to the default file system. The method is intended to be used by custom
     * filesystem implementations with non default storage to allow guest languages to access files
     * in the languages homes. As the returned filesystem uses a default file system to access files
     * in the language home, the {@code fileSystem} has to use the same {@link Path} type,
     * {@link #getSeparator() separator} and {@link #getPathSeparator() path separator} as the
     * {@link #newDefaultFileSystem() default filesystem}.
     *
     * @throws IllegalArgumentException when the {@code fileSystem} does not use the same
     *             {@link Path} type or has a different {@link #getSeparator() separator} or
     *             {@link #getPathSeparator() path separator} as the {@link #newDefaultFileSystem()
     *             default file system}.
     * @since 22.2
     * @deprecated Use {@link #allowInternalResourceAccess(FileSystem)}.
     */
    @Deprecated
    static FileSystem allowLanguageHomeAccess(FileSystem fileSystem) {
        return allowInternalResourceAccess(fileSystem);
    }
/**
* Decorates the given {@code fileSystem} by an implementation that forwards access to the
* internal resources to the default file system. The method is intended to be used by custom
* filesystem implementations with non default storage to allow guest languages to access
* internal resources. As the returned filesystem uses a default file system to access internal
* resources, the {@code fileSystem} has to use the same {@link Path} type,
* {@link #getSeparator() separator} and {@link #getPathSeparator() path separator} as the
* {@link #newDefaultFileSystem() default filesystem}.
*
* @throws IllegalArgumentException when the {@code fileSystem} does not use the same
* {@link Path} type or has a different {@link #getSeparator() separator} or
* {@link #getPathSeparator() path separator} as the {@link #newDefaultFileSystem()
* default file system}.
* @see Engine#copyResources(Path, String...)
* @since 24.0
*/
static FileSystem allowInternalResourceAccess(FileSystem fileSystem) {
return IOHelper.ImplHolder.IMPL.allowInternalResourceAccess(fileSystem);
}
/**
* Decorates the given {@code fileSystem} by an implementation that makes the passed
* {@code fileSystem} read-only by forbidding all write operations. This method can be used to
* make an existing file system, such as the {@link #newDefaultFileSystem() default filesystem},
* read-only.
*
* @since 22.2
*/
static FileSystem newReadOnlyFileSystem(FileSystem fileSystem) {
return IOHelper.ImplHolder.IMPL.newReadOnlyFileSystem(fileSystem);
}
/**
* Creates a {@link FileSystem} implementation based on the given Java NIO filesystem. The
* returned {@link FileSystem} delegates all operations to {@code fileSystem}'s
* {@link FileSystemProvider provider}.
*
* <p>
* The following example shows how to configure {@link Context} so that languages read files
* from a prepared zip file.
*
* <pre>
* Path zipFile = Paths.get("filesystem.zip");
* try (java.nio.file.FileSystem nioFs = FileSystems.newFileSystem(zipFile)) {
* IOAccess ioAccess = IOAccess.newBuilder().fileSystem(FileSystem.newFileSystem(nioFs)).build();
* try (Context ctx = Context.newBuilder().allowIO(ioAccess).build()) {
* Value result = ctx.eval("js", "load('scripts/app.sh'); execute()");
* }
* }
* </pre>
*
* @see IOAccess
* @since 23.0
*/
static FileSystem newFileSystem(java.nio.file.FileSystem fileSystem) {
return IOHelper.ImplHolder.IMPL.newNIOFileSystem(fileSystem);
}
/**
* Creates a {@link FileSystem} that denies all file operations except for path parsing. Any
* attempt to perform file operations such as reading, writing, or deletion will result in a
* {@link SecurityException} being thrown.
* <p>
* Typically, this file system does not need to be explicitly installed to restrict access to
* host file systems. Instead, use {@code Context.newBuilder().allowIO(IOAccess.NONE)}. This
* method is intended primarily for use as a fallback file system in a
* {@link #newCompositeFileSystem(FileSystem, Selector...) composite file system}.
*
* @since 24.2
*/
static FileSystem newDenyIOFileSystem() {
return IOHelper.ImplHolder.IMPL.newDenyIOFileSystem();
}
/**
* Creates a composite {@link FileSystem} that delegates operations to the provided
* {@code delegates}. The {@link FileSystem} of the first {@code delegate} whose
* {@link Selector#test(Path)} method accepts the path is used for the file system operation. If
* no {@code delegate} accepts the path, the {@code fallbackFileSystem} is used.
* <p>
* The {@code fallbackFileSystem} is responsible for parsing {@link Path} objects. All provided
* file systems must use the same {@link Path} type, {@link #getSeparator() separator}, and
* {@link #getPathSeparator() path separator}. If any file system does not meet this
* requirement, an {@link IllegalArgumentException} is thrown.
* <p>
* The composite file system maintains its own notion of the current working directory and
* ensures that the {@link #setCurrentWorkingDirectory(Path)} method is not invoked on any of
* the delegates. When a request to set the current working directory is received, the composite
* file system verifies that the specified path corresponds to an existing directory by
* consulting either the appropriate delegate or the {@code fallbackFileSystem}. If an explicit
* current working directory has been set, the composite file system normalizes and resolves all
* relative paths to absolute paths prior to delegating operations. Conversely, if no explicit
* current working directory is set, the composite file system directly forwards the incoming
* path, whether relative or absolute, to the appropriate delegate. Furthermore, when an
* explicit current working directory is set, the composite file system does not delegate
* {@code toAbsolutePath} operations, as delegates do not maintain an independent notion of the
* current working directory. If the current working directory is unset, {@code toAbsolutePath}
* operations are delegated to the {@code fallbackFileSystem}.
* <p>
* Operations that are independent of path context, including {@code getTempDirectory},
* {@code getSeparator}, and {@code getPathSeparator}, are handled exclusively by the
* {@code fallbackFileSystem}.
*
* @throws IllegalArgumentException if the file systems do not use the same {@link Path} type,
* {@link #getSeparator() separator}, or {@link #getPathSeparator() path separator}
* @since 24.2
*/
static FileSystem newCompositeFileSystem(FileSystem fallbackFileSystem, Selector... delegates) {
return IOHelper.ImplHolder.IMPL.newCompositeFileSystem(fallbackFileSystem, delegates);
}
/**
* A selector for determining which {@link FileSystem} should handle operations on a given
* {@link Path}. This class encapsulates a {@link FileSystem} and defines a condition for
* selecting it.
*
* @since 24.2
*/
abstract class Selector implements Predicate<Path> {
private final FileSystem fileSystem;
/**
* Creates a {@link Selector} for the specified {@link FileSystem}.
*
* @since 24.2
*/
protected Selector(FileSystem fileSystem) {
this.fileSystem = Objects.requireNonNull(fileSystem, "FileSystem must be non-null");
}
/**
* Returns the {@link FileSystem} associated with this selector.
*
* @since 24.2
*/
public final FileSystem getFileSystem() {
return fileSystem;
}
/**
* Tests whether the {@link FileSystem} associated with this selector can handle operations
* on the specified {@link Path}.
*
* @param path the path to test, provided as a normalized absolute path. The given
* {@code path} has no path components equal to {@code "."} or {@code ".."}.
* @return {@code true} if the associated {@link FileSystem} can handle the {@code path};
* {@code false} otherwise
* @since 24.2
*/
public abstract boolean test(Path path);
/**
* Creates a {@link Selector} for the specified {@link FileSystem} using the provided
* {@link Predicate}.
*
* @param fileSystem the {@link FileSystem} to associate with the selector
* @param predicate the condition to determine if the {@link FileSystem} can handle a given
* path
* @return a new {@link Selector} that delegates path testing to the {@code predicate}
* @since 24.2
*/
public static Selector of(FileSystem fileSystem, Predicate<Path> predicate) {
Objects.requireNonNull(predicate, "Predicate must be non-null");
return new Selector(fileSystem) {
@Override
public boolean test(Path path) {
return predicate.test(path);
}
};
}
}
}
|
googleapis/google-cloud-java | 36,779 | java-container/proto-google-cloud-container-v1beta1/src/main/java/com/google/container/v1beta1/UpdateNodePoolRequestOrBuilder.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/container/v1beta1/cluster_service.proto
// Protobuf Java Version: 3.25.8
package com.google.container.v1beta1;
public interface UpdateNodePoolRequestOrBuilder
extends
// @@protoc_insertion_point(interface_extends:google.container.v1beta1.UpdateNodePoolRequest)
com.google.protobuf.MessageOrBuilder {
/**
*
*
* <pre>
* Deprecated. The Google Developers Console [project ID or project
* number](https://cloud.google.com/resource-manager/docs/creating-managing-projects).
* This field has been deprecated and replaced by the name field.
* </pre>
*
* <code>string project_id = 1 [deprecated = true];</code>
*
* @deprecated google.container.v1beta1.UpdateNodePoolRequest.project_id is deprecated. See
* google/container/v1beta1/cluster_service.proto;l=4058
* @return The projectId.
*/
@java.lang.Deprecated
java.lang.String getProjectId();
/**
*
*
* <pre>
* Deprecated. The Google Developers Console [project ID or project
* number](https://cloud.google.com/resource-manager/docs/creating-managing-projects).
* This field has been deprecated and replaced by the name field.
* </pre>
*
* <code>string project_id = 1 [deprecated = true];</code>
*
* @deprecated google.container.v1beta1.UpdateNodePoolRequest.project_id is deprecated. See
* google/container/v1beta1/cluster_service.proto;l=4058
* @return The bytes for projectId.
*/
@java.lang.Deprecated
com.google.protobuf.ByteString getProjectIdBytes();
/**
*
*
* <pre>
* Deprecated. The name of the Google Compute Engine
* [zone](https://cloud.google.com/compute/docs/zones#available)
* in which the cluster resides. This field has been deprecated and replaced
* by the name field.
* </pre>
*
* <code>string zone = 2 [deprecated = true];</code>
*
* @deprecated google.container.v1beta1.UpdateNodePoolRequest.zone is deprecated. See
* google/container/v1beta1/cluster_service.proto;l=4064
* @return The zone.
*/
@java.lang.Deprecated
java.lang.String getZone();
/**
*
*
* <pre>
* Deprecated. The name of the Google Compute Engine
* [zone](https://cloud.google.com/compute/docs/zones#available)
* in which the cluster resides. This field has been deprecated and replaced
* by the name field.
* </pre>
*
* <code>string zone = 2 [deprecated = true];</code>
*
* @deprecated google.container.v1beta1.UpdateNodePoolRequest.zone is deprecated. See
* google/container/v1beta1/cluster_service.proto;l=4064
* @return The bytes for zone.
*/
@java.lang.Deprecated
com.google.protobuf.ByteString getZoneBytes();
/**
*
*
* <pre>
* Deprecated. The name of the cluster to upgrade.
* This field has been deprecated and replaced by the name field.
* </pre>
*
* <code>string cluster_id = 3 [deprecated = true];</code>
*
* @deprecated google.container.v1beta1.UpdateNodePoolRequest.cluster_id is deprecated. See
* google/container/v1beta1/cluster_service.proto;l=4068
* @return The clusterId.
*/
@java.lang.Deprecated
java.lang.String getClusterId();
/**
*
*
* <pre>
* Deprecated. The name of the cluster to upgrade.
* This field has been deprecated and replaced by the name field.
* </pre>
*
* <code>string cluster_id = 3 [deprecated = true];</code>
*
* @deprecated google.container.v1beta1.UpdateNodePoolRequest.cluster_id is deprecated. See
* google/container/v1beta1/cluster_service.proto;l=4068
* @return The bytes for clusterId.
*/
@java.lang.Deprecated
com.google.protobuf.ByteString getClusterIdBytes();
/**
*
*
* <pre>
* Deprecated. The name of the node pool to upgrade.
* This field has been deprecated and replaced by the name field.
* </pre>
*
* <code>string node_pool_id = 4 [deprecated = true];</code>
*
* @deprecated google.container.v1beta1.UpdateNodePoolRequest.node_pool_id is deprecated. See
* google/container/v1beta1/cluster_service.proto;l=4072
* @return The nodePoolId.
*/
@java.lang.Deprecated
java.lang.String getNodePoolId();
/**
*
*
* <pre>
* Deprecated. The name of the node pool to upgrade.
* This field has been deprecated and replaced by the name field.
* </pre>
*
* <code>string node_pool_id = 4 [deprecated = true];</code>
*
* @deprecated google.container.v1beta1.UpdateNodePoolRequest.node_pool_id is deprecated. See
* google/container/v1beta1/cluster_service.proto;l=4072
* @return The bytes for nodePoolId.
*/
@java.lang.Deprecated
com.google.protobuf.ByteString getNodePoolIdBytes();
/**
*
*
* <pre>
* Required. The Kubernetes version to change the nodes to (typically an
* upgrade).
*
* Users may specify either explicit versions offered by Kubernetes Engine or
* version aliases, which have the following behavior:
*
* - "latest": picks the highest valid Kubernetes version
* - "1.X": picks the highest valid patch+gke.N patch in the 1.X version
* - "1.X.Y": picks the highest valid gke.N patch in the 1.X.Y version
* - "1.X.Y-gke.N": picks an explicit Kubernetes version
* - "-": picks the Kubernetes master version
* </pre>
*
* <code>string node_version = 5 [(.google.api.field_behavior) = REQUIRED];</code>
*
* @return The nodeVersion.
*/
java.lang.String getNodeVersion();
/**
*
*
* <pre>
* Required. The Kubernetes version to change the nodes to (typically an
* upgrade).
*
* Users may specify either explicit versions offered by Kubernetes Engine or
* version aliases, which have the following behavior:
*
* - "latest": picks the highest valid Kubernetes version
* - "1.X": picks the highest valid patch+gke.N patch in the 1.X version
* - "1.X.Y": picks the highest valid gke.N patch in the 1.X.Y version
* - "1.X.Y-gke.N": picks an explicit Kubernetes version
* - "-": picks the Kubernetes master version
* </pre>
*
* <code>string node_version = 5 [(.google.api.field_behavior) = REQUIRED];</code>
*
* @return The bytes for nodeVersion.
*/
com.google.protobuf.ByteString getNodeVersionBytes();
/**
*
*
* <pre>
* Required. The desired image type for the node pool. Please see
* https://cloud.google.com/kubernetes-engine/docs/concepts/node-images
* for available image types.
* </pre>
*
* <code>string image_type = 6 [(.google.api.field_behavior) = REQUIRED];</code>
*
* @return The imageType.
*/
java.lang.String getImageType();
/**
*
*
* <pre>
* Required. The desired image type for the node pool. Please see
* https://cloud.google.com/kubernetes-engine/docs/concepts/node-images
* for available image types.
* </pre>
*
* <code>string image_type = 6 [(.google.api.field_behavior) = REQUIRED];</code>
*
* @return The bytes for imageType.
*/
com.google.protobuf.ByteString getImageTypeBytes();
/**
*
*
* <pre>
* The desired list of Google Compute Engine
* [zones](https://cloud.google.com/compute/docs/zones#available)
* in which the node pool's nodes should be located. Changing the locations
* for a node pool will result in nodes being either created or removed from
* the node pool, depending on whether locations are being added or removed.
* </pre>
*
* <code>repeated string locations = 13;</code>
*
* @return A list containing the locations.
*/
java.util.List<java.lang.String> getLocationsList();
/**
*
*
* <pre>
* The desired list of Google Compute Engine
* [zones](https://cloud.google.com/compute/docs/zones#available)
* in which the node pool's nodes should be located. Changing the locations
* for a node pool will result in nodes being either created or removed from
* the node pool, depending on whether locations are being added or removed.
* </pre>
*
* <code>repeated string locations = 13;</code>
*
* @return The count of locations.
*/
int getLocationsCount();
/**
*
*
* <pre>
* The desired list of Google Compute Engine
* [zones](https://cloud.google.com/compute/docs/zones#available)
* in which the node pool's nodes should be located. Changing the locations
* for a node pool will result in nodes being either created or removed from
* the node pool, depending on whether locations are being added or removed.
* </pre>
*
* <code>repeated string locations = 13;</code>
*
* @param index The index of the element to return.
* @return The locations at the given index.
*/
java.lang.String getLocations(int index);
/**
*
*
* <pre>
* The desired list of Google Compute Engine
* [zones](https://cloud.google.com/compute/docs/zones#available)
* in which the node pool's nodes should be located. Changing the locations
* for a node pool will result in nodes being either created or removed from
* the node pool, depending on whether locations are being added or removed.
* </pre>
*
* <code>repeated string locations = 13;</code>
*
* @param index The index of the value to return.
* @return The bytes of the locations at the given index.
*/
com.google.protobuf.ByteString getLocationsBytes(int index);
/**
*
*
* <pre>
* The desired workload metadata config for the node pool.
* </pre>
*
* <code>.google.container.v1beta1.WorkloadMetadataConfig workload_metadata_config = 14;</code>
*
* @return Whether the workloadMetadataConfig field is set.
*/
boolean hasWorkloadMetadataConfig();
/**
*
*
* <pre>
* The desired workload metadata config for the node pool.
* </pre>
*
* <code>.google.container.v1beta1.WorkloadMetadataConfig workload_metadata_config = 14;</code>
*
* @return The workloadMetadataConfig.
*/
com.google.container.v1beta1.WorkloadMetadataConfig getWorkloadMetadataConfig();
/**
*
*
* <pre>
* The desired workload metadata config for the node pool.
* </pre>
*
* <code>.google.container.v1beta1.WorkloadMetadataConfig workload_metadata_config = 14;</code>
*/
com.google.container.v1beta1.WorkloadMetadataConfigOrBuilder getWorkloadMetadataConfigOrBuilder();
/**
*
*
* <pre>
* The name (project, location, cluster, node pool) of the node pool to
* update. Specified in the format
* `projects/*/locations/*/clusters/*/nodePools/*`.
* </pre>
*
* <code>string name = 8;</code>
*
* @return The name.
*/
java.lang.String getName();
/**
*
*
* <pre>
* The name (project, location, cluster, node pool) of the node pool to
* update. Specified in the format
* `projects/*/locations/*/clusters/*/nodePools/*`.
* </pre>
*
* <code>string name = 8;</code>
*
* @return The bytes for name.
*/
com.google.protobuf.ByteString getNameBytes();
/**
*
*
* <pre>
* Upgrade settings control disruption and speed of the upgrade.
* </pre>
*
* <code>.google.container.v1beta1.NodePool.UpgradeSettings upgrade_settings = 15;</code>
*
* @return Whether the upgradeSettings field is set.
*/
boolean hasUpgradeSettings();
/**
*
*
* <pre>
* Upgrade settings control disruption and speed of the upgrade.
* </pre>
*
* <code>.google.container.v1beta1.NodePool.UpgradeSettings upgrade_settings = 15;</code>
*
* @return The upgradeSettings.
*/
com.google.container.v1beta1.NodePool.UpgradeSettings getUpgradeSettings();
/**
*
*
* <pre>
* Upgrade settings control disruption and speed of the upgrade.
* </pre>
*
* <code>.google.container.v1beta1.NodePool.UpgradeSettings upgrade_settings = 15;</code>
*/
com.google.container.v1beta1.NodePool.UpgradeSettingsOrBuilder getUpgradeSettingsOrBuilder();
/**
*
*
* <pre>
* The desired network tags to be applied to all nodes in the node pool.
* If this field is not present, the tags will not be changed. Otherwise,
* the existing network tags will be *replaced* with the provided tags.
* </pre>
*
* <code>.google.container.v1beta1.NetworkTags tags = 16;</code>
*
* @return Whether the tags field is set.
*/
boolean hasTags();
/**
*
*
* <pre>
* The desired network tags to be applied to all nodes in the node pool.
* If this field is not present, the tags will not be changed. Otherwise,
* the existing network tags will be *replaced* with the provided tags.
* </pre>
*
* <code>.google.container.v1beta1.NetworkTags tags = 16;</code>
*
* @return The tags.
*/
com.google.container.v1beta1.NetworkTags getTags();
/**
*
*
* <pre>
* The desired network tags to be applied to all nodes in the node pool.
* If this field is not present, the tags will not be changed. Otherwise,
* the existing network tags will be *replaced* with the provided tags.
* </pre>
*
* <code>.google.container.v1beta1.NetworkTags tags = 16;</code>
*/
com.google.container.v1beta1.NetworkTagsOrBuilder getTagsOrBuilder();
/**
*
*
* <pre>
* The desired node taints to be applied to all nodes in the node pool.
* If this field is not present, the taints will not be changed. Otherwise,
* the existing node taints will be *replaced* with the provided taints.
* </pre>
*
* <code>.google.container.v1beta1.NodeTaints taints = 17;</code>
*
* @return Whether the taints field is set.
*/
boolean hasTaints();
/**
*
*
* <pre>
* The desired node taints to be applied to all nodes in the node pool.
* If this field is not present, the taints will not be changed. Otherwise,
* the existing node taints will be *replaced* with the provided taints.
* </pre>
*
* <code>.google.container.v1beta1.NodeTaints taints = 17;</code>
*
* @return The taints.
*/
com.google.container.v1beta1.NodeTaints getTaints();
/**
*
*
* <pre>
* The desired node taints to be applied to all nodes in the node pool.
* If this field is not present, the taints will not be changed. Otherwise,
* the existing node taints will be *replaced* with the provided taints.
* </pre>
*
* <code>.google.container.v1beta1.NodeTaints taints = 17;</code>
*/
com.google.container.v1beta1.NodeTaintsOrBuilder getTaintsOrBuilder();
/**
*
*
* <pre>
* The desired node labels to be applied to all nodes in the node pool.
* If this field is not present, the labels will not be changed. Otherwise,
* the existing node labels will be *replaced* with the provided labels.
* </pre>
*
* <code>.google.container.v1beta1.NodeLabels labels = 18;</code>
*
* @return Whether the labels field is set.
*/
boolean hasLabels();
/**
*
*
* <pre>
* The desired node labels to be applied to all nodes in the node pool.
* If this field is not present, the labels will not be changed. Otherwise,
* the existing node labels will be *replaced* with the provided labels.
* </pre>
*
* <code>.google.container.v1beta1.NodeLabels labels = 18;</code>
*
* @return The labels.
*/
com.google.container.v1beta1.NodeLabels getLabels();
/**
*
*
* <pre>
* The desired node labels to be applied to all nodes in the node pool.
* If this field is not present, the labels will not be changed. Otherwise,
* the existing node labels will be *replaced* with the provided labels.
* </pre>
*
* <code>.google.container.v1beta1.NodeLabels labels = 18;</code>
*/
com.google.container.v1beta1.NodeLabelsOrBuilder getLabelsOrBuilder();
/**
*
*
* <pre>
* Parameters that can be configured on Linux nodes.
* </pre>
*
* <code>.google.container.v1beta1.LinuxNodeConfig linux_node_config = 19;</code>
*
* @return Whether the linuxNodeConfig field is set.
*/
boolean hasLinuxNodeConfig();
/**
*
*
* <pre>
* Parameters that can be configured on Linux nodes.
* </pre>
*
* <code>.google.container.v1beta1.LinuxNodeConfig linux_node_config = 19;</code>
*
* @return The linuxNodeConfig.
*/
com.google.container.v1beta1.LinuxNodeConfig getLinuxNodeConfig();
/**
*
*
* <pre>
* Parameters that can be configured on Linux nodes.
* </pre>
*
* <code>.google.container.v1beta1.LinuxNodeConfig linux_node_config = 19;</code>
*/
com.google.container.v1beta1.LinuxNodeConfigOrBuilder getLinuxNodeConfigOrBuilder();
/**
*
*
* <pre>
* Node kubelet configs.
* </pre>
*
* <code>.google.container.v1beta1.NodeKubeletConfig kubelet_config = 20;</code>
*
* @return Whether the kubeletConfig field is set.
*/
boolean hasKubeletConfig();
/**
*
*
* <pre>
* Node kubelet configs.
* </pre>
*
* <code>.google.container.v1beta1.NodeKubeletConfig kubelet_config = 20;</code>
*
* @return The kubeletConfig.
*/
com.google.container.v1beta1.NodeKubeletConfig getKubeletConfig();
/**
*
*
* <pre>
* Node kubelet configs.
* </pre>
*
* <code>.google.container.v1beta1.NodeKubeletConfig kubelet_config = 20;</code>
*/
com.google.container.v1beta1.NodeKubeletConfigOrBuilder getKubeletConfigOrBuilder();
/**
*
*
* <pre>
* Node network config.
* </pre>
*
* <code>.google.container.v1beta1.NodeNetworkConfig node_network_config = 21;</code>
*
* @return Whether the nodeNetworkConfig field is set.
*/
boolean hasNodeNetworkConfig();
/**
*
*
* <pre>
* Node network config.
* </pre>
*
* <code>.google.container.v1beta1.NodeNetworkConfig node_network_config = 21;</code>
*
* @return The nodeNetworkConfig.
*/
com.google.container.v1beta1.NodeNetworkConfig getNodeNetworkConfig();
/**
*
*
* <pre>
* Node network config.
* </pre>
*
* <code>.google.container.v1beta1.NodeNetworkConfig node_network_config = 21;</code>
*/
com.google.container.v1beta1.NodeNetworkConfigOrBuilder getNodeNetworkConfigOrBuilder();
/**
*
*
* <pre>
* GCFS config.
* </pre>
*
* <code>.google.container.v1beta1.GcfsConfig gcfs_config = 22;</code>
*
* @return Whether the gcfsConfig field is set.
*/
boolean hasGcfsConfig();
/**
*
*
* <pre>
* GCFS config.
* </pre>
*
* <code>.google.container.v1beta1.GcfsConfig gcfs_config = 22;</code>
*
* @return The gcfsConfig.
*/
com.google.container.v1beta1.GcfsConfig getGcfsConfig();
/**
*
*
* <pre>
* GCFS config.
* </pre>
*
* <code>.google.container.v1beta1.GcfsConfig gcfs_config = 22;</code>
*/
com.google.container.v1beta1.GcfsConfigOrBuilder getGcfsConfigOrBuilder();
/**
*
*
* <pre>
* Confidential nodes config.
* All the nodes in the node pool will be Confidential VM once enabled.
* </pre>
*
* <code>.google.container.v1beta1.ConfidentialNodes confidential_nodes = 23;</code>
*
* @return Whether the confidentialNodes field is set.
*/
boolean hasConfidentialNodes();
/**
*
*
* <pre>
* Confidential nodes config.
* All the nodes in the node pool will be Confidential VM once enabled.
* </pre>
*
* <code>.google.container.v1beta1.ConfidentialNodes confidential_nodes = 23;</code>
*
* @return The confidentialNodes.
*/
com.google.container.v1beta1.ConfidentialNodes getConfidentialNodes();
/**
*
*
* <pre>
* Confidential nodes config.
* All the nodes in the node pool will be Confidential VM once enabled.
* </pre>
*
* <code>.google.container.v1beta1.ConfidentialNodes confidential_nodes = 23;</code>
*/
com.google.container.v1beta1.ConfidentialNodesOrBuilder getConfidentialNodesOrBuilder();
/**
*
*
* <pre>
* Enable or disable gvnic on the node pool.
* </pre>
*
* <code>.google.container.v1beta1.VirtualNIC gvnic = 29;</code>
*
* @return Whether the gvnic field is set.
*/
boolean hasGvnic();
/**
*
*
* <pre>
* Enable or disable gvnic on the node pool.
* </pre>
*
* <code>.google.container.v1beta1.VirtualNIC gvnic = 29;</code>
*
* @return The gvnic.
*/
com.google.container.v1beta1.VirtualNIC getGvnic();
/**
*
*
* <pre>
* Enable or disable gvnic on the node pool.
* </pre>
*
* <code>.google.container.v1beta1.VirtualNIC gvnic = 29;</code>
*/
com.google.container.v1beta1.VirtualNICOrBuilder getGvnicOrBuilder();
/**
*
*
* <pre>
* The current etag of the node pool.
* If an etag is provided and does not match the current etag of the node
* pool, update will be blocked and an ABORTED error will be returned.
* </pre>
*
* <code>string etag = 30;</code>
*
* @return The etag.
*/
java.lang.String getEtag();
/**
*
*
* <pre>
* The current etag of the node pool.
* If an etag is provided and does not match the current etag of the node
* pool, update will be blocked and an ABORTED error will be returned.
* </pre>
*
* <code>string etag = 30;</code>
*
* @return The bytes for etag.
*/
com.google.protobuf.ByteString getEtagBytes();
/**
*
*
* <pre>
* Enable or disable NCCL fast socket for the node pool.
* </pre>
*
* <code>.google.container.v1beta1.FastSocket fast_socket = 31;</code>
*
* @return Whether the fastSocket field is set.
*/
boolean hasFastSocket();
/**
*
*
* <pre>
* Enable or disable NCCL fast socket for the node pool.
* </pre>
*
* <code>.google.container.v1beta1.FastSocket fast_socket = 31;</code>
*
* @return The fastSocket.
*/
com.google.container.v1beta1.FastSocket getFastSocket();
/**
*
*
* <pre>
* Enable or disable NCCL fast socket for the node pool.
* </pre>
*
* <code>.google.container.v1beta1.FastSocket fast_socket = 31;</code>
*/
com.google.container.v1beta1.FastSocketOrBuilder getFastSocketOrBuilder();
/**
*
*
* <pre>
* Logging configuration.
* </pre>
*
* <code>.google.container.v1beta1.NodePoolLoggingConfig logging_config = 32;</code>
*
* @return Whether the loggingConfig field is set.
*/
boolean hasLoggingConfig();
/**
*
*
* <pre>
* Logging configuration.
* </pre>
*
* <code>.google.container.v1beta1.NodePoolLoggingConfig logging_config = 32;</code>
*
* @return The loggingConfig.
*/
com.google.container.v1beta1.NodePoolLoggingConfig getLoggingConfig();
/**
*
*
* <pre>
* Logging configuration.
* </pre>
*
* <code>.google.container.v1beta1.NodePoolLoggingConfig logging_config = 32;</code>
*/
com.google.container.v1beta1.NodePoolLoggingConfigOrBuilder getLoggingConfigOrBuilder();
/**
*
*
* <pre>
* The resource labels for the node pool to use to annotate any related
* Google Compute Engine resources.
* </pre>
*
* <code>.google.container.v1beta1.ResourceLabels resource_labels = 33;</code>
*
* @return Whether the resourceLabels field is set.
*/
boolean hasResourceLabels();
/**
*
*
* <pre>
* The resource labels for the node pool to use to annotate any related
* Google Compute Engine resources.
* </pre>
*
* <code>.google.container.v1beta1.ResourceLabels resource_labels = 33;</code>
*
* @return The resourceLabels.
*/
com.google.container.v1beta1.ResourceLabels getResourceLabels();
/**
*
*
* <pre>
* The resource labels for the node pool to use to annotate any related
* Google Compute Engine resources.
* </pre>
*
* <code>.google.container.v1beta1.ResourceLabels resource_labels = 33;</code>
*/
com.google.container.v1beta1.ResourceLabelsOrBuilder getResourceLabelsOrBuilder();
/**
*
*
* <pre>
* Parameters that can be configured on Windows nodes.
* </pre>
*
* <code>.google.container.v1beta1.WindowsNodeConfig windows_node_config = 34;</code>
*
* @return Whether the windowsNodeConfig field is set.
*/
boolean hasWindowsNodeConfig();
/**
*
*
* <pre>
* Parameters that can be configured on Windows nodes.
* </pre>
*
* <code>.google.container.v1beta1.WindowsNodeConfig windows_node_config = 34;</code>
*
* @return The windowsNodeConfig.
*/
com.google.container.v1beta1.WindowsNodeConfig getWindowsNodeConfig();
/**
*
*
* <pre>
* Parameters that can be configured on Windows nodes.
* </pre>
*
* <code>.google.container.v1beta1.WindowsNodeConfig windows_node_config = 34;</code>
*/
com.google.container.v1beta1.WindowsNodeConfigOrBuilder getWindowsNodeConfigOrBuilder();
/**
*
*
* <pre>
* A list of hardware accelerators to be attached to each node.
* See
* https://cloud.google.com/compute/docs/gpus
* for more information about support for GPUs.
* </pre>
*
* <code>repeated .google.container.v1beta1.AcceleratorConfig accelerators = 35;</code>
*/
java.util.List<com.google.container.v1beta1.AcceleratorConfig> getAcceleratorsList();
/**
*
*
* <pre>
* A list of hardware accelerators to be attached to each node.
* See
* https://cloud.google.com/compute/docs/gpus
* for more information about support for GPUs.
* </pre>
*
* <code>repeated .google.container.v1beta1.AcceleratorConfig accelerators = 35;</code>
*/
com.google.container.v1beta1.AcceleratorConfig getAccelerators(int index);
/**
*
*
* <pre>
* A list of hardware accelerators to be attached to each node.
* See
* https://cloud.google.com/compute/docs/gpus
* for more information about support for GPUs.
* </pre>
*
* <code>repeated .google.container.v1beta1.AcceleratorConfig accelerators = 35;</code>
*/
int getAcceleratorsCount();
/**
*
*
* <pre>
* A list of hardware accelerators to be attached to each node.
* See
* https://cloud.google.com/compute/docs/gpus
* for more information about support for GPUs.
* </pre>
*
* <code>repeated .google.container.v1beta1.AcceleratorConfig accelerators = 35;</code>
*/
java.util.List<? extends com.google.container.v1beta1.AcceleratorConfigOrBuilder>
getAcceleratorsOrBuilderList();
/**
*
*
* <pre>
* A list of hardware accelerators to be attached to each node.
* See
* https://cloud.google.com/compute/docs/gpus
* for more information about support for GPUs.
* </pre>
*
* <code>repeated .google.container.v1beta1.AcceleratorConfig accelerators = 35;</code>
*/
com.google.container.v1beta1.AcceleratorConfigOrBuilder getAcceleratorsOrBuilder(int index);
/**
*
*
* <pre>
* Optional. The desired machine type for nodes in the node pool.
* Initiates an upgrade operation that migrates the nodes in the
* node pool to the specified machine type.
* </pre>
*
* <code>string machine_type = 36 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @return The machineType.
*/
java.lang.String getMachineType();
/**
*
*
* <pre>
* Optional. The desired machine type for nodes in the node pool.
* Initiates an upgrade operation that migrates the nodes in the
* node pool to the specified machine type.
* </pre>
*
* <code>string machine_type = 36 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @return The bytes for machineType.
*/
com.google.protobuf.ByteString getMachineTypeBytes();
/**
*
*
* <pre>
* Optional. The desired disk type for nodes in the node pool.
* Initiates an upgrade operation that migrates the nodes in the
* node pool to the specified disk type.
* </pre>
*
* <code>string disk_type = 37 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @return The diskType.
*/
java.lang.String getDiskType();
/**
*
*
* <pre>
* Optional. The desired disk type for nodes in the node pool.
* Initiates an upgrade operation that migrates the nodes in the
* node pool to the specified disk type.
* </pre>
*
* <code>string disk_type = 37 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @return The bytes for diskType.
*/
com.google.protobuf.ByteString getDiskTypeBytes();
/**
*
*
* <pre>
* Optional. The desired disk size for nodes in the node pool.
* Initiates an upgrade operation that migrates the nodes in the
* node pool to the specified disk size.
* </pre>
*
* <code>int64 disk_size_gb = 38 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @return The diskSizeGb.
*/
long getDiskSizeGb();
/**
*
*
* <pre>
* Desired resource manager tag keys and values to be attached to the nodes
* for managing Compute Engine firewalls using Network Firewall Policies.
* Existing tags will be replaced with new values.
* </pre>
*
* <code>.google.container.v1beta1.ResourceManagerTags resource_manager_tags = 39;</code>
*
* @return Whether the resourceManagerTags field is set.
*/
boolean hasResourceManagerTags();
/**
*
*
* <pre>
* Desired resource manager tag keys and values to be attached to the nodes
* for managing Compute Engine firewalls using Network Firewall Policies.
* Existing tags will be replaced with new values.
* </pre>
*
* <code>.google.container.v1beta1.ResourceManagerTags resource_manager_tags = 39;</code>
*
* @return The resourceManagerTags.
*/
com.google.container.v1beta1.ResourceManagerTags getResourceManagerTags();
/**
*
*
* <pre>
* Desired resource manager tag keys and values to be attached to the nodes
* for managing Compute Engine firewalls using Network Firewall Policies.
* Existing tags will be replaced with new values.
* </pre>
*
* <code>.google.container.v1beta1.ResourceManagerTags resource_manager_tags = 39;</code>
*/
com.google.container.v1beta1.ResourceManagerTagsOrBuilder getResourceManagerTagsOrBuilder();
/**
*
*
* <pre>
* The desired containerd config for nodes in the node pool.
* Initiates an upgrade operation that recreates the nodes with the new
* config.
* </pre>
*
* <code>.google.container.v1beta1.ContainerdConfig containerd_config = 40;</code>
*
* @return Whether the containerdConfig field is set.
*/
boolean hasContainerdConfig();
/**
*
*
* <pre>
* The desired containerd config for nodes in the node pool.
* Initiates an upgrade operation that recreates the nodes with the new
* config.
* </pre>
*
* <code>.google.container.v1beta1.ContainerdConfig containerd_config = 40;</code>
*
* @return The containerdConfig.
*/
com.google.container.v1beta1.ContainerdConfig getContainerdConfig();
/**
*
*
* <pre>
* The desired containerd config for nodes in the node pool.
* Initiates an upgrade operation that recreates the nodes with the new
* config.
* </pre>
*
* <code>.google.container.v1beta1.ContainerdConfig containerd_config = 40;</code>
*/
com.google.container.v1beta1.ContainerdConfigOrBuilder getContainerdConfigOrBuilder();
/**
*
*
* <pre>
* Specifies the configuration of queued provisioning.
* </pre>
*
* <code>.google.container.v1beta1.NodePool.QueuedProvisioning queued_provisioning = 42;</code>
*
* @return Whether the queuedProvisioning field is set.
*/
boolean hasQueuedProvisioning();
/**
*
*
* <pre>
* Specifies the configuration of queued provisioning.
* </pre>
*
* <code>.google.container.v1beta1.NodePool.QueuedProvisioning queued_provisioning = 42;</code>
*
* @return The queuedProvisioning.
*/
com.google.container.v1beta1.NodePool.QueuedProvisioning getQueuedProvisioning();
/**
*
*
* <pre>
* Specifies the configuration of queued provisioning.
* </pre>
*
* <code>.google.container.v1beta1.NodePool.QueuedProvisioning queued_provisioning = 42;</code>
*/
com.google.container.v1beta1.NodePool.QueuedProvisioningOrBuilder
getQueuedProvisioningOrBuilder();
/**
*
*
* <pre>
* List of Storage Pools where boot disks are provisioned.
* Existing Storage Pools will be replaced with storage-pools.
* </pre>
*
* <code>repeated string storage_pools = 43;</code>
*
* @return A list containing the storagePools.
*/
java.util.List<java.lang.String> getStoragePoolsList();
/**
*
*
* <pre>
* List of Storage Pools where boot disks are provisioned.
* Existing Storage Pools will be replaced with storage-pools.
* </pre>
*
* <code>repeated string storage_pools = 43;</code>
*
* @return The count of storagePools.
*/
int getStoragePoolsCount();
/**
*
*
* <pre>
* List of Storage Pools where boot disks are provisioned.
* Existing Storage Pools will be replaced with storage-pools.
* </pre>
*
* <code>repeated string storage_pools = 43;</code>
*
* @param index The index of the element to return.
* @return The storagePools at the given index.
*/
java.lang.String getStoragePools(int index);
/**
*
*
* <pre>
* List of Storage Pools where boot disks are provisioned.
* Existing Storage Pools will be replaced with storage-pools.
* </pre>
*
* <code>repeated string storage_pools = 43;</code>
*
* @param index The index of the value to return.
* @return The bytes of the storagePools at the given index.
*/
com.google.protobuf.ByteString getStoragePoolsBytes(int index);
/**
*
*
* <pre>
* The maximum duration for the nodes to exist.
* If unspecified, the nodes can exist indefinitely.
* </pre>
*
* <code>.google.protobuf.Duration max_run_duration = 45;</code>
*
* @return Whether the maxRunDuration field is set.
*/
boolean hasMaxRunDuration();
/**
*
*
* <pre>
* The maximum duration for the nodes to exist.
* If unspecified, the nodes can exist indefinitely.
* </pre>
*
* <code>.google.protobuf.Duration max_run_duration = 45;</code>
*
* @return The maxRunDuration.
*/
com.google.protobuf.Duration getMaxRunDuration();
/**
*
*
* <pre>
* The maximum duration for the nodes to exist.
* If unspecified, the nodes can exist indefinitely.
* </pre>
*
* <code>.google.protobuf.Duration max_run_duration = 45;</code>
*/
com.google.protobuf.DurationOrBuilder getMaxRunDurationOrBuilder();
/**
*
*
* <pre>
* Flex Start flag for enabling Flex Start VM.
* </pre>
*
* <code>optional bool flex_start = 46;</code>
*
* @return Whether the flexStart field is set.
*/
boolean hasFlexStart();
/**
*
*
* <pre>
* Flex Start flag for enabling Flex Start VM.
* </pre>
*
* <code>optional bool flex_start = 46;</code>
*
* @return The flexStart.
*/
boolean getFlexStart();
/**
*
*
* <pre>
* The desired boot disk config for nodes in the node pool.
* Initiates an upgrade operation that migrates the nodes in the
* node pool to the specified boot disk config.
* </pre>
*
* <code>.google.container.v1beta1.BootDisk boot_disk = 47;</code>
*
* @return Whether the bootDisk field is set.
*/
boolean hasBootDisk();
/**
*
*
* <pre>
* The desired boot disk config for nodes in the node pool.
* Initiates an upgrade operation that migrates the nodes in the
* node pool to the specified boot disk config.
* </pre>
*
* <code>.google.container.v1beta1.BootDisk boot_disk = 47;</code>
*
* @return The bootDisk.
*/
com.google.container.v1beta1.BootDisk getBootDisk();
/**
*
*
* <pre>
* The desired boot disk config for nodes in the node pool.
* Initiates an upgrade operation that migrates the nodes in the
* node pool to the specified boot disk config.
* </pre>
*
* <code>.google.container.v1beta1.BootDisk boot_disk = 47;</code>
*/
com.google.container.v1beta1.BootDiskOrBuilder getBootDiskOrBuilder();
}
|
apache/juneau | 33,910 | juneau-utest/src/test/java/org/apache/juneau/rest/annotation/Restx_RoleGuard_Test.java | // ***************************************************************************************************************************
// * Licensed to the Apache Software Foundation (ASF) under one or more contributor license agreements. See the NOTICE file *
// * distributed with this work for additional information regarding copyright ownership. The ASF licenses this file *
// * to you under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance *
// * with the License. You may obtain a copy of the License at *
// * *
// * http://www.apache.org/licenses/LICENSE-2.0 *
// * *
// * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an *
// * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the *
// * specific language governing permissions and limitations under the License. *
// ***************************************************************************************************************************
package org.apache.juneau.rest.annotation;
import org.apache.juneau.*;
import org.apache.juneau.rest.client.*;
import org.apache.juneau.rest.mock.*;
import org.junit.jupiter.api.*;
class Restx_RoleGuard_Test extends TestBase {
/** Convenience varargs-to-array helper for building lists of operations to exercise. */
private static RestOperation[] ops(RestOperation...values) {
    return values;
}

/** Shorthand factory for a single REST operation with the given HTTP method and URL. */
private static RestOperation op(String httpMethod, String path) {
    return RestOperation.of(httpMethod, path);
}
//-----------------------------------------------------------------------------------------------------------------
// Simple guard on class
//-----------------------------------------------------------------------------------------------------------------
/** Resource with a single simple role guard declared at the class level. */
@Rest(roleGuard="foo")
public static class A1 {
    @RestOp public String a() { return "OK"; }
    @RestGet public String b() { return "OK"; }
    @RestPut public String c() { return "OK"; }
    @RestPost public String d() { return "OK"; }
    @RestDelete public String e() { return "OK"; }
}

@Test void a01a_onClass_simple() throws Exception {
    var client = MockRestClient.buildLax(A1.class);
    var calls = ops(op("get","/a"), op("get","/b"), op("put","/c"), op("post","/d"), op("delete","/e"));
    for (var call : calls) {
        // Any role set containing "foo" is allowed.
        client.request(call).roles("foo").run().assertStatus(200);
        client.request(call).roles("foo","bar").run().assertStatus(200);
        client.request(call).roles("bar","foo").run().assertStatus(200);
        // Absent or non-matching roles are rejected.
        client.request(call).run().assertStatus(403);
        client.request(call).roles("foo2").run().assertStatus(403);
        client.request(call).roles("foo2","bar").run().assertStatus(403);
    }
}
//-----------------------------------------------------------------------------------------------------------------
// Simple guard on method
//-----------------------------------------------------------------------------------------------------------------
/** Resource with the same simple role guard declared on every operation. */
@Rest
public static class A2 {
    @RestOp(roleGuard="foo") public String a() { return "OK"; }
    @RestGet(roleGuard="foo") public String b() { return "OK"; }
    @RestPut(roleGuard="foo") public String c() { return "OK"; }
    @RestPost(roleGuard="foo") public String d() { return "OK"; }
    @RestDelete(roleGuard="foo") public String e() { return "OK"; }
}

@Test void a02a_onMethod_simple() throws Exception {
    var client = MockRestClient.buildLax(A2.class);
    var calls = ops(op("get","/a"), op("get","/b"), op("put","/c"), op("post","/d"), op("delete","/e"));
    for (var call : calls) {
        // Any role set containing "foo" is allowed.
        client.request(call).roles("foo").run().assertStatus(200);
        client.request(call).roles("foo","bar").run().assertStatus(200);
        client.request(call).roles("bar","foo").run().assertStatus(200);
        // Absent or non-matching roles are rejected.
        client.request(call).run().assertStatus(403);
        client.request(call).roles("foo2").run().assertStatus(403);
        client.request(call).roles("foo2","bar").run().assertStatus(403);
    }
}
//-----------------------------------------------------------------------------------------------------------------
// Simple guards on class and method
//-----------------------------------------------------------------------------------------------------------------
/** Resource with one guard on the class and a second on each operation; both must pass. */
@Rest(roleGuard="foo")
public static class A3 {
    @RestOp(roleGuard="bar") public String a() { return "OK"; }
    @RestGet(roleGuard="bar") public String b() { return "OK"; }
    @RestPut(roleGuard="bar") public String c() { return "OK"; }
    @RestPost(roleGuard="bar") public String d() { return "OK"; }
    @RestDelete(roleGuard="bar") public String e() { return "OK"; }
}

@Test void a03a_onBoth_simple() throws Exception {
    var client = MockRestClient.buildLax(A3.class);
    var calls = ops(op("get","/a"), op("get","/b"), op("put","/c"), op("post","/d"), op("delete","/e"));
    for (var call : calls) {
        // Both "foo" (class) and "bar" (method) must be present.
        client.request(call).roles("foo","bar").run().assertStatus(200);
        client.request(call).roles("bar","foo").run().assertStatus(200);
        client.request(call).roles("bar","foo","baz").run().assertStatus(200);
        // Missing either required role is rejected.
        client.request(call).run().assertStatus(403);
        client.request(call).roles("foo").run().assertStatus(403);
        client.request(call).roles("bar").run().assertStatus(403);
        client.request(call).roles("foo2").run().assertStatus(403);
        client.request(call).roles("foo2","bar").run().assertStatus(403);
    }
}
//-----------------------------------------------------------------------------------------------------------------
// Simple guards on class and method, inherited
//-----------------------------------------------------------------------------------------------------------------
/** Parent resource with class-level and method-level guards, to be inherited below. */
@Rest(roleGuard="foo")
public static class A4a {
    @RestOp(roleGuard="bar") public String a() { return "OK"; }
    @RestGet(roleGuard="bar") public String b() { return "OK"; }
    @RestPut(roleGuard="bar") public String c() { return "OK"; }
    @RestPost(roleGuard="bar") public String d() { return "OK"; }
    @RestDelete(roleGuard="bar") public String e() { return "OK"; }
}

/** Child resource adding its own class-level and method-level guards on top of the inherited ones. */
@Rest(roleGuard="baz")
public static class A4b extends A4a {
    @Override @RestOp(roleGuard="qux") public String a() { return "OK"; }
    @Override @RestGet(roleGuard="qux") public String b() { return "OK"; }
    @Override @RestPut(roleGuard="qux") public String c() { return "OK"; }
    @Override @RestPost(roleGuard="qux") public String d() { return "OK"; }
    @Override @RestDelete(roleGuard="qux") public String e() { return "OK"; }
}

@Test void a04a_inheritence_simple() throws Exception {
    var client = MockRestClient.buildLax(A4b.class);
    var calls = ops(op("get","/a"), op("get","/b"), op("put","/c"), op("post","/d"), op("delete","/e"));
    for (var call : calls) {
        // All four guards (inherited and declared) must be satisfied.
        client.request(call).roles("foo","bar","baz","qux").run().assertStatus(200);
        client.request(call).roles("foo","bar","baz","qux","quux").run().assertStatus(200);
        // Dropping any single required role is rejected.
        client.request(call).roles("foo","bar","baz").run().assertStatus(403);
        client.request(call).roles("foo","bar","qux").run().assertStatus(403);
        client.request(call).roles("foo","baz","qux").run().assertStatus(403);
        client.request(call).roles("bar","baz","qux").run().assertStatus(403);
    }
}
//-----------------------------------------------------------------------------------------------------------------
// @Rest(roleGuard), multiple guards on class
//-----------------------------------------------------------------------------------------------------------------
/** Unguarded base resource; subclasses attach class-level role-guard expressions. */
@Rest
public static class B1 {
    @RestOp public String a() { return "OK"; }
    @RestGet public String b() { return "OK"; }
    @RestPut public String c() { return "OK"; }
    @RestPost public String d() { return "OK"; }
    @RestDelete public String e() { return "OK"; }
}

/** OR expression using comma syntax. */
@Rest(roleGuard="foo,bar")
public static class B1a extends B1 {}

@Test void b01a_orsWithComma_pass() throws Exception {
    var client = MockRestClient.buildLax(B1a.class);
    var calls = ops(op("get","/a"), op("get","/b"), op("put","/c"), op("post","/d"), op("delete","/e"));
    for (var call : calls) {
        // Either listed role (or both) satisfies the guard.
        client.request(call).roles("foo").run().assertStatus(200);
        client.request(call).roles("bar").run().assertStatus(200);
        client.request(call).roles("foo","bar").run().assertStatus(200);
        client.request(call).roles("foo","bar","baz").run().assertStatus(200);
        // No matching role is rejected.
        client.request(call).roles().run().assertStatus(403);
        client.request(call).roles("baz").run().assertStatus(403);
    }
}

/** OR expression using single-pipe syntax. */
@Rest(roleGuard="foo | bar")
public static class B1b extends B1 {}

@Test void b01b_orsWithSinglePipe_pass() throws Exception {
    var client = MockRestClient.buildLax(B1b.class);
    var calls = ops(op("get","/a"), op("get","/b"), op("put","/c"), op("post","/d"), op("delete","/e"));
    for (var call : calls) {
        client.request(call).roles("foo").run().assertStatus(200);
        client.request(call).roles("bar").run().assertStatus(200);
        client.request(call).roles("foo","bar").run().assertStatus(200);
        client.request(call).roles("foo","bar","baz").run().assertStatus(200);
        client.request(call).roles().run().assertStatus(403);
        client.request(call).roles("baz").run().assertStatus(403);
    }
}

/** OR expression using double-pipe syntax. */
@Rest(roleGuard="foo || bar")
public static class B1c extends B1 {}

@Test void b01c_orsWithDoublePipe_pass() throws Exception {
    var client = MockRestClient.buildLax(B1c.class);
    var calls = ops(op("get","/a"), op("get","/b"), op("put","/c"), op("post","/d"), op("delete","/e"));
    for (var call : calls) {
        client.request(call).roles("foo").run().assertStatus(200);
        client.request(call).roles("bar").run().assertStatus(200);
        client.request(call).roles("foo","bar").run().assertStatus(200);
        client.request(call).roles("foo","bar","baz").run().assertStatus(200);
        client.request(call).roles().run().assertStatus(403);
        client.request(call).roles("baz").run().assertStatus(403);
    }
}

/** AND expression using single-ampersand syntax. */
@Rest(roleGuard="foo & bar")
public static class B1d extends B1 {}

@Test void b01d_andsWithSingleAmp_pass() throws Exception {
    var client = MockRestClient.buildLax(B1d.class);
    var calls = ops(op("get","/a"), op("get","/b"), op("put","/c"), op("post","/d"), op("delete","/e"));
    for (var call : calls) {
        // Both roles are required.
        client.request(call).roles("foo","bar").run().assertStatus(200);
        client.request(call).roles("foo","bar","baz").run().assertStatus(200);
        client.request(call).roles().run().assertStatus(403);
        client.request(call).roles("foo").run().assertStatus(403);
        client.request(call).roles("bar").run().assertStatus(403);
        client.request(call).roles("baz").run().assertStatus(403);
    }
}

/** AND expression using double-ampersand syntax. */
@Rest(roleGuard="foo && bar")
public static class B1e extends B1 {}

@Test void b01e_andsWithDoubleAmp_pass() throws Exception {
    var client = MockRestClient.buildLax(B1e.class);
    var calls = ops(op("get","/a"), op("get","/b"), op("put","/c"), op("post","/d"), op("delete","/e"));
    for (var call : calls) {
        client.request(call).roles("foo","bar").run().assertStatus(200);
        client.request(call).roles("foo","bar","baz").run().assertStatus(200);
        client.request(call).roles().run().assertStatus(403);
        client.request(call).roles("foo").run().assertStatus(403);
        client.request(call).roles("bar").run().assertStatus(403);
        client.request(call).roles("baz").run().assertStatus(403);
    }
}

/** AND expression with parenthesized operands. */
@Rest(roleGuard="(foo) && (bar)")
public static class B1f extends B1 {}

@Test void b01f_andsWithDoubleAmpAndParens_pass() throws Exception {
    var client = MockRestClient.buildLax(B1f.class);
    var calls = ops(op("get","/a"), op("get","/b"), op("put","/c"), op("post","/d"), op("delete","/e"));
    for (var call : calls) {
        client.request(call).roles("foo","bar").run().assertStatus(200);
        client.request(call).roles("foo","bar","baz").run().assertStatus(200);
        client.request(call).roles().run().assertStatus(403);
        client.request(call).roles("foo").run().assertStatus(403);
        client.request(call).roles("bar").run().assertStatus(403);
        client.request(call).roles("baz").run().assertStatus(403);
    }
}

/** Nested expression: "foo" plus at least one of "bar"/"baz". */
@Rest(roleGuard="foo && (bar || baz)")
public static class B1g extends B1 {}

@Test void b01g_complex_pass() throws Exception {
    var client = MockRestClient.buildLax(B1g.class);
    var calls = ops(op("get","/a"), op("get","/b"), op("put","/c"), op("post","/d"), op("delete","/e"));
    for (var call : calls) {
        client.request(call).roles("foo","bar").run().assertStatus(200);
        client.request(call).roles("foo","baz").run().assertStatus(200);
        client.request(call).roles("foo","bar","baz").run().assertStatus(200);
        client.request(call).roles().run().assertStatus(403);
        client.request(call).roles("foo").run().assertStatus(403);
        client.request(call).roles("bar","baz").run().assertStatus(403);
        client.request(call).roles("baz").run().assertStatus(403);
    }
}

/** Nested expression: "foo" alone, or "bar" and "baz" together. */
@Rest(roleGuard="foo || (bar && baz)")
public static class B1h extends B1 {}

@Test void b01h_complex_pass() throws Exception {
    var client = MockRestClient.buildLax(B1h.class);
    var calls = ops(op("get","/a"), op("get","/b"), op("put","/c"), op("post","/d"), op("delete","/e"));
    for (var call : calls) {
        client.request(call).roles("foo").run().assertStatus(200);
        client.request(call).roles("bar","baz").run().assertStatus(200);
        client.request(call).roles("foo","bar","baz").run().assertStatus(200);
        client.request(call).roles().run().assertStatus(403);
        client.request(call).roles("bar").run().assertStatus(403);
        client.request(call).roles("baz").run().assertStatus(403);
    }
}
//-----------------------------------------------------------------------------------------------------------------
// @RestOp(roleGuard), multiple guards on method
//-----------------------------------------------------------------------------------------------------------------
/** OR expression using comma syntax, declared per-operation. */
@Rest
public static class B2a {
    @RestOp(roleGuard="foo,bar") public String a() { return "OK"; }
    @RestGet(roleGuard="foo,bar") public String b() { return "OK"; }
    @RestPut(roleGuard="foo,bar") public String c() { return "OK"; }
    @RestPost(roleGuard="foo,bar") public String d() { return "OK"; }
    @RestDelete(roleGuard="foo,bar") public String e() { return "OK"; }
}

@Test void b02a_orsWithComma_pass() throws Exception {
    var client = MockRestClient.buildLax(B2a.class);
    var calls = ops(op("get","/a"), op("get","/b"), op("put","/c"), op("post","/d"), op("delete","/e"));
    for (var call : calls) {
        // Either listed role (or both) satisfies the guard.
        client.request(call).roles("foo").run().assertStatus(200);
        client.request(call).roles("bar").run().assertStatus(200);
        client.request(call).roles("foo","bar").run().assertStatus(200);
        client.request(call).roles("foo","bar","baz").run().assertStatus(200);
        // No matching role is rejected.
        client.request(call).roles().run().assertStatus(403);
        client.request(call).roles("baz").run().assertStatus(403);
    }
}

/** OR expression using single-pipe syntax, declared per-operation. */
@Rest
public static class B2b {
    @RestOp(roleGuard="foo | bar") public String a() { return "OK"; }
    @RestGet(roleGuard="foo | bar") public String b() { return "OK"; }
    @RestPut(roleGuard="foo | bar") public String c() { return "OK"; }
    @RestPost(roleGuard="foo | bar") public String d() { return "OK"; }
    @RestDelete(roleGuard="foo | bar") public String e() { return "OK"; }
}

@Test void b02b_orsWithSinglePipe_pass() throws Exception {
    var client = MockRestClient.buildLax(B2b.class);
    var calls = ops(op("get","/a"), op("get","/b"), op("put","/c"), op("post","/d"), op("delete","/e"));
    for (var call : calls) {
        client.request(call).roles("foo").run().assertStatus(200);
        client.request(call).roles("bar").run().assertStatus(200);
        client.request(call).roles("foo","bar").run().assertStatus(200);
        client.request(call).roles("foo","bar","baz").run().assertStatus(200);
        client.request(call).roles().run().assertStatus(403);
        client.request(call).roles("baz").run().assertStatus(403);
    }
}

/** OR expression using double-pipe syntax, declared per-operation. */
@Rest
public static class B2c {
    @RestOp(roleGuard="foo || bar") public String a() { return "OK"; }
    @RestGet(roleGuard="foo || bar") public String b() { return "OK"; }
    @RestPut(roleGuard="foo || bar") public String c() { return "OK"; }
    @RestPost(roleGuard="foo || bar") public String d() { return "OK"; }
    @RestDelete(roleGuard="foo || bar") public String e() { return "OK"; }
}

@Test void b02c_orsWithDoublePipe_pass() throws Exception {
    var client = MockRestClient.buildLax(B2c.class);
    var calls = ops(op("get","/a"), op("get","/b"), op("put","/c"), op("post","/d"), op("delete","/e"));
    for (var call : calls) {
        client.request(call).roles("foo").run().assertStatus(200);
        client.request(call).roles("bar").run().assertStatus(200);
        client.request(call).roles("foo","bar").run().assertStatus(200);
        client.request(call).roles("foo","bar","baz").run().assertStatus(200);
        client.request(call).roles().run().assertStatus(403);
        client.request(call).roles("baz").run().assertStatus(403);
    }
}

/** AND expression using single-ampersand syntax, declared per-operation. */
@Rest
public static class B2d {
    @RestOp(roleGuard="foo & bar") public String a() { return "OK"; }
    @RestGet(roleGuard="foo & bar") public String b() { return "OK"; }
    @RestPut(roleGuard="foo & bar") public String c() { return "OK"; }
    @RestPost(roleGuard="foo & bar") public String d() { return "OK"; }
    @RestDelete(roleGuard="foo & bar") public String e() { return "OK"; }
}

@Test void b02d_andsWithSingleAmp_pass() throws Exception {
    var client = MockRestClient.buildLax(B2d.class);
    var calls = ops(op("get","/a"), op("get","/b"), op("put","/c"), op("post","/d"), op("delete","/e"));
    for (var call : calls) {
        // Both roles are required.
        client.request(call).roles("foo","bar").run().assertStatus(200);
        client.request(call).roles("foo","bar","baz").run().assertStatus(200);
        client.request(call).roles().run().assertStatus(403);
        client.request(call).roles("foo").run().assertStatus(403);
        client.request(call).roles("bar").run().assertStatus(403);
        client.request(call).roles("baz").run().assertStatus(403);
    }
}

/** AND expression using double-ampersand syntax, declared per-operation. */
@Rest
public static class B2e {
    @RestOp(roleGuard="foo && bar") public String a() { return "OK"; }
    @RestGet(roleGuard="foo && bar") public String b() { return "OK"; }
    @RestPut(roleGuard="foo && bar") public String c() { return "OK"; }
    @RestPost(roleGuard="foo && bar") public String d() { return "OK"; }
    @RestDelete(roleGuard="foo && bar") public String e() { return "OK"; }
}

@Test void b02e_andsWithDoubleAmp_pass() throws Exception {
    var client = MockRestClient.buildLax(B2e.class);
    var calls = ops(op("get","/a"), op("get","/b"), op("put","/c"), op("post","/d"), op("delete","/e"));
    for (var call : calls) {
        client.request(call).roles("foo","bar").run().assertStatus(200);
        client.request(call).roles("foo","bar","baz").run().assertStatus(200);
        client.request(call).roles().run().assertStatus(403);
        client.request(call).roles("foo").run().assertStatus(403);
        client.request(call).roles("bar").run().assertStatus(403);
        client.request(call).roles("baz").run().assertStatus(403);
    }
}

/** AND expression with parenthesized operands, declared per-operation. */
@Rest
public static class B2f {
    @RestOp(roleGuard="(foo) && (bar)") public String get() { return "OK"; }
}

@Test void b02f_andsWithDoubleAmpAndParens_pass() throws Exception {
    var client = MockRestClient.buildLax(B2f.class);
    client.get().roles("foo","bar").run().assertStatus(200);
    client.get().roles("foo","bar","baz").run().assertStatus(200);
    client.get().roles().run().assertStatus(403);
    client.get().roles("foo").run().assertStatus(403);
    client.get().roles("bar").run().assertStatus(403);
    client.get().roles("baz").run().assertStatus(403);
}

/** Nested expression: "foo" plus at least one of "bar"/"baz". */
@Rest
public static class B2g {
    @RestOp(roleGuard="foo && (bar || baz)") public String get() { return "OK"; }
}

@Test void b02g_complex_pass() throws Exception {
    var client = MockRestClient.buildLax(B2g.class);
    client.get().roles("foo","bar").run().assertStatus(200);
    client.get().roles("foo","baz").run().assertStatus(200);
    client.get().roles("foo","bar","baz").run().assertStatus(200);
    client.get().roles().run().assertStatus(403);
    client.get().roles("foo").run().assertStatus(403);
    client.get().roles("bar","baz").run().assertStatus(403);
    client.get().roles("baz").run().assertStatus(403);
}

/** Nested expression: "foo" alone, or "bar" and "baz" together. */
@Rest
public static class B2h {
    @RestOp(roleGuard="foo || (bar && baz)") public String get() { return "OK"; }
}

@Test void b02h_complex_pass() throws Exception {
    var client = MockRestClient.buildLax(B2h.class);
    client.get().roles("foo").run().assertStatus(200);
    client.get().roles("bar","baz").run().assertStatus(200);
    client.get().roles("foo","bar","baz").run().assertStatus(200);
    client.get().roles().run().assertStatus(403);
    client.get().roles("bar").run().assertStatus(403);
    client.get().roles("baz").run().assertStatus(403);
}
//-----------------------------------------------------------------------------------------------------------------
// @Rest(roleGuard), pattern guards on class
//-----------------------------------------------------------------------------------------------------------------
/** Base resource declaring the available roles; subclasses attach pattern-based class-level guards. */
@Rest(rolesDeclared="foo,bar,baz")
public static class C1 {
    @RestOp public String a() { return "OK"; }
    @RestGet public String b() { return "OK"; }
    @RestPut public String c() { return "OK"; }
    @RestPost public String d() { return "OK"; }
    @RestDelete public String e() { return "OK"; }
}

/** OR of two patterns using comma syntax. */
@Rest(roleGuard="fo*,*ar")
public static class C1a extends C1 {}

@Test void c01a_orPatternsWithComma_pass() throws Exception {
    var client = MockRestClient.buildLax(C1a.class);
    var calls = ops(op("get","/a"), op("get","/b"), op("put","/c"), op("post","/d"), op("delete","/e"));
    for (var call : calls) {
        // A role matching either pattern satisfies the guard.
        client.request(call).roles("foo").run().assertStatus(200);
        client.request(call).roles("bar").run().assertStatus(200);
        client.request(call).roles("foo","bar").run().assertStatus(200);
        client.request(call).roles("foo","bar","baz").run().assertStatus(200);
        // No matching role is rejected.
        client.request(call).roles().run().assertStatus(403);
        client.request(call).roles("baz").run().assertStatus(403);
    }
}

/** OR of two patterns using single-pipe syntax. */
@Rest(roleGuard="fo* | *ar")
public static class C1b extends C1 {}

@Test void c01b_orPatternsWithSinglePipe_pass() throws Exception {
    var client = MockRestClient.buildLax(C1b.class);
    var calls = ops(op("get","/a"), op("get","/b"), op("put","/c"), op("post","/d"), op("delete","/e"));
    for (var call : calls) {
        client.request(call).roles("foo").run().assertStatus(200);
        client.request(call).roles("bar").run().assertStatus(200);
        client.request(call).roles("foo","bar").run().assertStatus(200);
        client.request(call).roles("foo","bar","baz").run().assertStatus(200);
        client.request(call).roles().run().assertStatus(403);
        client.request(call).roles("baz").run().assertStatus(403);
    }
}

/** OR of two patterns using double-pipe syntax. */
@Rest(roleGuard="fo* || *ar")
public static class C1c extends C1 {}

@Test void c01c_orPatternsWithDoublePipe_pass() throws Exception {
    var client = MockRestClient.buildLax(C1c.class);
    var calls = ops(op("get","/a"), op("get","/b"), op("put","/c"), op("post","/d"), op("delete","/e"));
    for (var call : calls) {
        client.request(call).roles("foo").run().assertStatus(200);
        client.request(call).roles("bar").run().assertStatus(200);
        client.request(call).roles("foo","bar").run().assertStatus(200);
        client.request(call).roles("foo","bar","baz").run().assertStatus(200);
        client.request(call).roles().run().assertStatus(403);
        client.request(call).roles("baz").run().assertStatus(403);
    }
}

/** AND of two patterns using single-ampersand syntax. */
@Rest(roleGuard="fo* & *ar")
public static class C1d extends C1 {}

@Test void c01d_andPatternsWithSingleAmp_pass() throws Exception {
    var client = MockRestClient.buildLax(C1d.class);
    var calls = ops(op("get","/a"), op("get","/b"), op("put","/c"), op("post","/d"), op("delete","/e"));
    for (var call : calls) {
        // Roles matching both patterns are required.
        client.request(call).roles("foo","bar").run().assertStatus(200);
        client.request(call).roles("foo","bar","baz").run().assertStatus(200);
        client.request(call).roles().run().assertStatus(403);
        client.request(call).roles("foo").run().assertStatus(403);
        client.request(call).roles("bar").run().assertStatus(403);
        client.request(call).roles("baz").run().assertStatus(403);
    }
}

/** AND of two patterns using double-ampersand syntax. */
@Rest(roleGuard="fo* && *ar")
public static class C1e extends C1 {}

@Test void c01e_andPatternsWithDoubleAmp_pass() throws Exception {
    var client = MockRestClient.buildLax(C1e.class);
    var calls = ops(op("get","/a"), op("get","/b"), op("put","/c"), op("post","/d"), op("delete","/e"));
    for (var call : calls) {
        client.request(call).roles("foo","bar").run().assertStatus(200);
        client.request(call).roles("foo","bar","baz").run().assertStatus(200);
        client.request(call).roles().run().assertStatus(403);
        client.request(call).roles("foo").run().assertStatus(403);
        client.request(call).roles("bar").run().assertStatus(403);
        client.request(call).roles("baz").run().assertStatus(403);
    }
}

/** AND of two parenthesized patterns. */
@Rest(roleGuard="(fo*) && (*ar)")
public static class C1f extends C1 {}

@Test void c01f_andPatternsWithDoubleAmpAndParens_pass() throws Exception {
    var client = MockRestClient.buildLax(C1f.class);
    var calls = ops(op("get","/a"), op("get","/b"), op("put","/c"), op("post","/d"), op("delete","/e"));
    for (var call : calls) {
        client.request(call).roles("foo","bar").run().assertStatus(200);
        client.request(call).roles("foo","bar","baz").run().assertStatus(200);
        client.request(call).roles().run().assertStatus(403);
        client.request(call).roles("foo").run().assertStatus(403);
        client.request(call).roles("bar").run().assertStatus(403);
        client.request(call).roles("baz").run().assertStatus(403);
    }
}

/** Nested pattern expression: "fo*" plus at least one of "*ar"/"*az". */
@Rest(roleGuard="fo* && (*ar || *az)")
public static class C1g extends C1 {}

@Test void c01g_complexPatterns_pass() throws Exception {
    var client = MockRestClient.buildLax(C1g.class);
    var calls = ops(op("get","/a"), op("get","/b"), op("put","/c"), op("post","/d"), op("delete","/e"));
    for (var call : calls) {
        client.request(call).roles("foo","bar").run().assertStatus(200);
        client.request(call).roles("foo","baz").run().assertStatus(200);
        client.request(call).roles("foo","bar","baz").run().assertStatus(200);
        client.request(call).roles().run().assertStatus(403);
        client.request(call).roles("foo").run().assertStatus(403);
        client.request(call).roles("bar","baz").run().assertStatus(403);
        client.request(call).roles("baz").run().assertStatus(403);
    }
}

/** Nested pattern expression: "fo*" alone, or "*ar" and "*az" together. */
@Rest(roleGuard="fo* || (*ar && *az)")
public static class C1h extends C1 {}

@Test void c01h_complexPatterns_pass() throws Exception {
    var client = MockRestClient.buildLax(C1h.class);
    var calls = ops(op("get","/a"), op("get","/b"), op("put","/c"), op("post","/d"), op("delete","/e"));
    for (var call : calls) {
        client.request(call).roles("foo").run().assertStatus(200);
        client.request(call).roles("bar","baz").run().assertStatus(200);
        client.request(call).roles("foo","bar","baz").run().assertStatus(200);
        client.request(call).roles().run().assertStatus(403);
        client.request(call).roles("bar").run().assertStatus(403);
        client.request(call).roles("baz").run().assertStatus(403);
    }
}
//-----------------------------------------------------------------------------------------------------------------
// @RestOp(roleGuard), pattern guards on method
//-----------------------------------------------------------------------------------------------------------------
@Rest
public static class C2a {
@RestOp(roleGuard="fo*,*ar",rolesDeclared="foo,bar,baz")
public String a() {
return "OK";
}
@RestGet(roleGuard="fo*,*ar",rolesDeclared="foo,bar,baz")
public String b() {
return "OK";
}
@RestPut(roleGuard="fo*,*ar",rolesDeclared="foo,bar,baz")
public String c() {
return "OK";
}
@RestPost(roleGuard="fo*,*ar",rolesDeclared="foo,bar,baz")
public String d() {
return "OK";
}
@RestDelete(roleGuard="fo*,*ar",rolesDeclared="foo,bar,baz")
public String e() {
return "OK";
}
}
@Test void c02a_orPatternsWithComma_pass() throws Exception {
var c2a = MockRestClient.buildLax(C2a.class);
for (RestOperation op : ops(op("get","/a"),op("get","/b"),op("put","/c"),op("post","/d"),op("delete","/e"))) {
c2a.request(op).roles("foo").run().assertStatus(200);
c2a.request(op).roles("bar").run().assertStatus(200);
c2a.request(op).roles("foo","bar").run().assertStatus(200);
c2a.request(op).roles("foo","bar","baz").run().assertStatus(200);
c2a.request(op).roles().run().assertStatus(403);
c2a.request(op).roles("baz").run().assertStatus(403);
}
}
@Rest
public static class C2b {
@RestOp(roleGuard="fo* | *ar",rolesDeclared="foo,bar,baz")
public String get() {
return "OK";
}
}
@Test void c02b_orPatternsWithSinglePipe_pass() throws Exception {
var c2b = MockRestClient.buildLax(C2b.class);
c2b.get().roles("foo").run().assertStatus(200);
c2b.get().roles("bar").run().assertStatus(200);
c2b.get().roles("foo","bar").run().assertStatus(200);
c2b.get().roles("foo","bar","baz").run().assertStatus(200);
c2b.get().roles().run().assertStatus(403);
c2b.get().roles("baz").run().assertStatus(403);
}
@Rest
public static class C2c {
@RestOp(roleGuard="fo* || *ar",rolesDeclared="foo,bar,baz")
public String get() {
return "OK";
}
}
@Test void c02c_orPatternsWithDoublePipe_pass() throws Exception {
var c2c = MockRestClient.buildLax(C2c.class);
c2c.get().roles("foo").run().assertStatus(200);
c2c.get().roles("bar").run().assertStatus(200);
c2c.get().roles("foo","bar").run().assertStatus(200);
c2c.get().roles("foo","bar","baz").run().assertStatus(200);
c2c.get().roles().run().assertStatus(403);
c2c.get().roles("baz").run().assertStatus(403);
}
@Rest
public static class C2d {
@RestOp(roleGuard="fo* & *ar",rolesDeclared="foo,bar,baz")
public String get() {
return "OK";
}
}
@Test void c02d_andPatternsWithSingleAmp_pass() throws Exception {
var c2d = MockRestClient.buildLax(C2d.class);
c2d.get().roles("foo","bar").run().assertStatus(200);
c2d.get().roles("foo","bar","baz").run().assertStatus(200);
c2d.get().roles().run().assertStatus(403);
c2d.get().roles("foo").run().assertStatus(403);
c2d.get().roles("bar").run().assertStatus(403);
c2d.get().roles("baz").run().assertStatus(403);
}
@Rest
public static class C2e {
@RestOp(roleGuard="fo* && *ar",rolesDeclared="foo,bar,baz")
public String get() {
return "OK";
}
}
@Test void c02e_andPatternsWithDoubleAmp_pass() throws Exception {
var c2e = MockRestClient.buildLax(C2e.class);
c2e.get().roles("foo","bar").run().assertStatus(200);
c2e.get().roles("foo","bar","baz").run().assertStatus(200);
c2e.get().roles().run().assertStatus(403);
c2e.get().roles("foo").run().assertStatus(403);
c2e.get().roles("bar").run().assertStatus(403);
c2e.get().roles("baz").run().assertStatus(403);
}
@Rest
public static class C2f {
@RestOp(roleGuard="(fo*) && (*ar)",rolesDeclared="foo,bar,baz")
public String get() {
return "OK";
}
}
@Test void c02f_andPatternsWithDoubleAmpAndParens_pass() throws Exception {
var c2f = MockRestClient.buildLax(C2f.class);
c2f.get().roles("foo","bar").run().assertStatus(200);
c2f.get().roles("foo","bar","baz").run().assertStatus(200);
c2f.get().roles().run().assertStatus(403);
c2f.get().roles("foo").run().assertStatus(403);
c2f.get().roles("bar").run().assertStatus(403);
c2f.get().roles("baz").run().assertStatus(403);
}
@Rest
public static class C2g {
@RestOp(roleGuard="fo* && (*ar || *az)",rolesDeclared="foo,bar,baz")
public String get() {
return "OK";
}
}
@Test void c02g_complexPatterns_pass() throws Exception {
var c2g = MockRestClient.buildLax(C2g.class);
c2g.get().roles("foo","bar").run().assertStatus(200);
c2g.get().roles("foo","baz").run().assertStatus(200);
c2g.get().roles("foo","bar","baz").run().assertStatus(200);
c2g.get().roles().run().assertStatus(403);
c2g.get().roles("foo").run().assertStatus(403);
c2g.get().roles("bar","baz").run().assertStatus(403);
c2g.get().roles("baz").run().assertStatus(403);
}
@Rest
public static class C2h {
@RestOp(roleGuard="fo* || (*ar && *az)",rolesDeclared="foo,bar,baz")
public String get() {
return "OK";
}
}
@Test void c02h_complexPatterns_pass() throws Exception {
var c2h = MockRestClient.buildLax(C2h.class);
c2h.get().roles("foo").run().assertStatus(200);
c2h.get().roles("bar","baz").run().assertStatus(200);
c2h.get().roles("foo","bar","baz").run().assertStatus(200);
c2h.get().roles().run().assertStatus(403);
c2h.get().roles("bar").run().assertStatus(403);
c2h.get().roles("baz").run().assertStatus(403);
}
//-----------------------------------------------------------------------------------------------------------------
// @RestOp(roleGuard), pattern guards on method but no roles defined
//-----------------------------------------------------------------------------------------------------------------
@Rest
public static class D {
@RestOp(roleGuard="fo*,*ar")
public String a() {
return "OK";
}
@RestGet(roleGuard="fo*,*ar")
public String b() {
return "OK";
}
@RestPut(roleGuard="fo*,*ar")
public String c() {
return "OK";
}
@RestPost(roleGuard="fo*,*ar")
public String d() {
return "OK";
}
@RestDelete(roleGuard="fo*,*ar")
public String e() {
return "OK";
}
}
@Test void d01_patternsWithoutRoles() throws Exception {
var d = MockRestClient.buildLax(D.class);
for (RestOperation op : ops(op("get","/a"),op("get","/b"),op("put","/c"),op("post","/d"),op("delete","/e"))) {
d.request(op).roles().run().assertStatus(403);
d.request(op).roles("foo").run().assertStatus(403);
d.request(op).roles("bar").run().assertStatus(403);
d.request(op).roles("baz").run().assertStatus(403);
d.request(op).roles("foo","bar").run().assertStatus(403);
}
}
//-----------------------------------------------------------------------------------------------------------------
// @RestOp(roleGuard), any role.
//-----------------------------------------------------------------------------------------------------------------
@Rest(rolesDeclared="foo,bar,baz")
public static class E {
@RestOp(roleGuard="*")
public String get() {
return "OK";
}
}
@Test void e01_anyRole_pass() throws Exception {
var e = MockRestClient.buildLax(E.class);
e.get().roles("foo").run().assertStatus(200);
e.get().roles("bar").run().assertStatus(200);
e.get().roles("baz").run().assertStatus(200);
e.get().roles("foo","bar").run().assertStatus(200);
e.get().roles().run().assertStatus(403);
}
} |
/**
* Autogenerated by Thrift Compiler (0.16.0)
*
* DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
* @generated
*/
package org.apache.hadoop.hive.metastore.api;
@SuppressWarnings({"cast", "rawtypes", "serial", "unchecked", "unused"})
@javax.annotation.Generated(value = "Autogenerated by Thrift Compiler (0.16.0)")
@org.apache.hadoop.classification.InterfaceAudience.Public @org.apache.hadoop.classification.InterfaceStability.Stable public class SQLNotNullConstraint implements org.apache.thrift.TBase<SQLNotNullConstraint, SQLNotNullConstraint._Fields>, java.io.Serializable, Cloneable, Comparable<SQLNotNullConstraint> {
  // Thrift wire metadata: the struct descriptor plus one TField descriptor per member (ids 1-8).
  private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("SQLNotNullConstraint");
  private static final org.apache.thrift.protocol.TField CAT_NAME_FIELD_DESC = new org.apache.thrift.protocol.TField("catName", org.apache.thrift.protocol.TType.STRING, (short)1);
  private static final org.apache.thrift.protocol.TField TABLE_DB_FIELD_DESC = new org.apache.thrift.protocol.TField("table_db", org.apache.thrift.protocol.TType.STRING, (short)2);
  private static final org.apache.thrift.protocol.TField TABLE_NAME_FIELD_DESC = new org.apache.thrift.protocol.TField("table_name", org.apache.thrift.protocol.TType.STRING, (short)3);
  private static final org.apache.thrift.protocol.TField COLUMN_NAME_FIELD_DESC = new org.apache.thrift.protocol.TField("column_name", org.apache.thrift.protocol.TType.STRING, (short)4);
  private static final org.apache.thrift.protocol.TField NN_NAME_FIELD_DESC = new org.apache.thrift.protocol.TField("nn_name", org.apache.thrift.protocol.TType.STRING, (short)5);
  private static final org.apache.thrift.protocol.TField ENABLE_CSTR_FIELD_DESC = new org.apache.thrift.protocol.TField("enable_cstr", org.apache.thrift.protocol.TType.BOOL, (short)6);
  private static final org.apache.thrift.protocol.TField VALIDATE_CSTR_FIELD_DESC = new org.apache.thrift.protocol.TField("validate_cstr", org.apache.thrift.protocol.TType.BOOL, (short)7);
  private static final org.apache.thrift.protocol.TField RELY_CSTR_FIELD_DESC = new org.apache.thrift.protocol.TField("rely_cstr", org.apache.thrift.protocol.TType.BOOL, (short)8);

  // Factories for the two Thrift serialization schemes (standard vs. tuple).
  private static final org.apache.thrift.scheme.SchemeFactory STANDARD_SCHEME_FACTORY = new SQLNotNullConstraintStandardSchemeFactory();
  private static final org.apache.thrift.scheme.SchemeFactory TUPLE_SCHEME_FACTORY = new SQLNotNullConstraintTupleSchemeFactory();

  // Struct members. Presence of the primitive boolean fields is tracked in __isset_bitfield below.
  private @org.apache.thrift.annotation.Nullable java.lang.String catName; // required
  private @org.apache.thrift.annotation.Nullable java.lang.String table_db; // required
  private @org.apache.thrift.annotation.Nullable java.lang.String table_name; // required
  private @org.apache.thrift.annotation.Nullable java.lang.String column_name; // required
  private @org.apache.thrift.annotation.Nullable java.lang.String nn_name; // required
  private boolean enable_cstr; // required
  private boolean validate_cstr; // required
  private boolean rely_cstr; // required
  /** The set of fields this struct contains, along with convenience methods for finding and manipulating them. */
  public enum _Fields implements org.apache.thrift.TFieldIdEnum {
    CAT_NAME((short)1, "catName"),
    TABLE_DB((short)2, "table_db"),
    TABLE_NAME((short)3, "table_name"),
    COLUMN_NAME((short)4, "column_name"),
    NN_NAME((short)5, "nn_name"),
    ENABLE_CSTR((short)6, "enable_cstr"),
    VALIDATE_CSTR((short)7, "validate_cstr"),
    RELY_CSTR((short)8, "rely_cstr");

    // Reverse index from Thrift field name to enum constant, built once at class load.
    private static final java.util.Map<java.lang.String, _Fields> byName = new java.util.HashMap<java.lang.String, _Fields>();

    static {
      for (_Fields field : java.util.EnumSet.allOf(_Fields.class)) {
        byName.put(field.getFieldName(), field);
      }
    }

    /**
     * Find the _Fields constant that matches fieldId, or null if its not found.
     */
    @org.apache.thrift.annotation.Nullable
    public static _Fields findByThriftId(int fieldId) {
      switch(fieldId) {
        case 1: // CAT_NAME
          return CAT_NAME;
        case 2: // TABLE_DB
          return TABLE_DB;
        case 3: // TABLE_NAME
          return TABLE_NAME;
        case 4: // COLUMN_NAME
          return COLUMN_NAME;
        case 5: // NN_NAME
          return NN_NAME;
        case 6: // ENABLE_CSTR
          return ENABLE_CSTR;
        case 7: // VALIDATE_CSTR
          return VALIDATE_CSTR;
        case 8: // RELY_CSTR
          return RELY_CSTR;
        default:
          return null;
      }
    }

    /**
     * Find the _Fields constant that matches fieldId, throwing an exception
     * if it is not found.
     */
    public static _Fields findByThriftIdOrThrow(int fieldId) {
      _Fields fields = findByThriftId(fieldId);
      if (fields == null) throw new java.lang.IllegalArgumentException("Field " + fieldId + " doesn't exist!");
      return fields;
    }

    /**
     * Find the _Fields constant that matches name, or null if its not found.
     */
    @org.apache.thrift.annotation.Nullable
    public static _Fields findByName(java.lang.String name) {
      return byName.get(name);
    }

    private final short _thriftId;             // wire-level field id from the IDL
    private final java.lang.String _fieldName; // field name from the IDL

    _Fields(short thriftId, java.lang.String fieldName) {
      _thriftId = thriftId;
      _fieldName = fieldName;
    }

    public short getThriftFieldId() {
      return _thriftId;
    }

    public java.lang.String getFieldName() {
      return _fieldName;
    }
  }
  // isset id assignments
  // Bit positions within __isset_bitfield that record whether each primitive
  // boolean field has been explicitly assigned (object fields use null instead).
  private static final int __ENABLE_CSTR_ISSET_ID = 0;
  private static final int __VALIDATE_CSTR_ISSET_ID = 1;
  private static final int __RELY_CSTR_ISSET_ID = 2;
  private byte __isset_bitfield = 0;

  // Per-field metadata (name, requirement type, value type), registered with the
  // global Thrift metadata registry for runtime introspection.
  public static final java.util.Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> metaDataMap;
  static {
    java.util.Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> tmpMap = new java.util.EnumMap<_Fields, org.apache.thrift.meta_data.FieldMetaData>(_Fields.class);
    tmpMap.put(_Fields.CAT_NAME, new org.apache.thrift.meta_data.FieldMetaData("catName", org.apache.thrift.TFieldRequirementType.DEFAULT,
        new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.STRING)));
    tmpMap.put(_Fields.TABLE_DB, new org.apache.thrift.meta_data.FieldMetaData("table_db", org.apache.thrift.TFieldRequirementType.DEFAULT,
        new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.STRING)));
    tmpMap.put(_Fields.TABLE_NAME, new org.apache.thrift.meta_data.FieldMetaData("table_name", org.apache.thrift.TFieldRequirementType.DEFAULT,
        new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.STRING)));
    tmpMap.put(_Fields.COLUMN_NAME, new org.apache.thrift.meta_data.FieldMetaData("column_name", org.apache.thrift.TFieldRequirementType.DEFAULT,
        new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.STRING)));
    tmpMap.put(_Fields.NN_NAME, new org.apache.thrift.meta_data.FieldMetaData("nn_name", org.apache.thrift.TFieldRequirementType.DEFAULT,
        new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.STRING)));
    tmpMap.put(_Fields.ENABLE_CSTR, new org.apache.thrift.meta_data.FieldMetaData("enable_cstr", org.apache.thrift.TFieldRequirementType.DEFAULT,
        new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.BOOL)));
    tmpMap.put(_Fields.VALIDATE_CSTR, new org.apache.thrift.meta_data.FieldMetaData("validate_cstr", org.apache.thrift.TFieldRequirementType.DEFAULT,
        new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.BOOL)));
    tmpMap.put(_Fields.RELY_CSTR, new org.apache.thrift.meta_data.FieldMetaData("rely_cstr", org.apache.thrift.TFieldRequirementType.DEFAULT,
        new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.BOOL)));
    metaDataMap = java.util.Collections.unmodifiableMap(tmpMap);
    org.apache.thrift.meta_data.FieldMetaData.addStructMetaDataMap(SQLNotNullConstraint.class, metaDataMap);
  }
  /** Default constructor; all fields are left unset. */
  public SQLNotNullConstraint() {
  }

  /** All-fields constructor; each primitive boolean is also marked as set. */
  public SQLNotNullConstraint(
    java.lang.String catName,
    java.lang.String table_db,
    java.lang.String table_name,
    java.lang.String column_name,
    java.lang.String nn_name,
    boolean enable_cstr,
    boolean validate_cstr,
    boolean rely_cstr)
  {
    this();
    this.catName = catName;
    this.table_db = table_db;
    this.table_name = table_name;
    this.column_name = column_name;
    this.nn_name = nn_name;
    this.enable_cstr = enable_cstr;
    setEnable_cstrIsSet(true);
    this.validate_cstr = validate_cstr;
    setValidate_cstrIsSet(true);
    this.rely_cstr = rely_cstr;
    setRely_cstrIsSet(true);
  }
  /**
   * Performs a deep copy on <i>other</i>.
   * Strings are immutable, so copying the references is sufficient here.
   */
  public SQLNotNullConstraint(SQLNotNullConstraint other) {
    __isset_bitfield = other.__isset_bitfield;
    if (other.isSetCatName()) {
      this.catName = other.catName;
    }
    if (other.isSetTable_db()) {
      this.table_db = other.table_db;
    }
    if (other.isSetTable_name()) {
      this.table_name = other.table_name;
    }
    if (other.isSetColumn_name()) {
      this.column_name = other.column_name;
    }
    if (other.isSetNn_name()) {
      this.nn_name = other.nn_name;
    }
    this.enable_cstr = other.enable_cstr;
    this.validate_cstr = other.validate_cstr;
    this.rely_cstr = other.rely_cstr;
  }

  /** Returns a deep copy of this struct (delegates to the copy constructor). */
  public SQLNotNullConstraint deepCopy() {
    return new SQLNotNullConstraint(this);
  }

  /** Resets every field to its unset/default state. */
  @Override
  public void clear() {
    this.catName = null;
    this.table_db = null;
    this.table_name = null;
    this.column_name = null;
    this.nn_name = null;
    setEnable_cstrIsSet(false);
    this.enable_cstr = false;
    setValidate_cstrIsSet(false);
    this.validate_cstr = false;
    setRely_cstrIsSet(false);
    this.rely_cstr = false;
  }
  // ---------------------------------------------------------------------------
  // Per-field accessors. Object fields signal "unset" via null; primitive
  // boolean fields track presence via bits in __isset_bitfield.
  // ---------------------------------------------------------------------------

  @org.apache.thrift.annotation.Nullable
  public java.lang.String getCatName() {
    return this.catName;
  }

  public void setCatName(@org.apache.thrift.annotation.Nullable java.lang.String catName) {
    this.catName = catName;
  }

  public void unsetCatName() {
    this.catName = null;
  }

  /** Returns true if field catName is set (has been assigned a value) and false otherwise */
  public boolean isSetCatName() {
    return this.catName != null;
  }

  public void setCatNameIsSet(boolean value) {
    if (!value) {
      this.catName = null;
    }
  }

  @org.apache.thrift.annotation.Nullable
  public java.lang.String getTable_db() {
    return this.table_db;
  }

  public void setTable_db(@org.apache.thrift.annotation.Nullable java.lang.String table_db) {
    this.table_db = table_db;
  }

  public void unsetTable_db() {
    this.table_db = null;
  }

  /** Returns true if field table_db is set (has been assigned a value) and false otherwise */
  public boolean isSetTable_db() {
    return this.table_db != null;
  }

  public void setTable_dbIsSet(boolean value) {
    if (!value) {
      this.table_db = null;
    }
  }

  @org.apache.thrift.annotation.Nullable
  public java.lang.String getTable_name() {
    return this.table_name;
  }

  public void setTable_name(@org.apache.thrift.annotation.Nullable java.lang.String table_name) {
    this.table_name = table_name;
  }

  public void unsetTable_name() {
    this.table_name = null;
  }

  /** Returns true if field table_name is set (has been assigned a value) and false otherwise */
  public boolean isSetTable_name() {
    return this.table_name != null;
  }

  public void setTable_nameIsSet(boolean value) {
    if (!value) {
      this.table_name = null;
    }
  }

  @org.apache.thrift.annotation.Nullable
  public java.lang.String getColumn_name() {
    return this.column_name;
  }

  public void setColumn_name(@org.apache.thrift.annotation.Nullable java.lang.String column_name) {
    this.column_name = column_name;
  }

  public void unsetColumn_name() {
    this.column_name = null;
  }

  /** Returns true if field column_name is set (has been assigned a value) and false otherwise */
  public boolean isSetColumn_name() {
    return this.column_name != null;
  }

  public void setColumn_nameIsSet(boolean value) {
    if (!value) {
      this.column_name = null;
    }
  }

  @org.apache.thrift.annotation.Nullable
  public java.lang.String getNn_name() {
    return this.nn_name;
  }

  public void setNn_name(@org.apache.thrift.annotation.Nullable java.lang.String nn_name) {
    this.nn_name = nn_name;
  }

  public void unsetNn_name() {
    this.nn_name = null;
  }

  /** Returns true if field nn_name is set (has been assigned a value) and false otherwise */
  public boolean isSetNn_name() {
    return this.nn_name != null;
  }

  public void setNn_nameIsSet(boolean value) {
    if (!value) {
      this.nn_name = null;
    }
  }

  public boolean isEnable_cstr() {
    return this.enable_cstr;
  }

  public void setEnable_cstr(boolean enable_cstr) {
    this.enable_cstr = enable_cstr;
    setEnable_cstrIsSet(true);
  }

  public void unsetEnable_cstr() {
    __isset_bitfield = org.apache.thrift.EncodingUtils.clearBit(__isset_bitfield, __ENABLE_CSTR_ISSET_ID);
  }

  /** Returns true if field enable_cstr is set (has been assigned a value) and false otherwise */
  public boolean isSetEnable_cstr() {
    return org.apache.thrift.EncodingUtils.testBit(__isset_bitfield, __ENABLE_CSTR_ISSET_ID);
  }

  public void setEnable_cstrIsSet(boolean value) {
    __isset_bitfield = org.apache.thrift.EncodingUtils.setBit(__isset_bitfield, __ENABLE_CSTR_ISSET_ID, value);
  }

  public boolean isValidate_cstr() {
    return this.validate_cstr;
  }

  public void setValidate_cstr(boolean validate_cstr) {
    this.validate_cstr = validate_cstr;
    setValidate_cstrIsSet(true);
  }

  public void unsetValidate_cstr() {
    __isset_bitfield = org.apache.thrift.EncodingUtils.clearBit(__isset_bitfield, __VALIDATE_CSTR_ISSET_ID);
  }

  /** Returns true if field validate_cstr is set (has been assigned a value) and false otherwise */
  public boolean isSetValidate_cstr() {
    return org.apache.thrift.EncodingUtils.testBit(__isset_bitfield, __VALIDATE_CSTR_ISSET_ID);
  }

  public void setValidate_cstrIsSet(boolean value) {
    __isset_bitfield = org.apache.thrift.EncodingUtils.setBit(__isset_bitfield, __VALIDATE_CSTR_ISSET_ID, value);
  }

  public boolean isRely_cstr() {
    return this.rely_cstr;
  }

  public void setRely_cstr(boolean rely_cstr) {
    this.rely_cstr = rely_cstr;
    setRely_cstrIsSet(true);
  }

  public void unsetRely_cstr() {
    __isset_bitfield = org.apache.thrift.EncodingUtils.clearBit(__isset_bitfield, __RELY_CSTR_ISSET_ID);
  }

  /** Returns true if field rely_cstr is set (has been assigned a value) and false otherwise */
  public boolean isSetRely_cstr() {
    return org.apache.thrift.EncodingUtils.testBit(__isset_bitfield, __RELY_CSTR_ISSET_ID);
  }

  public void setRely_cstrIsSet(boolean value) {
    __isset_bitfield = org.apache.thrift.EncodingUtils.setBit(__isset_bitfield, __RELY_CSTR_ISSET_ID, value);
  }
  /** Generic setter keyed by _Fields; a null value unsets the field. */
  public void setFieldValue(_Fields field, @org.apache.thrift.annotation.Nullable java.lang.Object value) {
    switch (field) {
    case CAT_NAME:
      if (value == null) {
        unsetCatName();
      } else {
        setCatName((java.lang.String)value);
      }
      break;

    case TABLE_DB:
      if (value == null) {
        unsetTable_db();
      } else {
        setTable_db((java.lang.String)value);
      }
      break;

    case TABLE_NAME:
      if (value == null) {
        unsetTable_name();
      } else {
        setTable_name((java.lang.String)value);
      }
      break;

    case COLUMN_NAME:
      if (value == null) {
        unsetColumn_name();
      } else {
        setColumn_name((java.lang.String)value);
      }
      break;

    case NN_NAME:
      if (value == null) {
        unsetNn_name();
      } else {
        setNn_name((java.lang.String)value);
      }
      break;

    case ENABLE_CSTR:
      if (value == null) {
        unsetEnable_cstr();
      } else {
        setEnable_cstr((java.lang.Boolean)value);
      }
      break;

    case VALIDATE_CSTR:
      if (value == null) {
        unsetValidate_cstr();
      } else {
        setValidate_cstr((java.lang.Boolean)value);
      }
      break;

    case RELY_CSTR:
      if (value == null) {
        unsetRely_cstr();
      } else {
        setRely_cstr((java.lang.Boolean)value);
      }
      break;

    }
  }

  /** Generic getter keyed by _Fields; primitive booleans come back boxed. */
  @org.apache.thrift.annotation.Nullable
  public java.lang.Object getFieldValue(_Fields field) {
    switch (field) {
    case CAT_NAME:
      return getCatName();

    case TABLE_DB:
      return getTable_db();

    case TABLE_NAME:
      return getTable_name();

    case COLUMN_NAME:
      return getColumn_name();

    case NN_NAME:
      return getNn_name();

    case ENABLE_CSTR:
      return isEnable_cstr();

    case VALIDATE_CSTR:
      return isValidate_cstr();

    case RELY_CSTR:
      return isRely_cstr();

    }
    throw new java.lang.IllegalStateException();
  }

  /** Returns true if field corresponding to fieldID is set (has been assigned a value) and false otherwise */
  public boolean isSet(_Fields field) {
    if (field == null) {
      throw new java.lang.IllegalArgumentException();
    }

    switch (field) {
    case CAT_NAME:
      return isSetCatName();
    case TABLE_DB:
      return isSetTable_db();
    case TABLE_NAME:
      return isSetTable_name();
    case COLUMN_NAME:
      return isSetColumn_name();
    case NN_NAME:
      return isSetNn_name();
    case ENABLE_CSTR:
      return isSetEnable_cstr();
    case VALIDATE_CSTR:
      return isSetValidate_cstr();
    case RELY_CSTR:
      return isSetRely_cstr();
    }
    throw new java.lang.IllegalStateException();
  }
  @Override
  public boolean equals(java.lang.Object that) {
    if (that instanceof SQLNotNullConstraint)
      return this.equals((SQLNotNullConstraint)that);
    return false;
  }

  /**
   * Field-by-field equality: each field must agree on both presence and value.
   * (For the primitive booleans, presence is treated as always-true here.)
   */
  public boolean equals(SQLNotNullConstraint that) {
    if (that == null)
      return false;
    if (this == that)
      return true;

    boolean this_present_catName = true && this.isSetCatName();
    boolean that_present_catName = true && that.isSetCatName();
    if (this_present_catName || that_present_catName) {
      if (!(this_present_catName && that_present_catName))
        return false;
      if (!this.catName.equals(that.catName))
        return false;
    }

    boolean this_present_table_db = true && this.isSetTable_db();
    boolean that_present_table_db = true && that.isSetTable_db();
    if (this_present_table_db || that_present_table_db) {
      if (!(this_present_table_db && that_present_table_db))
        return false;
      if (!this.table_db.equals(that.table_db))
        return false;
    }

    boolean this_present_table_name = true && this.isSetTable_name();
    boolean that_present_table_name = true && that.isSetTable_name();
    if (this_present_table_name || that_present_table_name) {
      if (!(this_present_table_name && that_present_table_name))
        return false;
      if (!this.table_name.equals(that.table_name))
        return false;
    }

    boolean this_present_column_name = true && this.isSetColumn_name();
    boolean that_present_column_name = true && that.isSetColumn_name();
    if (this_present_column_name || that_present_column_name) {
      if (!(this_present_column_name && that_present_column_name))
        return false;
      if (!this.column_name.equals(that.column_name))
        return false;
    }

    boolean this_present_nn_name = true && this.isSetNn_name();
    boolean that_present_nn_name = true && that.isSetNn_name();
    if (this_present_nn_name || that_present_nn_name) {
      if (!(this_present_nn_name && that_present_nn_name))
        return false;
      if (!this.nn_name.equals(that.nn_name))
        return false;
    }

    boolean this_present_enable_cstr = true;
    boolean that_present_enable_cstr = true;
    if (this_present_enable_cstr || that_present_enable_cstr) {
      if (!(this_present_enable_cstr && that_present_enable_cstr))
        return false;
      if (this.enable_cstr != that.enable_cstr)
        return false;
    }

    boolean this_present_validate_cstr = true;
    boolean that_present_validate_cstr = true;
    if (this_present_validate_cstr || that_present_validate_cstr) {
      if (!(this_present_validate_cstr && that_present_validate_cstr))
        return false;
      if (this.validate_cstr != that.validate_cstr)
        return false;
    }

    boolean this_present_rely_cstr = true;
    boolean that_present_rely_cstr = true;
    if (this_present_rely_cstr || that_present_rely_cstr) {
      if (!(this_present_rely_cstr && that_present_rely_cstr))
        return false;
      if (this.rely_cstr != that.rely_cstr)
        return false;
    }

    return true;
  }

  /** Hash folds in a presence marker (131071/524287) for each field, then its value; consistent with equals. */
  @Override
  public int hashCode() {
    int hashCode = 1;

    hashCode = hashCode * 8191 + ((isSetCatName()) ? 131071 : 524287);
    if (isSetCatName())
      hashCode = hashCode * 8191 + catName.hashCode();

    hashCode = hashCode * 8191 + ((isSetTable_db()) ? 131071 : 524287);
    if (isSetTable_db())
      hashCode = hashCode * 8191 + table_db.hashCode();

    hashCode = hashCode * 8191 + ((isSetTable_name()) ? 131071 : 524287);
    if (isSetTable_name())
      hashCode = hashCode * 8191 + table_name.hashCode();

    hashCode = hashCode * 8191 + ((isSetColumn_name()) ? 131071 : 524287);
    if (isSetColumn_name())
      hashCode = hashCode * 8191 + column_name.hashCode();

    hashCode = hashCode * 8191 + ((isSetNn_name()) ? 131071 : 524287);
    if (isSetNn_name())
      hashCode = hashCode * 8191 + nn_name.hashCode();

    hashCode = hashCode * 8191 + ((enable_cstr) ? 131071 : 524287);

    hashCode = hashCode * 8191 + ((validate_cstr) ? 131071 : 524287);

    hashCode = hashCode * 8191 + ((rely_cstr) ? 131071 : 524287);

    return hashCode;
  }
  /**
   * Orders structs field-by-field in IDL declaration order; for each field,
   * an unset value sorts before a set one, then values are compared.
   */
  @Override
  public int compareTo(SQLNotNullConstraint other) {
    if (!getClass().equals(other.getClass())) {
      return getClass().getName().compareTo(other.getClass().getName());
    }

    int lastComparison = 0;

    lastComparison = java.lang.Boolean.compare(isSetCatName(), other.isSetCatName());
    if (lastComparison != 0) {
      return lastComparison;
    }
    if (isSetCatName()) {
      lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.catName, other.catName);
      if (lastComparison != 0) {
        return lastComparison;
      }
    }
    lastComparison = java.lang.Boolean.compare(isSetTable_db(), other.isSetTable_db());
    if (lastComparison != 0) {
      return lastComparison;
    }
    if (isSetTable_db()) {
      lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.table_db, other.table_db);
      if (lastComparison != 0) {
        return lastComparison;
      }
    }
    lastComparison = java.lang.Boolean.compare(isSetTable_name(), other.isSetTable_name());
    if (lastComparison != 0) {
      return lastComparison;
    }
    if (isSetTable_name()) {
      lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.table_name, other.table_name);
      if (lastComparison != 0) {
        return lastComparison;
      }
    }
    lastComparison = java.lang.Boolean.compare(isSetColumn_name(), other.isSetColumn_name());
    if (lastComparison != 0) {
      return lastComparison;
    }
    if (isSetColumn_name()) {
      lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.column_name, other.column_name);
      if (lastComparison != 0) {
        return lastComparison;
      }
    }
    lastComparison = java.lang.Boolean.compare(isSetNn_name(), other.isSetNn_name());
    if (lastComparison != 0) {
      return lastComparison;
    }
    if (isSetNn_name()) {
      lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.nn_name, other.nn_name);
      if (lastComparison != 0) {
        return lastComparison;
      }
    }
    lastComparison = java.lang.Boolean.compare(isSetEnable_cstr(), other.isSetEnable_cstr());
    if (lastComparison != 0) {
      return lastComparison;
    }
    if (isSetEnable_cstr()) {
      lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.enable_cstr, other.enable_cstr);
      if (lastComparison != 0) {
        return lastComparison;
      }
    }
    lastComparison = java.lang.Boolean.compare(isSetValidate_cstr(), other.isSetValidate_cstr());
    if (lastComparison != 0) {
      return lastComparison;
    }
    if (isSetValidate_cstr()) {
      lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.validate_cstr, other.validate_cstr);
      if (lastComparison != 0) {
        return lastComparison;
      }
    }
    lastComparison = java.lang.Boolean.compare(isSetRely_cstr(), other.isSetRely_cstr());
    if (lastComparison != 0) {
      return lastComparison;
    }
    if (isSetRely_cstr()) {
      lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.rely_cstr, other.rely_cstr);
      if (lastComparison != 0) {
        return lastComparison;
      }
    }
    return 0;
  }

  @org.apache.thrift.annotation.Nullable
  public _Fields fieldForId(int fieldId) {
    return _Fields.findByThriftId(fieldId);
  }

  /** Deserializes this struct from the given protocol via the scheme matching the protocol. */
  public void read(org.apache.thrift.protocol.TProtocol iprot) throws org.apache.thrift.TException {
    scheme(iprot).read(iprot, this);
  }

  /** Serializes this struct to the given protocol via the scheme matching the protocol. */
  public void write(org.apache.thrift.protocol.TProtocol oprot) throws org.apache.thrift.TException {
    scheme(oprot).write(oprot, this);
  }
/**
 * Human-readable rendering of this struct for logging/debugging.
 * Object-typed fields print "null" when unset; primitive boolean fields
 * always print their current value. NOTE: this is Thrift-generated
 * boilerplate — the {@code first} flag becomes false after the first
 * field, so ", " is emitted before every subsequent field.
 */
@Override
public java.lang.String toString() {
  java.lang.StringBuilder sb = new java.lang.StringBuilder("SQLNotNullConstraint(");
  boolean first = true;
  sb.append("catName:");
  if (this.catName == null) {
    sb.append("null");
  } else {
    sb.append(this.catName);
  }
  first = false;
  if (!first) sb.append(", ");
  sb.append("table_db:");
  if (this.table_db == null) {
    sb.append("null");
  } else {
    sb.append(this.table_db);
  }
  first = false;
  if (!first) sb.append(", ");
  sb.append("table_name:");
  if (this.table_name == null) {
    sb.append("null");
  } else {
    sb.append(this.table_name);
  }
  first = false;
  if (!first) sb.append(", ");
  sb.append("column_name:");
  if (this.column_name == null) {
    sb.append("null");
  } else {
    sb.append(this.column_name);
  }
  first = false;
  if (!first) sb.append(", ");
  sb.append("nn_name:");
  if (this.nn_name == null) {
    sb.append("null");
  } else {
    sb.append(this.nn_name);
  }
  first = false;
  if (!first) sb.append(", ");
  sb.append("enable_cstr:");
  sb.append(this.enable_cstr);
  first = false;
  if (!first) sb.append(", ");
  sb.append("validate_cstr:");
  sb.append(this.validate_cstr);
  first = false;
  if (!first) sb.append(", ");
  sb.append("rely_cstr:");
  sb.append(this.rely_cstr);
  first = false;
  sb.append(")");
  return sb.toString();
}
/**
 * Validates required fields and nested structs before serialization.
 * All fields of this struct are optional on the wire, so there is
 * currently nothing to check (generated no-op).
 */
public void validate() throws org.apache.thrift.TException {
  // check for required fields
  // check for sub-struct validity
}
/** Java serialization hook: delegates to Thrift compact-protocol encoding over the object stream. */
private void writeObject(java.io.ObjectOutputStream out) throws java.io.IOException {
  try {
    write(new org.apache.thrift.protocol.TCompactProtocol(new org.apache.thrift.transport.TIOStreamTransport(out)));
  } catch (org.apache.thrift.TException te) {
    // wrap so the method honors the ObjectOutputStream contract
    throw new java.io.IOException(te);
  }
}
/** Java serialization hook: decodes the struct via Thrift compact protocol from the object stream. */
private void readObject(java.io.ObjectInputStream in) throws java.io.IOException, java.lang.ClassNotFoundException {
  try {
    // it doesn't seem like you should have to do this, but java serialization is wacky, and doesn't call the default constructor.
    __isset_bitfield = 0;
    read(new org.apache.thrift.protocol.TCompactProtocol(new org.apache.thrift.transport.TIOStreamTransport(in)));
  } catch (org.apache.thrift.TException te) {
    // wrap so the method honors the ObjectInputStream contract
    throw new java.io.IOException(te);
  }
}
/** Factory producing the standard (field-by-field) serialization scheme for SQLNotNullConstraint. */
private static class SQLNotNullConstraintStandardSchemeFactory implements org.apache.thrift.scheme.SchemeFactory {
  public SQLNotNullConstraintStandardScheme getScheme() {
    return new SQLNotNullConstraintStandardScheme();
  }
}
/**
 * Standard (tagged, field-by-field) serialization scheme for SQLNotNullConstraint,
 * used with general protocols such as TBinaryProtocol. Field ids and types on the
 * wire are part of the Thrift contract and must not change.
 */
private static class SQLNotNullConstraintStandardScheme extends org.apache.thrift.scheme.StandardScheme<SQLNotNullConstraint> {

  /**
   * Reads fields until STOP; unknown field ids and type-mismatched fields are
   * skipped for forward/backward compatibility. Validates the struct at the end.
   */
  public void read(org.apache.thrift.protocol.TProtocol iprot, SQLNotNullConstraint struct) throws org.apache.thrift.TException {
    org.apache.thrift.protocol.TField schemeField;
    iprot.readStructBegin();
    while (true)
    {
      schemeField = iprot.readFieldBegin();
      if (schemeField.type == org.apache.thrift.protocol.TType.STOP) {
        break;
      }
      switch (schemeField.id) {
        case 1: // CAT_NAME
          if (schemeField.type == org.apache.thrift.protocol.TType.STRING) {
            struct.catName = iprot.readString();
            struct.setCatNameIsSet(true);
          } else {
            org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
          }
          break;
        case 2: // TABLE_DB
          if (schemeField.type == org.apache.thrift.protocol.TType.STRING) {
            struct.table_db = iprot.readString();
            struct.setTable_dbIsSet(true);
          } else {
            org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
          }
          break;
        case 3: // TABLE_NAME
          if (schemeField.type == org.apache.thrift.protocol.TType.STRING) {
            struct.table_name = iprot.readString();
            struct.setTable_nameIsSet(true);
          } else {
            org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
          }
          break;
        case 4: // COLUMN_NAME
          if (schemeField.type == org.apache.thrift.protocol.TType.STRING) {
            struct.column_name = iprot.readString();
            struct.setColumn_nameIsSet(true);
          } else {
            org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
          }
          break;
        case 5: // NN_NAME
          if (schemeField.type == org.apache.thrift.protocol.TType.STRING) {
            struct.nn_name = iprot.readString();
            struct.setNn_nameIsSet(true);
          } else {
            org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
          }
          break;
        case 6: // ENABLE_CSTR
          if (schemeField.type == org.apache.thrift.protocol.TType.BOOL) {
            struct.enable_cstr = iprot.readBool();
            struct.setEnable_cstrIsSet(true);
          } else {
            org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
          }
          break;
        case 7: // VALIDATE_CSTR
          if (schemeField.type == org.apache.thrift.protocol.TType.BOOL) {
            struct.validate_cstr = iprot.readBool();
            struct.setValidate_cstrIsSet(true);
          } else {
            org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
          }
          break;
        case 8: // RELY_CSTR
          if (schemeField.type == org.apache.thrift.protocol.TType.BOOL) {
            struct.rely_cstr = iprot.readBool();
            struct.setRely_cstrIsSet(true);
          } else {
            org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
          }
          break;
        default:
          org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
      }
      iprot.readFieldEnd();
    }
    iprot.readStructEnd();
    struct.validate();
  }

  /**
   * Writes fields in ascending field-id order. String fields are emitted only
   * when non-null; the three boolean flags are always emitted.
   */
  public void write(org.apache.thrift.protocol.TProtocol oprot, SQLNotNullConstraint struct) throws org.apache.thrift.TException {
    struct.validate();
    oprot.writeStructBegin(STRUCT_DESC);
    if (struct.catName != null) {
      oprot.writeFieldBegin(CAT_NAME_FIELD_DESC);
      oprot.writeString(struct.catName);
      oprot.writeFieldEnd();
    }
    if (struct.table_db != null) {
      oprot.writeFieldBegin(TABLE_DB_FIELD_DESC);
      oprot.writeString(struct.table_db);
      oprot.writeFieldEnd();
    }
    if (struct.table_name != null) {
      oprot.writeFieldBegin(TABLE_NAME_FIELD_DESC);
      oprot.writeString(struct.table_name);
      oprot.writeFieldEnd();
    }
    if (struct.column_name != null) {
      oprot.writeFieldBegin(COLUMN_NAME_FIELD_DESC);
      oprot.writeString(struct.column_name);
      oprot.writeFieldEnd();
    }
    if (struct.nn_name != null) {
      oprot.writeFieldBegin(NN_NAME_FIELD_DESC);
      oprot.writeString(struct.nn_name);
      oprot.writeFieldEnd();
    }
    oprot.writeFieldBegin(ENABLE_CSTR_FIELD_DESC);
    oprot.writeBool(struct.enable_cstr);
    oprot.writeFieldEnd();
    oprot.writeFieldBegin(VALIDATE_CSTR_FIELD_DESC);
    oprot.writeBool(struct.validate_cstr);
    oprot.writeFieldEnd();
    oprot.writeFieldBegin(RELY_CSTR_FIELD_DESC);
    oprot.writeBool(struct.rely_cstr);
    oprot.writeFieldEnd();
    oprot.writeFieldStop();
    oprot.writeStructEnd();
  }
}
/** Factory producing the compact tuple serialization scheme for SQLNotNullConstraint. */
private static class SQLNotNullConstraintTupleSchemeFactory implements org.apache.thrift.scheme.SchemeFactory {
  public SQLNotNullConstraintTupleScheme getScheme() {
    return new SQLNotNullConstraintTupleScheme();
  }
}
/**
 * Tuple serialization scheme for SQLNotNullConstraint: an 8-bit presence
 * bitset followed by the set field values in declaration order. Bit
 * positions are part of the wire contract and must mirror read/write.
 */
private static class SQLNotNullConstraintTupleScheme extends org.apache.thrift.scheme.TupleScheme<SQLNotNullConstraint> {

  /** Writes the presence bitset, then each set field's value in field order. */
  @Override
  public void write(org.apache.thrift.protocol.TProtocol prot, SQLNotNullConstraint struct) throws org.apache.thrift.TException {
    org.apache.thrift.protocol.TTupleProtocol oprot = (org.apache.thrift.protocol.TTupleProtocol) prot;
    java.util.BitSet optionals = new java.util.BitSet();
    if (struct.isSetCatName()) {
      optionals.set(0);
    }
    if (struct.isSetTable_db()) {
      optionals.set(1);
    }
    if (struct.isSetTable_name()) {
      optionals.set(2);
    }
    if (struct.isSetColumn_name()) {
      optionals.set(3);
    }
    if (struct.isSetNn_name()) {
      optionals.set(4);
    }
    if (struct.isSetEnable_cstr()) {
      optionals.set(5);
    }
    if (struct.isSetValidate_cstr()) {
      optionals.set(6);
    }
    if (struct.isSetRely_cstr()) {
      optionals.set(7);
    }
    oprot.writeBitSet(optionals, 8);
    if (struct.isSetCatName()) {
      oprot.writeString(struct.catName);
    }
    if (struct.isSetTable_db()) {
      oprot.writeString(struct.table_db);
    }
    if (struct.isSetTable_name()) {
      oprot.writeString(struct.table_name);
    }
    if (struct.isSetColumn_name()) {
      oprot.writeString(struct.column_name);
    }
    if (struct.isSetNn_name()) {
      oprot.writeString(struct.nn_name);
    }
    if (struct.isSetEnable_cstr()) {
      oprot.writeBool(struct.enable_cstr);
    }
    if (struct.isSetValidate_cstr()) {
      oprot.writeBool(struct.validate_cstr);
    }
    if (struct.isSetRely_cstr()) {
      oprot.writeBool(struct.rely_cstr);
    }
  }

  /** Reads the presence bitset, then each present field's value in field order. */
  @Override
  public void read(org.apache.thrift.protocol.TProtocol prot, SQLNotNullConstraint struct) throws org.apache.thrift.TException {
    org.apache.thrift.protocol.TTupleProtocol iprot = (org.apache.thrift.protocol.TTupleProtocol) prot;
    java.util.BitSet incoming = iprot.readBitSet(8);
    if (incoming.get(0)) {
      struct.catName = iprot.readString();
      struct.setCatNameIsSet(true);
    }
    if (incoming.get(1)) {
      struct.table_db = iprot.readString();
      struct.setTable_dbIsSet(true);
    }
    if (incoming.get(2)) {
      struct.table_name = iprot.readString();
      struct.setTable_nameIsSet(true);
    }
    if (incoming.get(3)) {
      struct.column_name = iprot.readString();
      struct.setColumn_nameIsSet(true);
    }
    if (incoming.get(4)) {
      struct.nn_name = iprot.readString();
      struct.setNn_nameIsSet(true);
    }
    if (incoming.get(5)) {
      struct.enable_cstr = iprot.readBool();
      struct.setEnable_cstrIsSet(true);
    }
    if (incoming.get(6)) {
      struct.validate_cstr = iprot.readBool();
      struct.setValidate_cstrIsSet(true);
    }
    if (incoming.get(7)) {
      struct.rely_cstr = iprot.readBool();
      struct.setRely_cstrIsSet(true);
    }
  }
}
/** Selects the standard or tuple scheme instance based on the protocol's declared scheme class. */
private static <S extends org.apache.thrift.scheme.IScheme> S scheme(org.apache.thrift.protocol.TProtocol proto) {
  return (org.apache.thrift.scheme.StandardScheme.class.equals(proto.getScheme()) ? STANDARD_SCHEME_FACTORY : TUPLE_SCHEME_FACTORY).getScheme();
}
}
|
apache/systemds | 34,115 | src/main/java/org/apache/sysds/runtime/matrix/data/LibMatrixDNNPooling.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.sysds.runtime.matrix.data;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.concurrent.Callable;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.sysds.hops.OptimizerUtils;
import org.apache.sysds.runtime.codegen.LibSpoofPrimitives;
import org.apache.sysds.runtime.data.SparseBlock;
import org.apache.sysds.runtime.data.SparseRow;
import org.apache.sysds.runtime.matrix.data.LibMatrixDNN.PoolingType;
import org.apache.sysds.runtime.matrix.data.LibMatrixDNNHelper.CellIndex3;
/**
* This class contains the set of operators used for performing pooling
*/
public class LibMatrixDNNPooling {
protected static final Log LOG = LogFactory.getLog(LibMatrixDNNPooling.class.getName());
/**
 * Creates the set of parallel tasks that jointly execute a pooling
 * forward pass over the row range [0, N), choosing sparse or dense
 * worker implementations based on the input matrix format.
 *
 * @param params convolution parameters
 * @param poolType type of pooling
 * @return list of callable tasks for performing pooling operation
 */
public static ArrayList<Callable<Long>> getPoolingWorkers(DnnParameters params, PoolingType poolType) {
	ArrayList<Callable<Long>> tasks = new ArrayList<>();
	// Aim for roughly two tasks per thread for improved load balance.
	int numThreads = OptimizerUtils.getConstrainedNumThreads(params.numThreads);
	int blockSize = (int) Math.ceil((double) params.N / numThreads / 2);
	boolean sparseInput = params.input1.isInSparseFormat();
	for(int lo = 0; lo < params.N; lo += blockSize) {
		int hi = Math.min(lo + blockSize, params.N);
		tasks.add(sparseInput ?
			new SparsePooling(lo, hi, params, poolType) :
			new DensePooling(lo, hi, params, poolType));
	}
	return tasks;
}
/**
 * Creates the set of parallel tasks that jointly execute a pooling
 * backward pass, dispatching to the worker implementation that matches
 * the pooling type (max vs avg) and the sparsity of the inputs.
 *
 * @param params convolution parameters
 * @param performReluBackward whether to perform ReLU backward
 * @param poolType type of pooling operation to perform
 * @return list of callable tasks for performing pooling backward operation
 */
public static ArrayList<Callable<Long>> getPoolingBackwardWorkers(DnnParameters params, boolean performReluBackward, PoolingType poolType) {
	ArrayList<Callable<Long>> tasks = new ArrayList<>();
	// Aim for roughly two tasks per thread for improved load balance.
	int numThreads = OptimizerUtils.getConstrainedNumThreads(params.numThreads);
	int blockSize = (int) Math.ceil((double) params.N / numThreads / 2);
	if(poolType == PoolingType.MAX) {
		boolean sparse1 = params.input1.isInSparseFormat();
		boolean sparse2 = params.input2.isInSparseFormat();
		for(int lo = 0; lo < params.N; lo += blockSize) {
			int hi = Math.min(lo + blockSize, params.N);
			// pick one of the four sparse/dense worker combinations
			if(sparse1)
				tasks.add(sparse2 ?
					new PoolingBackwardSparseSparse(lo, hi, params, performReluBackward) :
					new PoolingBackwardSparseDense(lo, hi, params, performReluBackward));
			else
				tasks.add(sparse2 ?
					new PoolingBackwardDenseSparse(lo, hi, params, performReluBackward) :
					new PoolingBackwardDenseDense(lo, hi, params, performReluBackward));
		}
	}
	else {
		// average pooling backward only depends on the error (dout) format
		boolean sparse = params.input2.isInSparseFormat();
		for(int lo = 0; lo < params.N; lo += blockSize) {
			int hi = Math.min(lo + blockSize, params.N);
			tasks.add(sparse ?
				new AvgPoolingBackwardSparse(lo, hi, params) :
				new AvgPoolingBackwardDense(lo, hi, params));
		}
	}
	return tasks;
}
/**
 * Dense max/avg pooling specialization for stride 1 and zero padding,
 * operating on the row range [rl, ru).
 *
 * @param pType pooling type (MAX or AVG)
 * @param minVal initialization value for output cells (identity of the reduction)
 * @param pFact averaging factor applied in the AVG case
 * @param in dense input values, images laid out back-to-back starting at offset ii
 * @param out dense output values, results written starting at offset oi
 * @param rl lower row index (inclusive)
 * @param ru upper row index (exclusive)
 * @param ii offset of row rl's data in {@code in}
 * @param oi offset of row rl's results in {@code out}
 */
public static void poolingDenseStride1Pad0(PoolingType pType, double minVal, double pFact, double[] in,
	double[] out, int rl, int ru, int ii, int oi, int C, int P, int Q, int R, int S, int H, int W) {
	boolean max = (pType == PoolingType.MAX);
	int CHW = C * H * W;
	if( P == 1 && Q == 1 && W == 1 ) {
		//quick-path w/o materialized index arrays and
		//simplified inner loops for P = 1, Q = 1, W = 1
		// each channel reduces to a single value over min(R,H) contiguous cells
		int lenh = Math.min(R,H);
		for(int i = rl; i < ru; i++, oi+=C)
			for (int c = 0, off=ii+(i-rl)*CHW; c < C; c++, off+=H) {
				out[oi+c] = max ? max(minVal, in, off, lenh) :
					avg(minVal, in, off, lenh, pFact);
			}
	}
	else {
		int CPQ = C * P * Q, HW = H * W;
		// initialize this task's output slice with the reduction identity
		Arrays.fill(out, rl*CPQ, ru*CPQ, minVal);
		//quick-path w/o materialized index arrays
		// stride 1 / pad 0 means window (p,q) starts exactly at (h,w)=(p,q);
		// Math.min clamps windows that extend past the image border
		for(int i = rl; i < ru; i++)
			for (int c = 0, off=ii+(i-rl)*CHW, oix=oi+(i-rl)*CPQ; c < C; c++, off+=HW)
				for (int p = 0; p < P; p++, oix+=Q)
					for (int h = p; h < Math.min(p+R,H); h++)
						for (int q = 0, off2=off+h*W; q < Q; q++) {
							out[oix+q] = max ? max(out[oix+q], in, off2+q, Math.min(S,W-q)) :
								avg(out[oix+q], in, off2+q, Math.min(S,W-q), pFact);
						}
	}
}
/**
 * Task performing forward max/avg pooling over the row range [_rl, _ru)
 * of a dense input, writing into the dense output and returning the
 * number of non-zeros produced in that range.
 */
private static class DensePooling implements Callable<Long>
{
	private final int _rl, _ru;               // row range [_rl, _ru) handled by this task
	private final DnnParameters _params;
	private final PoolingType _poolingType;   // MAX or AVG
	private final double _poolingMultiplier;  // 1/(R*S), the averaging factor

	public DensePooling(int rl, int ru, DnnParameters params, PoolingType poolingType) {
		_rl = rl; _ru = ru;
		_params = params;
		_poolingType = poolingType;
		_poolingMultiplier = 1d/(params.R*params.S);
	}

	@Override
	public Long call() throws Exception {
		final int C = _params.C, P = _params.P, Q = _params.Q;
		final int R = _params.R, S = _params.S, H = _params.H, W = _params.W;
		final int HW = _params.H*_params.W;
		final int CHW = _params.C*_params.H*_params.W;
		final int CPQ = C*P*Q;
		double[] in = _params.input1.getDenseBlockValues();
		double[] out = _params.output.getDenseBlockValues();
		// AVG accumulates sums, so its identity is 0; MAX starts from the configured minimum
		double minValForMaxPoolOperations = _poolingType == PoolingType.AVG ? 0 : _params.minValForMaxPoolOperations;
		boolean max = (_poolingType == PoolingType.MAX);
		if( _params.isStride1Pad0() ) {
			// specialized path without start/end index arrays
			poolingDenseStride1Pad0(_poolingType, minValForMaxPoolOperations,
				_poolingMultiplier, in, out, _rl, _ru, _rl*CHW, _rl*CPQ, C, P, Q, R, S, H, W);
		}
		else { //general case
			//thread-local initialization of output block
			Arrays.fill(out, _rl*CPQ, _ru*CPQ, minValForMaxPoolOperations);
			// precomputed per-p and per-q window bounds (account for stride/padding)
			int[] hl = _params.start_indexes_h, hu = _params.end_indexes_h;
			int[] wl = _params.start_indexes_w, wu = _params.end_indexes_w;
			for(int i = _rl; i < _ru; i++)
				for (int c = 0, off=i*CHW, oix=i*CPQ; c < C; c++, off+=HW)
					for (int p = 0; p < P; p++, oix+=Q)
						for (int h = hl[p]; h < hu[p]; h++)
							for (int q = 0, off2=off+h*W; q < Q; q++) {
								out[oix+q] = max ? max(out[oix+q], in, off2+wl[q], wu[q]-wl[q]) :
									avg(out[oix+q], in, off2+wl[q], wu[q]-wl[q], _poolingMultiplier);
							}
		}
		//thread-local recomputation of non-zeros
		return _params.output.recomputeNonZeros(_rl, _ru-1);
	}
}
/**
 * Task performing forward max/avg pooling over the row range [_rl, _ru)
 * of a sparse input, writing into the dense output and returning the
 * number of non-zeros produced in that range. Iterates the sparse cells
 * (including implied zeros) once per row and scatters each value into
 * all pooling windows that cover it.
 */
private static class SparsePooling implements Callable<Long>
{
	private final int _rl, _ru;               // row range [_rl, _ru) handled by this task
	private final DnnParameters _params;
	private double [] outputArray;
	private final int C, P, Q, W, H, CPQ, PQ;
	private final PoolingType _poolingType;   // MAX or AVG
	private final double _poolingMultiplier;  // 1/(R*S), the averaging factor

	public SparsePooling(int rl, int ru, DnnParameters params, PoolingType poolingType) {
		_rl = rl; _ru = ru;
		_params = params;
		outputArray = params.output.getDenseBlockValues();
		C = params.C; P = params.P; Q = params.Q; H = params.H;
		W = params.W;
		CPQ = C*P*Q;
		PQ = P*Q;
		_poolingType = poolingType;
		// 1/(R*S) via exact IEEE division for consistency with DensePooling
		// (Math.pow(x,-1) is only guaranteed accurate to 1 ulp)
		_poolingMultiplier = 1d/(params.R*params.S);
	}

	@Override
	public Long call() throws Exception {
		//thread-local initialization of output block
		if(_poolingType == PoolingType.MAX)
			Arrays.fill(outputArray, _rl *CPQ, _ru*CPQ, _params.minValForMaxPoolOperations);
		for(int n = _rl; n < _ru; n++) {
			if( !_params.input1.sparseBlock.isEmpty(n) ) {
				final int apos = _params.input1.sparseBlock.pos(n);
				final int alen = _params.input1.sparseBlock.size(n);
				final int [] aix = _params.input1.sparseBlock.indexes(n);
				final double [] avals = _params.input1.sparseBlock.values(n);
				// chw scans all dense positions; index tracks the current sparse entry
				int chw = 0; int index = apos;
				for (int c = 0; c < C; c++) {
					final int outOffset = n*CPQ + c*PQ;
					for(int h = 0; h < H; h++) {
						for(int w = 0; w < W; w++, chw++) {
							// Take into account zero values as well
							double nchwVal = 0;
							if(aix[index] == chw) {
								nchwVal = avals[index++];
								// Ensure that we satisfy the condition index < apos+alen
								if(index >= apos+alen) index--;
							}
							if(_poolingType == PoolingType.MAX) {
								// Perform maxpooling without binary search :)
								// Tradeoff as compared to dense maxpooling:
								// In dense maxpooling, iteration space CPQHW where H and W iterations are restricted by _params.start_indexes_h[p]
								// and are eligible for JIT optimizations.
								// In sparse maxpooling, iteration space CHWPQ without HW restrictions.
								for (int p = 0; p < P; p++) {
									if(h >= _params.start_indexes_h[p] && h < _params.end_indexes_h[p]) {
										final int outOffsetWithp = outOffset + p*Q;
										for (int q = 0; q < Q; q++) {
											if(w >= _params.start_indexes_w[q] && w < _params.end_indexes_w[q]) {
												outputArray[outOffsetWithp + q] = Math.max(outputArray[outOffsetWithp + q], nchwVal);
											}
										}
									}
								}
							}
							else {
								// AVG: accumulate the scaled value into every covering window
								for (int p = 0; p < P; p++) {
									if(h >= _params.start_indexes_h[p] && h < _params.end_indexes_h[p]) {
										final int outOffsetWithp = outOffset + p*Q;
										for (int q = 0; q < Q; q++) {
											if(w >= _params.start_indexes_w[q] && w < _params.end_indexes_w[q]) {
												outputArray[outOffsetWithp + q] += _poolingMultiplier*nchwVal;
											}
										}
									}
								}
							}
						}
					}
				}
			}
			else {
				// Empty input image
				Arrays.fill(outputArray, n*CPQ, (n+1)*CPQ, 0);
			}
		}
		//thread-local recomputation of non-zeros
		return _params.output.recomputeNonZeros(_rl, _ru-1);
	}
}
//BACKWARD
/**
 * Performs the avgpooling backward operation for dense error (dout):
 * each output-gradient cell is distributed uniformly, scaled by 1/(R*S),
 * over all cells of its pooling window in the input-gradient.
 */
private static class AvgPoolingBackwardDense implements Callable<Long>
{
	public int _rl; public int _ru;           // row range [_rl, _ru) handled by this task
	private final DnnParameters _params;
	double [] doutArray;                      // dense error values (dout)
	MatrixBlock output;                       // input-gradient accumulation target
	final int C; final int CHW; final int P; final int Q; final int HW; final int CPQ; final int PQ;
	final double _poolingMultiplier;          // 1/(R*S), the averaging factor

	public AvgPoolingBackwardDense(int rl, int ru, DnnParameters params) {
		_rl = rl; _ru = ru;
		_params = params;
		doutArray = params.input2.getDenseBlockValues();
		output = params.output;
		C = params.C; CHW = params.C*params.H*params.W; HW = params.H*params.W;
		P = params.P; Q = params.Q; CPQ = params.C*params.P*params.Q;
		PQ = params.P*params.Q;
		// 1/(R*S) via exact IEEE division for consistency with DensePooling
		// (Math.pow(x,-1) is only guaranteed accurate to 1 ulp)
		_poolingMultiplier = 1d/(params.R*params.S);
		if (doutArray == null || output.getDenseBlock() == null )
			throw new RuntimeException("Incorrect usage: empty inputs");
	}

	@Override
	public Long call() throws Exception {
		double[] out = output.getDenseBlockValues();
		for(int n = _rl; n < _ru; n++) {
			for (int c = 0; c < C; c++) {
				final int inputOffset = n*CHW + c*HW;
				final int outputOffset = n*CPQ + c*PQ;
				for (int p = 0; p < P; p++) {
					for (int q = 0; q < Q; q++) {
						// window bounds of output cell (p,q), precomputed for stride/padding
						int start_index_h = _params.start_indexes_h[p];
						int end_index_h = _params.end_indexes_h[p];
						int start_index_w = _params.start_indexes_w[q];
						int end_index_w = _params.end_indexes_w[q];
						for (int h = start_index_h; h < end_index_h; h++) {
							for (int w = start_index_w; w < end_index_w; w++) {
								out[inputOffset + h*_params.W + w] += _poolingMultiplier*doutArray[outputOffset + p * Q + q];
							}
						}
					}
				}
			}
		}
		//thread-local nnz maintenance
		return output.recomputeNonZeros(_rl, _ru-1);
	}
}
/**
 * Performs the maxpooling backward operation for dense input and dense error (dout)
 */
private static class PoolingBackwardDenseDense implements Callable<Long>
{
	public int _rl; public int _ru;           // row range [_rl, _ru) handled by this task
	private final DnnParameters _params;
	boolean performReluBackward;              // if true, negative input cells are ignored as max candidates
	double [] inputArray, doutArray;          // dense forward input and dense error (dout)
	MatrixBlock output;                       // input-gradient target (may be sparse or dense)
	int C; int CHW; int P; int Q; int HW; int CPQ; int PQ;

	public PoolingBackwardDenseDense(int rl, int ru, DnnParameters params, boolean performReluBackward) {
		_rl = rl; _ru = ru;
		_params = params;
		this.performReluBackward = performReluBackward;
		inputArray = params.input1.getDenseBlockValues();
		doutArray = params.input2.getDenseBlockValues();
		output = params.output;
		C = params.C; CHW = params.C*params.H*params.W; HW = params.H*params.W;
		P = params.P; Q = params.Q; CPQ = params.C*params.P*params.Q;
		PQ = params.P*params.Q;
		if (inputArray == null || doutArray == null )
			throw new RuntimeException("Incorrect usage: empty inputs");
	}

	@Override
	public Long call() throws Exception {
		if(output.isInSparseFormat()){
			SparseBlock out = output.getSparseBlock();
			// reusable buffers for up to Q (index, value) pairs per p-row
			final int[] i = new int[Q];
			final double[] v = new double[Q];
			for(int n = _rl; n < _ru; n++){
				// each row correspond to a single batch element.
				// here we allocate the sparse row.
				out.allocate(n, P*Q*C);
				final SparseRow elm = out.get(n);
				final int nCHW = n*CHW;
				// tmp arrays for sorting.
				for(int c = 0; c < C; c++){
					// each channel processed.
					final int inputOffset = nCHW + c*HW;
					final int outputOffset = n*CPQ + c*PQ;
					for(int p = 0; p < P; p++){
						int pointer = 0;
						for(int q = 0; q < Q; q++){
							// route the gradient of window (p,q) to the argmax cell of that window
							int maxIndex = getMaxIndex(p, q, inputOffset, inputArray, _params, performReluBackward);
							if(maxIndex != -1){
								i[pointer] = maxIndex - nCHW;
								v[pointer] = doutArray[outputOffset + p * Q + q];
								pointer++;
							}
						}
						// flush the buffered (index,value) pairs into the sparse row
						add(elm,i,v,pointer);
					}
				}
			}
		}
		else{
			double[] out = output.getDenseBlockValues();
			for(int n = _rl; n < _ru; n++) {
				for (int c = 0; c < C; c++) {
					final int inputOffset = n*CHW + c*HW;
					final int outputOffset = n*CPQ + c*PQ;
					for (int p = 0; p < P; p++) {
						for (int q = 0; q < Q; q++) {
							// route the gradient of window (p,q) to the argmax cell of that window
							int maxIndex = getMaxIndex(p, q, inputOffset, inputArray, _params, performReluBackward);
							if(maxIndex != -1)
								out[maxIndex] += doutArray[outputOffset + p * Q + q];
						}
					}
				}
			}
		}
		//thread-local nnz maintenance
		// we know the number of nonzeros in the output because max pooling backwards only ouput one value per kernel.
		return P*Q*C*(long)(_ru - _rl);
	}
}
/**
 * Performs the maxpooling backward operation for dense input and sparse error (dout)
 */
private static class PoolingBackwardDenseSparse implements Callable<Long>
{
	public int _rl; public int _ru;           // row range [_rl, _ru) handled by this task
	private final DnnParameters _params;
	MatrixBlock output;                       // input-gradient target (may be sparse or dense)
	boolean performReluBackward;              // if true, negative input cells are ignored as max candidates
	double [] inputArray; MatrixBlock dout;   // dense forward input; sparse error (dout)
	final int CHW; final int P; final int Q; final int HW; final int C;

	public PoolingBackwardDenseSparse(int rl, int ru, DnnParameters params, boolean performReluBackward) {
		_rl = rl; _ru = ru;
		_params = params;
		this.performReluBackward = performReluBackward;
		inputArray = params.input1.getDenseBlockValues();
		dout = params.input2;
		output = params.output;
		C = params.C;
		CHW = params.C*params.H*params.W; HW = params.H*params.W;
		P = params.P; Q = params.Q;
		if (inputArray == null )
			throw new RuntimeException("Incorrect usage: empty inputs");
		if (!params.input2.isInSparseFormat())
			throw new RuntimeException("Incorrect usage: Call optimized versions");
	}

	@Override
	public Long call() throws Exception {
		SparseBlock sblock = dout.sparseBlock;
		if(output.isInSparseFormat()){
			SparseBlock out = output.getSparseBlock();
			// reusable buffers for up to Q (index, value) pairs per p-row
			final int[] i = new int[Q];
			final double[] v = new double[Q];
			for(int n = _rl; n < _ru; n++){
				// each row correspond to a single batch element.
				// here we allocate the sparse row.
				if( sblock.isEmpty(n) ) continue;
				out.allocate(n, P*Q*C);
				final SparseRow elm = out.get(n);
				final int apos = sblock.pos(n);
				final int alen = sblock.size(n);
				final int[] aix = sblock.indexes(n);
				final double[] avals = sblock.values(n);
				int oldP = 0;
				int pointer = 0;
				final int nCHW = n*CHW;
				for(int j = apos; j < apos+alen; j++) {
					// decompose the flat dout column index into (c, p, q)
					final int tmp = aix[j] / Q;
					final int inputOffset = nCHW + (tmp / P) * HW;
					final int p = tmp % P;
					final int q = aix[j] % Q;
					// flush buffered pairs whenever p advances, keeping row indexes sorted
					if(p != oldP){
						add(elm, i, v, pointer);
						oldP = p;
						pointer = 0;
					}
					int maxIndex = getMaxIndex(p, q, inputOffset, inputArray, _params, performReluBackward);
					if(maxIndex != -1){
						i[pointer] = maxIndex - nCHW;
						v[pointer] = avals[j];
						pointer++;
					}
				}
				// flush remaining buffered pairs of the last p-row
				add(elm, i, v, pointer);
			}
		}
		else {
			CellIndex3 ix = new CellIndex3();
			double[] out = output.getDenseBlockValues();
			for(int n = _rl; n < _ru; n++) {
				if( sblock.isEmpty(n) ) continue;
				int apos = sblock.pos(n);
				int alen = sblock.size(n);
				int[] aix = sblock.indexes(n);
				double[] avals = sblock.values(n);
				for(int j = apos; j < apos+alen; j++) {
					// decompose the flat dout column index into (c, p, q)
					ix = LibMatrixDNNHelper.computeTensorIndexes(aix[j], P, Q, ix);
					final int inputOffset = n*CHW + ix.ix1*HW;
					// route the gradient to the argmax cell of window (p,q)
					int maxIndex = getMaxIndex(ix.ix2, ix.ix3,
						inputOffset, inputArray, _params, performReluBackward);
					if(maxIndex != -1)
						out[maxIndex] += avals[j];
				}
			}
		}
		//thread-local nnz maintenance
		return P*Q*C*(long)(_ru - _rl);
	}
}
/**
* Performs the avgpooling backward operation for sparse error (dout)
*/
private static class AvgPoolingBackwardSparse implements Callable<Long>
{
public int _rl; public int _ru;
private final DnnParameters _params;
MatrixBlock output;
MatrixBlock dout;
int CHW; int P; int Q; int HW; final double _poolingMultiplier;
public AvgPoolingBackwardSparse(int rl, int ru, DnnParameters params) {
_rl = rl; _ru = ru;
_params = params;
dout = params.input2;
output = params.output;
CHW = params.C*params.H*params.W; HW = params.H*params.W;
P = params.P; Q = params.Q;
_poolingMultiplier = Math.pow(params.R*params.S, -1);
if (output.getDenseBlock() == null )
throw new RuntimeException("Incorrect usage: empty inputs");
}
@Override
public Long call() throws Exception {
CellIndex3 ix = new CellIndex3();
double[] out = output.getDenseBlockValues();
SparseBlock sblock = dout.sparseBlock;
for(int n = _rl; n < _ru; n++) {
if( sblock.isEmpty(n) ) continue;
int apos = sblock.pos(n);
int alen = sblock.size(n);
int[] aix = sblock.indexes(n);
double[] avals = sblock.values(n);
for(int j = apos; j < apos+alen; j++) {
ix = LibMatrixDNNHelper.computeTensorIndexes(aix[j], P, Q, ix);
int c = ix.ix1;
int p = ix.ix2;
int q = ix.ix3;
final int inputOffset = n*CHW + c*HW;
int start_index_h = _params.start_indexes_h[p];
int end_index_h = _params.end_indexes_h[p];
int start_index_w = _params.start_indexes_w[q];
int end_index_w = _params.end_indexes_w[q];
for (int h = start_index_h; h < end_index_h; h++) {
for (int w = start_index_w; w < end_index_w; w++) {
out[inputOffset + h*_params.W + w] += _poolingMultiplier*avals[j];
}
}
}
}
//thread-local nnz maintenance
return output.recomputeNonZeros(_rl, _ru-1);
}
}
/**
* Performs the maxpooling backward operation for sparse input and dense error (dout)
*
* Currently this is NOT IN USE since the sparse left part is forced dense.
* This is because this method is inefficient compared to our dense version.
*
*/
private static class PoolingBackwardSparseDense implements Callable<Long>
{
private final int _rl, _ru;
private final DnnParameters _params;
private final boolean reluBack;
protected final MatrixBlock doutput, output;
protected PoolingBackwardSparseDense(int rl, int ru, DnnParameters params, boolean relu, MatrixBlock dout, MatrixBlock out) {
_rl = rl; _ru = ru;
_params = params;
reluBack = relu;
doutput = dout;
output = out;
}
public PoolingBackwardSparseDense(int rl, int ru, DnnParameters params, boolean relu) {
this(rl, ru, params, relu, params.input2, params.output);
if (doutput.getDenseBlock() == null )
throw new RuntimeException("Incorrect usage: empty inputs");
if (!params.input1.isInSparseFormat())
throw new RuntimeException("Incorrect usage: sparse input1 expected");
}
@Override
public Long call() throws Exception
{
final int P = _params.P, Q = _params.Q, W = _params.W;
final int C = _params.C, R = _params.R, S = _params.S;
final int padh = _params.pad_h, padw = _params.pad_w;
final int strideh = _params.stride_h, stridew = _params.stride_w;
final int PQ = _params.P * _params.Q;
final int CPQ = _params.C * _params.P * _params.Q;
final int HW = _params.H * _params.W;
final int CHW = _params.C * _params.H * _params.W;
//allocate auxiliary data structures
double[] maxVal = new double[PQ];
int[] maxIx = new int[PQ];
for(int n = _rl; n < _ru; n++) {
for (int c = 0; c < C; c++) {
//step 1: perform maxpooling w/ index maintenance in a
//single, sequential pass over the sparse input matrix
boolean empty = maxpoolingForward(maxVal, maxIx, n, c,
padh, padw, strideh, stridew, C, P, Q, R, S, HW, W);
if(!empty){
//step 2: perform maxpooling backward
if(output.isInSparseFormat())
maxpoolingBackwardSparse(maxIx, c*HW, n, c, C, Q, P, CPQ);
else
maxpoolingBackwardDense(maxIx, n*CHW + c*HW, n, c, C, Q, PQ, CPQ);
}
}
}
//thread-local nnz maintenance
return P*Q*C*(long)(_ru - _rl);
}
protected boolean maxpoolingForward(double[] maxVal, int[] maxIx, int n, int c, int padh, int padw, int strideh, int stridew, int C, int P, int Q, int R, int S, int HW, int W) {
SparseBlock sblock = _params.input1.getSparseBlock();
if( !sblock.isEmpty(n) ) {
Arrays.fill(maxVal, -Double.MAX_VALUE);
int apos = sblock.pos(n);
int alen = sblock.size(n);
int[] aix = sblock.indexes(n);
double[] avals = sblock.values(n);
//find channel start and end, w/ robustness for non-existing entries
int cpos = (c==0) ? 0 : sblock.posFIndexGTE(n, c*HW);
int cpos2 = (c+1==C) ? alen : sblock.posFIndexGTE(n, (c+1)*HW);
cpos = (cpos>=0) ? cpos : alen;
cpos2 = (cpos2>=0) ? cpos2 : alen;
int lastix = c*HW-1;
for(int j=apos+cpos; j<apos+cpos2; j++) {
//handle skipped zero values
update0(lastix+1, aix[j], maxVal, maxIx, padh, padw, strideh, stridew, P, Q, R, S, HW, W);
//handle current non-zero value
int h = (aix[j] % HW) / W;
int w = aix[j] % W;
double val = reluBack && avals[j] < 0 ? 0 : avals[j];
update(val, maxVal, maxIx, h, w, padh, padw, strideh, stridew, P, Q, R, S, W);
//memoize last seen index
lastix = aix[j];
}
//handle skipped zero values at end of row
update0(lastix+1, (c+1)*HW, maxVal, maxIx, padh, padw, strideh, stridew, P, Q, R, S, HW, W);
return false;
}
else {
return true;
}
}
protected void maxpoolingBackwardDense(int[] maxIx, int outOffset, int n, int c, int C, int Q, int PQ, int CPQ) {
double[] dout = doutput.getDenseBlockValues();
double[] out = output.getDenseBlockValues();
final int doutOffset = n*CPQ + c*PQ;
for( int pq = 0; pq < PQ; pq++ )
out[ outOffset + maxIx[pq] ] += dout[ doutOffset + pq ];
}
protected void maxpoolingBackwardSparse(int[] maxIx, int offset, int n, int c, int C, int Q, int P, int CPQ) {
double[] dout = doutput.getDenseBlockValues();
SparseBlock out = output.getSparseBlock();
out.allocate(n, P * Q);
SparseRow row = out.get(n);
final int doutOffset = n*CPQ + c*P * Q;
int pq = 0;
for( int p = 0; p < P; p++ ){
for(int q = 0; q < Q; q++){
row.add(maxIx[pq] + offset ,dout[ doutOffset + pq ]);
pq++;
}
}
}
private static void update0(int lix, int uix, double[] maxVal, int[] maxIx, int padh, int padw, int strideh, int stridew, int P, int Q, int R, int S, int HW, int W) {
	//process a run of implicit zeros [lix, uix): every skipped position
	//competes with value 0 in all pooling windows that cover it
	//TODO exploit constant value and overlap for potential early abort
	int ix = lix;
	while( ix < uix ) {
		final int h = (ix % HW) / W;
		final int w = ix % W;
		update(0, maxVal, maxIx, h, w, padh, padw, strideh, stridew, P, Q, R, S, W);
		ix++;
	}
}
private static void update(double val, double[] maxVal, int[] maxIx, int h, int w, int padh, int padw, int strideh, int stridew, int P, int Q, int R, int S, int W) {
	//bounds of output cells (p,q) whose pooling window covers input (h,w);
	//obtained by inverting the forward index computation (see
	//fillIndexesArray, solved for p and q) and clipping to [0,P) x [0,Q)
	final int pl = Math.max((h + padh - R + strideh) / strideh, 0);
	final int pu = Math.min((h + padh + strideh) / strideh, P);
	final int ql = Math.max((w + padw - S + stridew) / stridew, 0);
	final int qu = Math.min((w + padw + stridew) / stridew, Q);
	//linearized input position, recorded as argmax wherever val wins
	final int inIx = h * W + w;
	for( int p = pl; p < pu; p++ ) {
		for( int q = ql; q < qu; q++ ) {
			final int cell = p * Q + q;
			//strictly greater: first occurrence wins on ties
			if( val > maxVal[cell] ) {
				maxVal[cell] = val;
				maxIx[cell] = inIx;
			}
		}
	}
}
}
/**
 * Performs the maxpooling backward operation for sparse input and sparse error (dout)
 *
 * Currently this is NOT IN USE since the sparse left part is forced dense.
 * This is because this method is inefficient compared to our dense version.
 *
 */
private static class PoolingBackwardSparseSparse extends PoolingBackwardSparseDense
{
	public PoolingBackwardSparseSparse(int rl, int ru, DnnParameters params, boolean relu) {
		super(rl, ru, params, relu, params.input2, params.output);
		//guard: both input and error signal must be sparse here, otherwise
		//one of the specialized (partially) dense variants applies
		if (!params.input1.isInSparseFormat() || !params.input2.isInSparseFormat())
			throw new RuntimeException("Incorrect usage: Call optimized versions");
	}
	@Override
	protected void maxpoolingBackwardDense(int[] maxIx, int outOffset, int n, int c, int C, int Q, int PQ, int CPQ) {
		//sparse error signal (dout), dense output accumulator
		SparseBlock sblock = doutput.getSparseBlock();
		double[] out = output.getDenseBlockValues();
		if( sblock.isEmpty(n) )
			return; //no non-zero errors in this row
		int apos = sblock.pos(n);
		int alen = sblock.size(n);
		int[] aix = sblock.indexes(n);
		double[] avals = sblock.values(n);
		//find channel start and end, w/ robustness for non-existing entries
		int cpos = (c==0) ? 0 : sblock.posFIndexGTE(n, c*PQ);
		int cpos2 = (c+1==C) ? alen : sblock.posFIndexGTE(n, (c+1)*PQ);
		cpos = (cpos>=0) ? cpos : alen;
		cpos2 = (cpos2>=0) ? cpos2 : alen;
		for(int j = apos+cpos; j<apos+cpos2; j++) {
			//decode channel-local output position; note pq == aix[j]%PQ,
			//since aix[j]%Q == (aix[j]%PQ)%Q given that Q divides PQ
			int p = (aix[j] % PQ) / Q;
			int q = aix[j] % Q;
			int pq = p * Q + q;
			//scatter the error to the recorded argmax input position
			out[ outOffset + maxIx[pq] ] += avals[j];
		}
	}
	@Override
	protected void maxpoolingBackwardSparse(int[] maxIx, int offset, int n, int c, int C, int Q, int P, int CPQ) {
		//sparse error signal (dout), sparse output accumulator
		SparseBlock sblock = doutput.getSparseBlock();
		if( sblock.isEmpty(n) )
			return; //no non-zero errors in this row
		final int PQ = P*Q;
		SparseBlock out = output.getSparseBlock();
		out.allocate(n, PQ);
		SparseRow row = out.get(n);
		int apos = sblock.pos(n);
		int alen = sblock.size(n);
		int[] aix = sblock.indexes(n);
		double[] avals = sblock.values(n);
		//find channel start and end, w/ robustness for non-existing entries
		int cpos = (c==0) ? 0 : sblock.posFIndexGTE(n, c*PQ);
		int cpos2 = (c+1==C) ? alen : sblock.posFIndexGTE(n, (c+1)*PQ);
		cpos = (cpos>=0) ? cpos : alen;
		cpos2 = (cpos2>=0) ? cpos2 : alen;
		for(int j = apos+cpos; j<apos+cpos2; j++) {
			//decode channel-local output position (pq == aix[j]%PQ)
			int p = (aix[j] % PQ) / Q;
			int q = aix[j] % Q;
			int pq = p * Q + q;
			row.add( maxIx[pq] + offset, avals[j]);
		}
	}
}
private static double avg(final double aval, double[] b, final int bi, final int len, final double poolingMultiplier) {
	//average pooling: scaled window sum accumulated onto the running value
	final double wsum = LibSpoofPrimitives.vectSum(b, bi, len);
	return aval + wsum * poolingMultiplier;
}
private static double max(final double aval, double[] b, final int bi, final int len) {
	//max pooling: running maximum over b[bi, bi+len), seeded with aval
	double best = aval;
	int pos = bi + len;
	while( --pos >= bi )
		best = Math.max(best, b[pos]);
	return best;
}
/**
 * Returns the index of cell with maximum value. This method is optimized for dense input
 *
 * @param p output feature map height
 * @param q output feature map width
 * @param inputOffset offset to be used for input index
 * @param inputArray input array
 * @param params convolution parameters
 * @param performReluBackward perform ReLU backward
 * @return index of cell with maximum value; -1 when performReluBackward is
 *         set and no strictly positive value exists in the window
 */
private static int getMaxIndex(int p, int q, int inputOffset, double [] inputArray, DnnParameters params, boolean performReluBackward) {
	//precomputed pooling-window bounds for output cell (p,q)
	int start_index_h = params.start_indexes_h[p];
	int end_index_h = params.end_indexes_h[p];
	int start_index_w = params.start_indexes_w[q];
	int end_index_w = params.end_indexes_w[q];
	int maxIndex = -1;
	//for ReLU backward, values <= 0 carry no gradient, so the search
	//threshold starts at 0 instead of -infinity
	double maxVal = performReluBackward ? 0 : Double.NEGATIVE_INFINITY;
	// Note: We do not treat pad as zero and hence we don't do:
	// maxVal = 0
	// if start_index_h < 0 || start_index_w < 0 || end_index_h >= params.H || end_index_w >= params.W
	// Find maxIndex
	for (int h = start_index_h; h < end_index_h; h++) {
		for (int w = start_index_w; w < end_index_w; w++) {
			final int idx = inputOffset + h*params.W + w;
			final double currDoutVal = inputArray[idx];
			if(maxVal < currDoutVal) {
				maxIndex = idx;
				maxVal = currDoutVal;
			}
		}
	}
	//ReLU backward: the max never rose above the 0 seed -> nothing to propagate
	return maxVal == 0 && performReluBackward ? -1 : maxIndex;
}
/**
 * Appends all given index/value pairs to the sparse row. It is guaranteed
 * that every index in i is larger than all indexes already contained in
 * row, so a plain append after sorting preserves row order.
 *
 * @param row the row to append to
 * @param i the indexes to append
 * @param v the values to append
 * @param size number of valid entries in i and v
 */
private static void add(SparseRow row, int[] i, double[] v, int size){
	//establish ascending index order (values permuted alongside)
	sort(i, v, size);
	int pos = 0;
	while( pos < size ) {
		row.append(i[pos], v[pos]);
		pos++;
	}
}
/**
 * Sorts the index array i (permuting v alongside) using optimal sorting
 * networks — the theoretical minimum number of compare-exchange operations
 * — for sizes up to 7, and insertion sort otherwise. Intended for small
 * arrays; sizes above 32 are only logged as suboptimal, not rejected.
 *
 * @param i indexes to sort by
 * @param v the values to sort along side
 * @param size number of valid entries in i and v
 */
private static void sort(int[] i , double[] v, int size){
	if(size > 32)
		LOG.warn("Not a optimal size for small array sort " + size);
	if( size <= 1 )
		return; //already sorted (also covers empty input)
	else if( size == 2 )
		comp(i,v,0,1);
	else if( size == 3 )
		sort3(i,v);
	else if( size == 4 )
		sort4(i,v);
	else if( size == 5 )
		sort5(i,v);
	else if( size == 6 )
		sort6(i,v);
	else if( size == 7 )
		sort7(i,v);
	else {
		// Fallback: insertion sort. The input is typically semi-sorted,
		// which makes this acceptable, but not ideal for larger arrays.
		// Larger arrays only occur if the input data allow many kernels
		// in the horizontal dimension.
		insertSort(i,v, size);
	}
}
//Sorting networks for fixed sizes 3..7. The sequence of compare-exchange
//operations IS the algorithm: each comp() may be reordered only within its
//"block" (a layer of independent comparisons), never across blocks.
//optimal sorting network for 3 elements
private static void sort3(int[] i, double[] v){
	// 3 moves
	comp(i,v,0,2);
	comp(i,v,0,1);
	comp(i,v,1,2);
}
//optimal sorting network for 4 elements
private static void sort4(int[] i, double[] v){
	// 5 moves
	// block 1
	comp(i,v,0,2);
	comp(i,v,1,3);
	// block 2
	comp(i,v,0,1);
	comp(i,v,2,3);
	// block 3
	comp(i,v,1,2);
}
//optimal sorting network for 5 elements
private static void sort5(int[] i, double[] v){
	// 9 moves
	// block 1
	comp(i,v,0,1);
	comp(i,v,2,3);
	// block 2
	comp(i,v,1,3);
	comp(i,v,2,4);
	// block 3
	comp(i,v,1,4);
	comp(i,v,0,2);
	// block 4
	comp(i,v,1,2);
	comp(i,v,3,4);
	// block 5
	comp(i,v,2,3);
}
//optimal sorting network for 6 elements
private static void sort6(int[] i, double[] v){
	// 12 moves
	// block 1
	comp(i,v,0,1);
	comp(i,v,2,3);
	comp(i,v,4,5);
	// block 2
	comp(i,v,1,3);
	// block 3
	comp(i,v,0,4);
	// block 4
	comp(i,v,1,3);
	// block 5
	comp(i,v,1,5);
	// block 6
	comp(i,v,2,4);
	// block 7
	comp(i,v,1,2);
	comp(i,v,3,5);
	// block 8
	comp(i,v,3,4);
	// block 9
	comp(i,v,2,3);
}
//optimal sorting network for 7 elements
private static void sort7(int[] i, double[] v){
	// 16 moves.
	// block 1
	comp(i,v,0,1);
	comp(i,v,2,3);
	comp(i,v,4,5);
	// block 2
	comp(i,v,0,6);
	// block 3
	comp(i,v,2,4);
	// block 4
	comp(i,v,0,2);
	// block 5
	comp(i,v,1,3);
	comp(i,v,5,6);
	// block 6
	comp(i,v,1,4);
	// block 7
	comp(i,v,2,5);
	// block 8
	comp(i,v,1,2);
	comp(i,v,4,5);
	// block 9
	comp(i,v,2,4);
	// block 10
	comp(i,v,3,6);
	// block 11
	comp(i,v,3,5);
	// block 12
	comp(i,v,3,4);
}
/**
 * Stable insertion sort on i (ascending), moving v along with the indexes.
 * Efficient for the small, semi-sorted arrays this class produces.
 *
 * @param i indexes to sort by
 * @param v the values to sort along side
 * @param size number of valid entries
 */
private static void insertSort(int[] i, double[] v, int size){
	for( int pos = 1; pos < size; pos++ ) {
		final int key = i[pos];
		final double val = v[pos];
		int ins = pos - 1;
		//shift larger entries one slot to the right
		while( ins >= 0 && i[ins] > key ) {
			i[ins + 1] = i[ins];
			v[ins + 1] = v[ins];
			ins--;
		}
		i[ins + 1] = key;
		v[ins + 1] = val;
	}
}
//compare-exchange: ensures i[f] <= i[t] afterwards, carrying the
//corresponding values in v along with the indexes (strict > keeps stability)
private static void comp(int[] i , double[] v, int f, int t){
	if(i[f] > i[t])
		swap(i,v,f,t);
}
//exchange positions f and t in both parallel arrays
private static void swap(int[] i , double[] v, int f, int t){
	final int oldIt = i[t];
	final double oldVt = v[t];
	i[t] = i[f];
	v[t] = v[f];
	i[f] = oldIt;
	v[f] = oldVt;
}
}
|
googleapis/google-cloud-java | 36,990 | java-recommender/proto-google-cloud-recommender-v1beta1/src/main/java/com/google/cloud/recommender/v1beta1/InsightStateInfo.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/recommender/v1beta1/insight.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.recommender.v1beta1;
/**
*
*
* <pre>
* Information related to insight state.
* </pre>
*
* Protobuf type {@code google.cloud.recommender.v1beta1.InsightStateInfo}
*/
public final class InsightStateInfo extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.recommender.v1beta1.InsightStateInfo)
InsightStateInfoOrBuilder {
private static final long serialVersionUID = 0L;
// Use InsightStateInfo.newBuilder() to construct.
private InsightStateInfo(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private InsightStateInfo() {
state_ = 0;
}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
return new InsightStateInfo();
}
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.recommender.v1beta1.InsightOuterClass
.internal_static_google_cloud_recommender_v1beta1_InsightStateInfo_descriptor;
}
@SuppressWarnings({"rawtypes"})
@java.lang.Override
protected com.google.protobuf.MapFieldReflectionAccessor internalGetMapFieldReflection(
int number) {
switch (number) {
case 2:
return internalGetStateMetadata();
default:
throw new RuntimeException("Invalid map field number: " + number);
}
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.recommender.v1beta1.InsightOuterClass
.internal_static_google_cloud_recommender_v1beta1_InsightStateInfo_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.recommender.v1beta1.InsightStateInfo.class,
com.google.cloud.recommender.v1beta1.InsightStateInfo.Builder.class);
}
/**
*
*
* <pre>
* Represents insight state.
* </pre>
*
* Protobuf enum {@code google.cloud.recommender.v1beta1.InsightStateInfo.State}
*/
public enum State implements com.google.protobuf.ProtocolMessageEnum {
/**
*
*
* <pre>
* Unspecified state.
* </pre>
*
* <code>STATE_UNSPECIFIED = 0;</code>
*/
STATE_UNSPECIFIED(0),
/**
*
*
* <pre>
* Insight is active. Content for ACTIVE insights can be updated by Google.
* ACTIVE insights can be marked DISMISSED OR ACCEPTED.
* </pre>
*
* <code>ACTIVE = 1;</code>
*/
ACTIVE(1),
/**
*
*
* <pre>
* Some action has been taken based on this insight. Insights become
* accepted when a recommendation derived from the insight has been marked
* CLAIMED, SUCCEEDED, or FAILED. ACTIVE insights can also be marked
* ACCEPTED explicitly. Content for ACCEPTED insights is immutable. ACCEPTED
* insights can only be marked ACCEPTED (which may update state metadata).
* </pre>
*
* <code>ACCEPTED = 2;</code>
*/
ACCEPTED(2),
/**
*
*
* <pre>
* Insight is dismissed. Content for DISMISSED insights can be updated by
* Google. DISMISSED insights can be marked as ACTIVE.
* </pre>
*
* <code>DISMISSED = 3;</code>
*/
DISMISSED(3),
UNRECOGNIZED(-1),
;
/**
*
*
* <pre>
* Unspecified state.
* </pre>
*
* <code>STATE_UNSPECIFIED = 0;</code>
*/
public static final int STATE_UNSPECIFIED_VALUE = 0;
/**
*
*
* <pre>
* Insight is active. Content for ACTIVE insights can be updated by Google.
* ACTIVE insights can be marked DISMISSED OR ACCEPTED.
* </pre>
*
* <code>ACTIVE = 1;</code>
*/
public static final int ACTIVE_VALUE = 1;
/**
*
*
* <pre>
* Some action has been taken based on this insight. Insights become
* accepted when a recommendation derived from the insight has been marked
* CLAIMED, SUCCEEDED, or FAILED. ACTIVE insights can also be marked
* ACCEPTED explicitly. Content for ACCEPTED insights is immutable. ACCEPTED
* insights can only be marked ACCEPTED (which may update state metadata).
* </pre>
*
* <code>ACCEPTED = 2;</code>
*/
public static final int ACCEPTED_VALUE = 2;
/**
*
*
* <pre>
* Insight is dismissed. Content for DISMISSED insights can be updated by
* Google. DISMISSED insights can be marked as ACTIVE.
* </pre>
*
* <code>DISMISSED = 3;</code>
*/
public static final int DISMISSED_VALUE = 3;
public final int getNumber() {
if (this == UNRECOGNIZED) {
throw new java.lang.IllegalArgumentException(
"Can't get the number of an unknown enum value.");
}
return value;
}
/**
* @param value The numeric wire value of the corresponding enum entry.
* @return The enum associated with the given numeric wire value.
* @deprecated Use {@link #forNumber(int)} instead.
*/
@java.lang.Deprecated
public static State valueOf(int value) {
return forNumber(value);
}
/**
* @param value The numeric wire value of the corresponding enum entry.
* @return The enum associated with the given numeric wire value.
*/
public static State forNumber(int value) {
switch (value) {
case 0:
return STATE_UNSPECIFIED;
case 1:
return ACTIVE;
case 2:
return ACCEPTED;
case 3:
return DISMISSED;
default:
return null;
}
}
public static com.google.protobuf.Internal.EnumLiteMap<State> internalGetValueMap() {
return internalValueMap;
}
private static final com.google.protobuf.Internal.EnumLiteMap<State> internalValueMap =
new com.google.protobuf.Internal.EnumLiteMap<State>() {
public State findValueByNumber(int number) {
return State.forNumber(number);
}
};
public final com.google.protobuf.Descriptors.EnumValueDescriptor getValueDescriptor() {
if (this == UNRECOGNIZED) {
throw new java.lang.IllegalStateException(
"Can't get the descriptor of an unrecognized enum value.");
}
return getDescriptor().getValues().get(ordinal());
}
public final com.google.protobuf.Descriptors.EnumDescriptor getDescriptorForType() {
return getDescriptor();
}
public static final com.google.protobuf.Descriptors.EnumDescriptor getDescriptor() {
return com.google.cloud.recommender.v1beta1.InsightStateInfo.getDescriptor()
.getEnumTypes()
.get(0);
}
private static final State[] VALUES = values();
public static State valueOf(com.google.protobuf.Descriptors.EnumValueDescriptor desc) {
if (desc.getType() != getDescriptor()) {
throw new java.lang.IllegalArgumentException("EnumValueDescriptor is not for this type.");
}
if (desc.getIndex() == -1) {
return UNRECOGNIZED;
}
return VALUES[desc.getIndex()];
}
private final int value;
private State(int value) {
this.value = value;
}
// @@protoc_insertion_point(enum_scope:google.cloud.recommender.v1beta1.InsightStateInfo.State)
}
public static final int STATE_FIELD_NUMBER = 1;
private int state_ = 0;
/**
*
*
* <pre>
* Insight state.
* </pre>
*
* <code>.google.cloud.recommender.v1beta1.InsightStateInfo.State state = 1;</code>
*
* @return The enum numeric value on the wire for state.
*/
@java.lang.Override
public int getStateValue() {
return state_;
}
/**
*
*
* <pre>
* Insight state.
* </pre>
*
* <code>.google.cloud.recommender.v1beta1.InsightStateInfo.State state = 1;</code>
*
* @return The state.
*/
@java.lang.Override
public com.google.cloud.recommender.v1beta1.InsightStateInfo.State getState() {
com.google.cloud.recommender.v1beta1.InsightStateInfo.State result =
com.google.cloud.recommender.v1beta1.InsightStateInfo.State.forNumber(state_);
return result == null
? com.google.cloud.recommender.v1beta1.InsightStateInfo.State.UNRECOGNIZED
: result;
}
public static final int STATE_METADATA_FIELD_NUMBER = 2;
private static final class StateMetadataDefaultEntryHolder {
static final com.google.protobuf.MapEntry<java.lang.String, java.lang.String> defaultEntry =
com.google.protobuf.MapEntry.<java.lang.String, java.lang.String>newDefaultInstance(
com.google.cloud.recommender.v1beta1.InsightOuterClass
.internal_static_google_cloud_recommender_v1beta1_InsightStateInfo_StateMetadataEntry_descriptor,
com.google.protobuf.WireFormat.FieldType.STRING,
"",
com.google.protobuf.WireFormat.FieldType.STRING,
"");
}
@SuppressWarnings("serial")
private com.google.protobuf.MapField<java.lang.String, java.lang.String> stateMetadata_;
private com.google.protobuf.MapField<java.lang.String, java.lang.String>
internalGetStateMetadata() {
if (stateMetadata_ == null) {
return com.google.protobuf.MapField.emptyMapField(
StateMetadataDefaultEntryHolder.defaultEntry);
}
return stateMetadata_;
}
public int getStateMetadataCount() {
return internalGetStateMetadata().getMap().size();
}
/**
*
*
* <pre>
* A map of metadata for the state, provided by user or automations systems.
* </pre>
*
* <code>map<string, string> state_metadata = 2;</code>
*/
@java.lang.Override
public boolean containsStateMetadata(java.lang.String key) {
if (key == null) {
throw new NullPointerException("map key");
}
return internalGetStateMetadata().getMap().containsKey(key);
}
/** Use {@link #getStateMetadataMap()} instead. */
@java.lang.Override
@java.lang.Deprecated
public java.util.Map<java.lang.String, java.lang.String> getStateMetadata() {
return getStateMetadataMap();
}
/**
*
*
* <pre>
* A map of metadata for the state, provided by user or automations systems.
* </pre>
*
* <code>map<string, string> state_metadata = 2;</code>
*/
@java.lang.Override
public java.util.Map<java.lang.String, java.lang.String> getStateMetadataMap() {
return internalGetStateMetadata().getMap();
}
/**
*
*
* <pre>
* A map of metadata for the state, provided by user or automations systems.
* </pre>
*
* <code>map<string, string> state_metadata = 2;</code>
*/
@java.lang.Override
public /* nullable */ java.lang.String getStateMetadataOrDefault(
java.lang.String key,
/* nullable */
java.lang.String defaultValue) {
if (key == null) {
throw new NullPointerException("map key");
}
java.util.Map<java.lang.String, java.lang.String> map = internalGetStateMetadata().getMap();
return map.containsKey(key) ? map.get(key) : defaultValue;
}
/**
*
*
* <pre>
* A map of metadata for the state, provided by user or automations systems.
* </pre>
*
* <code>map<string, string> state_metadata = 2;</code>
*/
@java.lang.Override
public java.lang.String getStateMetadataOrThrow(java.lang.String key) {
if (key == null) {
throw new NullPointerException("map key");
}
java.util.Map<java.lang.String, java.lang.String> map = internalGetStateMetadata().getMap();
if (!map.containsKey(key)) {
throw new java.lang.IllegalArgumentException();
}
return map.get(key);
}
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
@java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
  // Proto3 semantics: the state enum (field 1) is written only when it
  // differs from the default STATE_UNSPECIFIED; the state_metadata map
  // follows as field 2, and preserved unknown fields are emitted last.
  if (state_
      != com.google.cloud.recommender.v1beta1.InsightStateInfo.State.STATE_UNSPECIFIED
          .getNumber()) {
    output.writeEnum(1, state_);
  }
  com.google.protobuf.GeneratedMessageV3.serializeStringMapTo(
      output, internalGetStateMetadata(), StateMetadataDefaultEntryHolder.defaultEntry, 2);
  getUnknownFields().writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
  // Cached wire size; -1 marks "not yet computed".
  int size = memoizedSize;
  if (size != -1) return size;
  size = 0;
  // The state enum contributes only when non-default, mirroring writeTo().
  if (state_
      != com.google.cloud.recommender.v1beta1.InsightStateInfo.State.STATE_UNSPECIFIED
          .getNumber()) {
    size += com.google.protobuf.CodedOutputStream.computeEnumSize(1, state_);
  }
  // Each map entry is sized as a nested key/value message on field 2.
  for (java.util.Map.Entry<java.lang.String, java.lang.String> entry :
      internalGetStateMetadata().getMap().entrySet()) {
    com.google.protobuf.MapEntry<java.lang.String, java.lang.String> stateMetadata__ =
        StateMetadataDefaultEntryHolder.defaultEntry
            .newBuilderForType()
            .setKey(entry.getKey())
            .setValue(entry.getValue())
            .build();
    size += com.google.protobuf.CodedOutputStream.computeMessageSize(2, stateMetadata__);
  }
  size += getUnknownFields().getSerializedSize();
  memoizedSize = size;
  return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
  // Value-based equality over the state enum, the state_metadata map,
  // and any preserved unknown fields.
  if (obj == this) {
    return true;
  }
  if (!(obj instanceof com.google.cloud.recommender.v1beta1.InsightStateInfo)) {
    return super.equals(obj); // non-matching type: defer to superclass
  }
  com.google.cloud.recommender.v1beta1.InsightStateInfo other =
      (com.google.cloud.recommender.v1beta1.InsightStateInfo) obj;
  if (state_ != other.state_) return false;
  if (!internalGetStateMetadata().equals(other.internalGetStateMetadata())) return false;
  if (!getUnknownFields().equals(other.getUnknownFields())) return false;
  return true;
}
@java.lang.Override
public int hashCode() {
  // Cached hash; 0 marks "not yet computed".
  if (memoizedHashCode != 0) {
    return memoizedHashCode;
  }
  int hash = 41;
  hash = (19 * hash) + getDescriptor().hashCode();
  hash = (37 * hash) + STATE_FIELD_NUMBER;
  hash = (53 * hash) + state_;
  // Empty map is skipped so an absent and an empty map hash identically,
  // keeping hashCode consistent with equals().
  if (!internalGetStateMetadata().getMap().isEmpty()) {
    hash = (37 * hash) + STATE_METADATA_FIELD_NUMBER;
    hash = (53 * hash) + internalGetStateMetadata().hashCode();
  }
  hash = (29 * hash) + getUnknownFields().hashCode();
  memoizedHashCode = hash;
  return hash;
}
public static com.google.cloud.recommender.v1beta1.InsightStateInfo parseFrom(
java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.recommender.v1beta1.InsightStateInfo parseFrom(
java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.recommender.v1beta1.InsightStateInfo parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.recommender.v1beta1.InsightStateInfo parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.recommender.v1beta1.InsightStateInfo parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.recommender.v1beta1.InsightStateInfo parseFrom(
byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.recommender.v1beta1.InsightStateInfo parseFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.recommender.v1beta1.InsightStateInfo parseFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.recommender.v1beta1.InsightStateInfo parseDelimitedFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
}
public static com.google.cloud.recommender.v1beta1.InsightStateInfo parseDelimitedFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.recommender.v1beta1.InsightStateInfo parseFrom(
com.google.protobuf.CodedInputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.recommender.v1beta1.InsightStateInfo parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() {
return newBuilder();
}
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(
com.google.cloud.recommender.v1beta1.InsightStateInfo prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
*
*
* <pre>
* Information related to insight state.
* </pre>
*
* Protobuf type {@code google.cloud.recommender.v1beta1.InsightStateInfo}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.cloud.recommender.v1beta1.InsightStateInfo)
com.google.cloud.recommender.v1beta1.InsightStateInfoOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.recommender.v1beta1.InsightOuterClass
.internal_static_google_cloud_recommender_v1beta1_InsightStateInfo_descriptor;
}
@SuppressWarnings({"rawtypes"})
protected com.google.protobuf.MapFieldReflectionAccessor internalGetMapFieldReflection(
int number) {
switch (number) {
case 2:
return internalGetStateMetadata();
default:
throw new RuntimeException("Invalid map field number: " + number);
}
}
@SuppressWarnings({"rawtypes"})
protected com.google.protobuf.MapFieldReflectionAccessor internalGetMutableMapFieldReflection(
int number) {
switch (number) {
case 2:
return internalGetMutableStateMetadata();
default:
throw new RuntimeException("Invalid map field number: " + number);
}
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.recommender.v1beta1.InsightOuterClass
.internal_static_google_cloud_recommender_v1beta1_InsightStateInfo_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.recommender.v1beta1.InsightStateInfo.class,
com.google.cloud.recommender.v1beta1.InsightStateInfo.Builder.class);
}
// Construct using com.google.cloud.recommender.v1beta1.InsightStateInfo.newBuilder()
private Builder() {}
private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
}
@java.lang.Override
public Builder clear() {
super.clear();
bitField0_ = 0;
state_ = 0;
internalGetMutableStateMetadata().clear();
return this;
}
@java.lang.Override
public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
return com.google.cloud.recommender.v1beta1.InsightOuterClass
.internal_static_google_cloud_recommender_v1beta1_InsightStateInfo_descriptor;
}
@java.lang.Override
public com.google.cloud.recommender.v1beta1.InsightStateInfo getDefaultInstanceForType() {
return com.google.cloud.recommender.v1beta1.InsightStateInfo.getDefaultInstance();
}
@java.lang.Override
public com.google.cloud.recommender.v1beta1.InsightStateInfo build() {
com.google.cloud.recommender.v1beta1.InsightStateInfo result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public com.google.cloud.recommender.v1beta1.InsightStateInfo buildPartial() {
com.google.cloud.recommender.v1beta1.InsightStateInfo result =
new com.google.cloud.recommender.v1beta1.InsightStateInfo(this);
if (bitField0_ != 0) {
buildPartial0(result);
}
onBuilt();
return result;
}
private void buildPartial0(com.google.cloud.recommender.v1beta1.InsightStateInfo result) {
int from_bitField0_ = bitField0_;
if (((from_bitField0_ & 0x00000001) != 0)) {
result.state_ = state_;
}
if (((from_bitField0_ & 0x00000002) != 0)) {
result.stateMetadata_ = internalGetStateMetadata();
result.stateMetadata_.makeImmutable();
}
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof com.google.cloud.recommender.v1beta1.InsightStateInfo) {
return mergeFrom((com.google.cloud.recommender.v1beta1.InsightStateInfo) other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(com.google.cloud.recommender.v1beta1.InsightStateInfo other) {
  if (other == com.google.cloud.recommender.v1beta1.InsightStateInfo.getDefaultInstance())
    return this;
  // Proto3 merge semantics: the scalar enum is copied only when non-default;
  // map entries from other are merged into this builder's map.
  if (other.state_ != 0) {
    setStateValue(other.getStateValue());
  }
  internalGetMutableStateMetadata().mergeFrom(other.internalGetStateMetadata());
  bitField0_ |= 0x00000002; // mark state_metadata as set
  this.mergeUnknownFields(other.getUnknownFields());
  onChanged();
  return this;
}
@java.lang.Override
public final boolean isInitialized() {
return true;
}
@java.lang.Override
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
case 8:
{
state_ = input.readEnum();
bitField0_ |= 0x00000001;
break;
} // case 8
case 18:
{
com.google.protobuf.MapEntry<java.lang.String, java.lang.String> stateMetadata__ =
input.readMessage(
StateMetadataDefaultEntryHolder.defaultEntry.getParserForType(),
extensionRegistry);
internalGetMutableStateMetadata()
.getMutableMap()
.put(stateMetadata__.getKey(), stateMetadata__.getValue());
bitField0_ |= 0x00000002;
break;
} // case 18
default:
{
if (!super.parseUnknownField(input, extensionRegistry, tag)) {
done = true; // was an endgroup tag
}
break;
} // default:
} // switch (tag)
} // while (!done)
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.unwrapIOException();
} finally {
onChanged();
} // finally
return this;
}
    // Bit 0 tracks whether 'state' was explicitly set; bit 1 tracks
    // 'state_metadata'.
    private int bitField0_;
    // Numeric wire form of the State enum; 0 is the proto default.
    private int state_ = 0;
    /**
     *
     *
     * <pre>
     * Insight state.
     * </pre>
     *
     * <code>.google.cloud.recommender.v1beta1.InsightStateInfo.State state = 1;</code>
     *
     * @return The enum numeric value on the wire for state.
     */
    @java.lang.Override
    public int getStateValue() {
      return state_;
    }
    /**
     *
     *
     * <pre>
     * Insight state.
     * </pre>
     *
     * <code>.google.cloud.recommender.v1beta1.InsightStateInfo.State state = 1;</code>
     *
     * @param value The enum numeric value on the wire for state to set.
     * @return This builder for chaining.
     */
    public Builder setStateValue(int value) {
      state_ = value;
      bitField0_ |= 0x00000001;
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Insight state.
     * </pre>
     *
     * <code>.google.cloud.recommender.v1beta1.InsightStateInfo.State state = 1;</code>
     *
     * @return The state.
     */
    @java.lang.Override
    public com.google.cloud.recommender.v1beta1.InsightStateInfo.State getState() {
      // forNumber returns null for values unknown to this generated code;
      // surface those as UNRECOGNIZED rather than null.
      com.google.cloud.recommender.v1beta1.InsightStateInfo.State result =
          com.google.cloud.recommender.v1beta1.InsightStateInfo.State.forNumber(state_);
      return result == null
          ? com.google.cloud.recommender.v1beta1.InsightStateInfo.State.UNRECOGNIZED
          : result;
    }
    /**
     *
     *
     * <pre>
     * Insight state.
     * </pre>
     *
     * <code>.google.cloud.recommender.v1beta1.InsightStateInfo.State state = 1;</code>
     *
     * @param value The state to set.
     * @return This builder for chaining.
     */
    public Builder setState(com.google.cloud.recommender.v1beta1.InsightStateInfo.State value) {
      if (value == null) {
        throw new NullPointerException();
      }
      bitField0_ |= 0x00000001;
      state_ = value.getNumber();
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Insight state.
     * </pre>
     *
     * <code>.google.cloud.recommender.v1beta1.InsightStateInfo.State state = 1;</code>
     *
     * @return This builder for chaining.
     */
    public Builder clearState() {
      // Clear the has-bit and restore the proto default value.
      bitField0_ = (bitField0_ & ~0x00000001);
      state_ = 0;
      onChanged();
      return this;
    }
    // Lazily-created backing field for the state_metadata map; null means
    // "empty, never mutated".
    private com.google.protobuf.MapField<java.lang.String, java.lang.String> stateMetadata_;
    // Read-only view; returns the shared empty map while the field is unset.
    private com.google.protobuf.MapField<java.lang.String, java.lang.String>
        internalGetStateMetadata() {
      if (stateMetadata_ == null) {
        return com.google.protobuf.MapField.emptyMapField(
            StateMetadataDefaultEntryHolder.defaultEntry);
      }
      return stateMetadata_;
    }
    // Mutable access: allocates the map on first use and copy-on-writes if
    // the current instance is shared/immutable, then marks the field set.
    private com.google.protobuf.MapField<java.lang.String, java.lang.String>
        internalGetMutableStateMetadata() {
      if (stateMetadata_ == null) {
        stateMetadata_ =
            com.google.protobuf.MapField.newMapField(StateMetadataDefaultEntryHolder.defaultEntry);
      }
      if (!stateMetadata_.isMutable()) {
        stateMetadata_ = stateMetadata_.copy();
      }
      bitField0_ |= 0x00000002;
      onChanged();
      return stateMetadata_;
    }
    /** @return the number of entries currently in the state_metadata map. */
    public int getStateMetadataCount() {
      return internalGetStateMetadata().getMap().size();
    }
    /**
     *
     *
     * <pre>
     * A map of metadata for the state, provided by user or automations systems.
     * </pre>
     *
     * <code>map<string, string> state_metadata = 2;</code>
     */
    @java.lang.Override
    public boolean containsStateMetadata(java.lang.String key) {
      if (key == null) {
        throw new NullPointerException("map key");
      }
      return internalGetStateMetadata().getMap().containsKey(key);
    }
    /** Use {@link #getStateMetadataMap()} instead. */
    @java.lang.Override
    @java.lang.Deprecated
    public java.util.Map<java.lang.String, java.lang.String> getStateMetadata() {
      return getStateMetadataMap();
    }
    /**
     *
     *
     * <pre>
     * A map of metadata for the state, provided by user or automations systems.
     * </pre>
     *
     * <code>map<string, string> state_metadata = 2;</code>
     */
    @java.lang.Override
    public java.util.Map<java.lang.String, java.lang.String> getStateMetadataMap() {
      return internalGetStateMetadata().getMap();
    }
    /**
     *
     *
     * <pre>
     * A map of metadata for the state, provided by user or automations systems.
     * </pre>
     *
     * <code>map<string, string> state_metadata = 2;</code>
     */
    @java.lang.Override
    public /* nullable */ java.lang.String getStateMetadataOrDefault(
        java.lang.String key,
        /* nullable */
        java.lang.String defaultValue) {
      if (key == null) {
        throw new NullPointerException("map key");
      }
      java.util.Map<java.lang.String, java.lang.String> map = internalGetStateMetadata().getMap();
      return map.containsKey(key) ? map.get(key) : defaultValue;
    }
    /**
     *
     *
     * <pre>
     * A map of metadata for the state, provided by user or automations systems.
     * </pre>
     *
     * <code>map<string, string> state_metadata = 2;</code>
     */
    @java.lang.Override
    public java.lang.String getStateMetadataOrThrow(java.lang.String key) {
      if (key == null) {
        throw new NullPointerException("map key");
      }
      java.util.Map<java.lang.String, java.lang.String> map = internalGetStateMetadata().getMap();
      if (!map.containsKey(key)) {
        throw new java.lang.IllegalArgumentException();
      }
      return map.get(key);
    }
    /** Removes every state_metadata entry and clears the field's has-bit. */
    public Builder clearStateMetadata() {
      bitField0_ = (bitField0_ & ~0x00000002);
      internalGetMutableStateMetadata().getMutableMap().clear();
      return this;
    }
    /**
     *
     *
     * <pre>
     * A map of metadata for the state, provided by user or automations systems.
     * </pre>
     *
     * <code>map<string, string> state_metadata = 2;</code>
     */
    public Builder removeStateMetadata(java.lang.String key) {
      if (key == null) {
        throw new NullPointerException("map key");
      }
      internalGetMutableStateMetadata().getMutableMap().remove(key);
      return this;
    }
    /** Use alternate mutation accessors instead. */
    @java.lang.Deprecated
    public java.util.Map<java.lang.String, java.lang.String> getMutableStateMetadata() {
      bitField0_ |= 0x00000002;
      return internalGetMutableStateMetadata().getMutableMap();
    }
    /**
     *
     *
     * <pre>
     * A map of metadata for the state, provided by user or automations systems.
     * </pre>
     *
     * <code>map<string, string> state_metadata = 2;</code>
     */
    public Builder putStateMetadata(java.lang.String key, java.lang.String value) {
      // Proto map fields are null-hostile for both keys and values.
      if (key == null) {
        throw new NullPointerException("map key");
      }
      if (value == null) {
        throw new NullPointerException("map value");
      }
      internalGetMutableStateMetadata().getMutableMap().put(key, value);
      bitField0_ |= 0x00000002;
      return this;
    }
    /**
     *
     *
     * <pre>
     * A map of metadata for the state, provided by user or automations systems.
     * </pre>
     *
     * <code>map<string, string> state_metadata = 2;</code>
     */
    public Builder putAllStateMetadata(java.util.Map<java.lang.String, java.lang.String> values) {
      internalGetMutableStateMetadata().getMutableMap().putAll(values);
      bitField0_ |= 0x00000002;
      return this;
    }
    // Unknown-field handling is delegated unchanged to the generated base
    // builder.
    @java.lang.Override
    public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.setUnknownFields(unknownFields);
    }
    @java.lang.Override
    public final Builder mergeUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.mergeUnknownFields(unknownFields);
    }
// @@protoc_insertion_point(builder_scope:google.cloud.recommender.v1beta1.InsightStateInfo)
}
// @@protoc_insertion_point(class_scope:google.cloud.recommender.v1beta1.InsightStateInfo)
  // Immutable singleton shared by all callers; created eagerly at class load.
  private static final com.google.cloud.recommender.v1beta1.InsightStateInfo DEFAULT_INSTANCE;
  static {
    DEFAULT_INSTANCE = new com.google.cloud.recommender.v1beta1.InsightStateInfo();
  }
  /** @return the shared default (all-fields-unset) instance. */
  public static com.google.cloud.recommender.v1beta1.InsightStateInfo getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }
  // Parser for InsightStateInfo. On failure it attaches the partially-built
  // message to the exception (setUnfinishedMessage) so callers can inspect
  // whatever was successfully read before the error.
  private static final com.google.protobuf.Parser<InsightStateInfo> PARSER =
      new com.google.protobuf.AbstractParser<InsightStateInfo>() {
        @java.lang.Override
        public InsightStateInfo parsePartialFrom(
            com.google.protobuf.CodedInputStream input,
            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
            throws com.google.protobuf.InvalidProtocolBufferException {
          Builder builder = newBuilder();
          try {
            builder.mergeFrom(input, extensionRegistry);
          } catch (com.google.protobuf.InvalidProtocolBufferException e) {
            throw e.setUnfinishedMessage(builder.buildPartial());
          } catch (com.google.protobuf.UninitializedMessageException e) {
            throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
          } catch (java.io.IOException e) {
            // Wrap plain I/O errors so the parse API only surfaces
            // InvalidProtocolBufferException.
            throw new com.google.protobuf.InvalidProtocolBufferException(e)
                .setUnfinishedMessage(builder.buildPartial());
          }
          return builder.buildPartial();
        }
      };
  /** @return the shared parser for this message type. */
  public static com.google.protobuf.Parser<InsightStateInfo> parser() {
    return PARSER;
  }
  @java.lang.Override
  public com.google.protobuf.Parser<InsightStateInfo> getParserForType() {
    return PARSER;
  }
  @java.lang.Override
  public com.google.cloud.recommender.v1beta1.InsightStateInfo getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }
}
|
apache/hadoop | 37,137 | hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/client/impl/DfsClientConf.java | /**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hdfs.client.impl;
import org.apache.hadoop.classification.VisibleForTesting;
import org.apache.hadoop.util.Preconditions;
import org.apache.hadoop.HadoopIllegalArgumentException;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.CommonConfigurationKeysPublic;
import org.apache.hadoop.fs.Options.ChecksumCombineMode;
import org.apache.hadoop.fs.Options.ChecksumOpt;
import org.apache.hadoop.fs.permission.FsPermission;
import org.apache.hadoop.hdfs.ReplicaAccessorBuilder;
import org.apache.hadoop.hdfs.client.HdfsClientConfigKeys;
import org.apache.hadoop.hdfs.protocol.HdfsConstants;
import org.apache.hadoop.hdfs.util.ByteArrayManager;
import org.apache.hadoop.ipc.Client;
import org.apache.hadoop.util.DataChecksum;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.concurrent.TimeUnit;
import static org.apache.hadoop.hdfs.client.HdfsClientConfigKeys.BlockWrite;
import static org.apache.hadoop.hdfs.client.HdfsClientConfigKeys.DFS_BLOCK_SIZE_DEFAULT;
import static org.apache.hadoop.hdfs.client.HdfsClientConfigKeys.DFS_BLOCK_SIZE_KEY;
import static org.apache.hadoop.hdfs.client.HdfsClientConfigKeys.DFS_BYTES_PER_CHECKSUM_DEFAULT;
import static org.apache.hadoop.hdfs.client.HdfsClientConfigKeys.DFS_BYTES_PER_CHECKSUM_KEY;
import static org.apache.hadoop.hdfs.client.HdfsClientConfigKeys.DFS_CHECKSUM_COMBINE_MODE_DEFAULT;
import static org.apache.hadoop.hdfs.client.HdfsClientConfigKeys.DFS_CHECKSUM_COMBINE_MODE_KEY;
import static org.apache.hadoop.hdfs.client.HdfsClientConfigKeys.DFS_CHECKSUM_EC_SOCKET_TIMEOUT_DEFAULT;
import static org.apache.hadoop.hdfs.client.HdfsClientConfigKeys.DFS_CHECKSUM_EC_SOCKET_TIMEOUT_KEY;
import static org.apache.hadoop.hdfs.client.HdfsClientConfigKeys.DFS_CHECKSUM_TYPE_DEFAULT;
import static org.apache.hadoop.hdfs.client.HdfsClientConfigKeys.DFS_CHECKSUM_TYPE_KEY;
import static org.apache.hadoop.hdfs.client.HdfsClientConfigKeys.DFS_CLIENT_CACHED_CONN_RETRY_DEFAULT;
import static org.apache.hadoop.hdfs.client.HdfsClientConfigKeys.DFS_CLIENT_CACHED_CONN_RETRY_KEY;
import static org.apache.hadoop.hdfs.client.HdfsClientConfigKeys.DFS_CLIENT_DATANODE_RESTART_TIMEOUT_DEFAULT;
import static org.apache.hadoop.hdfs.client.HdfsClientConfigKeys.DFS_CLIENT_DATANODE_RESTART_TIMEOUT_KEY;
import static org.apache.hadoop.hdfs.client.HdfsClientConfigKeys.DFS_CLIENT_DEAD_NODE_DETECTION_ENABLED_DEFAULT;
import static org.apache.hadoop.hdfs.client.HdfsClientConfigKeys.DFS_CLIENT_DEAD_NODE_DETECTION_ENABLED_KEY;
import static org.apache.hadoop.hdfs.client.HdfsClientConfigKeys.DFS_CLIENT_DOMAIN_SOCKET_DATA_TRAFFIC;
import static org.apache.hadoop.hdfs.client.HdfsClientConfigKeys.DFS_CLIENT_DOMAIN_SOCKET_DATA_TRAFFIC_DEFAULT;
import static org.apache.hadoop.hdfs.client.HdfsClientConfigKeys.DFS_CLIENT_KEY_PROVIDER_CACHE_EXPIRY_DEFAULT;
import static org.apache.hadoop.hdfs.client.HdfsClientConfigKeys.DFS_CLIENT_KEY_PROVIDER_CACHE_EXPIRY_MS;
import static org.apache.hadoop.hdfs.client.HdfsClientConfigKeys.DFS_CLIENT_MARK_SLOWNODE_AS_BADNODE_THRESHOLD_DEFAULT;
import static org.apache.hadoop.hdfs.client.HdfsClientConfigKeys.DFS_CLIENT_MARK_SLOWNODE_AS_BADNODE_THRESHOLD_KEY;
import static org.apache.hadoop.hdfs.client.HdfsClientConfigKeys.DFS_CLIENT_MAX_BLOCK_ACQUIRE_FAILURES_DEFAULT;
import static org.apache.hadoop.hdfs.client.HdfsClientConfigKeys.DFS_CLIENT_MAX_BLOCK_ACQUIRE_FAILURES_KEY;
import static org.apache.hadoop.hdfs.client.HdfsClientConfigKeys.DFS_CLIENT_READ_USE_CACHE_PRIORITY;
import static org.apache.hadoop.hdfs.client.HdfsClientConfigKeys.DFS_CLIENT_READ_USE_CACHE_PRIORITY_DEFAULT;
import static org.apache.hadoop.hdfs.client.HdfsClientConfigKeys.DFS_CLIENT_SLOW_IO_WARNING_THRESHOLD_DEFAULT;
import static org.apache.hadoop.hdfs.client.HdfsClientConfigKeys.DFS_CLIENT_SLOW_IO_WARNING_THRESHOLD_KEY;
import static org.apache.hadoop.hdfs.client.HdfsClientConfigKeys.DFS_CLIENT_SOCKET_CACHE_CAPACITY_DEFAULT;
import static org.apache.hadoop.hdfs.client.HdfsClientConfigKeys.DFS_CLIENT_SOCKET_CACHE_CAPACITY_KEY;
import static org.apache.hadoop.hdfs.client.HdfsClientConfigKeys.DFS_CLIENT_SOCKET_CACHE_EXPIRY_MSEC_DEFAULT;
import static org.apache.hadoop.hdfs.client.HdfsClientConfigKeys.DFS_CLIENT_SOCKET_CACHE_EXPIRY_MSEC_KEY;
import static org.apache.hadoop.hdfs.client.HdfsClientConfigKeys.DFS_CLIENT_SOCKET_SEND_BUFFER_SIZE_DEFAULT;
import static org.apache.hadoop.hdfs.client.HdfsClientConfigKeys.DFS_CLIENT_SOCKET_SEND_BUFFER_SIZE_KEY;
import static org.apache.hadoop.hdfs.client.HdfsClientConfigKeys.DFS_CLIENT_SOCKET_TIMEOUT_KEY;
import static org.apache.hadoop.hdfs.client.HdfsClientConfigKeys.DFS_CLIENT_USE_DN_HOSTNAME;
import static org.apache.hadoop.hdfs.client.HdfsClientConfigKeys.DFS_CLIENT_USE_DN_HOSTNAME_DEFAULT;
import static org.apache.hadoop.hdfs.client.HdfsClientConfigKeys.DFS_CLIENT_USE_LEGACY_BLOCKREADERLOCAL;
import static org.apache.hadoop.hdfs.client.HdfsClientConfigKeys.DFS_CLIENT_USE_LEGACY_BLOCKREADERLOCAL_DEFAULT;
import static org.apache.hadoop.hdfs.client.HdfsClientConfigKeys.DFS_CLIENT_WRITE_PACKET_SIZE_DEFAULT;
import static org.apache.hadoop.hdfs.client.HdfsClientConfigKeys.DFS_CLIENT_WRITE_PACKET_SIZE_KEY;
import static org.apache.hadoop.hdfs.client.HdfsClientConfigKeys.DFS_DATANODE_SOCKET_WRITE_TIMEOUT_KEY;
import static org.apache.hadoop.hdfs.client.HdfsClientConfigKeys.DFS_DATA_TRANSFER_CLIENT_TCPNODELAY_DEFAULT;
import static org.apache.hadoop.hdfs.client.HdfsClientConfigKeys.DFS_DATA_TRANSFER_CLIENT_TCPNODELAY_KEY;
import static org.apache.hadoop.hdfs.client.HdfsClientConfigKeys.DFS_DOMAIN_SOCKET_DISABLE_INTERVAL_SECOND_DEFAULT;
import static org.apache.hadoop.hdfs.client.HdfsClientConfigKeys.DFS_DOMAIN_SOCKET_DISABLE_INTERVAL_SECOND_KEY;
import static org.apache.hadoop.hdfs.client.HdfsClientConfigKeys.DFS_DOMAIN_SOCKET_PATH_DEFAULT;
import static org.apache.hadoop.hdfs.client.HdfsClientConfigKeys.DFS_DOMAIN_SOCKET_PATH_KEY;
import static org.apache.hadoop.hdfs.client.HdfsClientConfigKeys.DFS_REPLICATION_DEFAULT;
import static org.apache.hadoop.hdfs.client.HdfsClientConfigKeys.DFS_REPLICATION_KEY;
import static org.apache.hadoop.hdfs.client.HdfsClientConfigKeys.DFS_SHORT_CIRCUIT_SHARED_MEMORY_WATCHER_INTERRUPT_CHECK_MS;
import static org.apache.hadoop.hdfs.client.HdfsClientConfigKeys.DFS_SHORT_CIRCUIT_SHARED_MEMORY_WATCHER_INTERRUPT_CHECK_MS_DEFAULT;
import static org.apache.hadoop.hdfs.client.HdfsClientConfigKeys.Failover;
import static org.apache.hadoop.hdfs.client.HdfsClientConfigKeys.HedgedRead;
import static org.apache.hadoop.hdfs.client.HdfsClientConfigKeys.Mmap;
import static org.apache.hadoop.hdfs.client.HdfsClientConfigKeys.Read;
import static org.apache.hadoop.hdfs.client.HdfsClientConfigKeys.Retry;
import static org.apache.hadoop.hdfs.client.HdfsClientConfigKeys.ShortCircuit;
import static org.apache.hadoop.hdfs.client.HdfsClientConfigKeys.Write;
/**
* DFSClient configuration.
*/
public class DfsClientConf {
  private static final Logger LOG = LoggerFactory.getLogger(DfsClientConf
      .class);
  // Every field is final: a DfsClientConf instance is an immutable snapshot
  // of the client configuration taken at construction time.
  private final int hdfsTimeout; // timeout value for a DFS operation.
  // Failover / retry settings.
  private final int maxFailoverAttempts;
  private final int maxRetryAttempts;
  private final int maxPipelineRecoveryRetries;
  private final int failoverSleepBaseMillis;
  private final int failoverSleepMaxMillis;
  private final int maxBlockAcquireFailures;
  private final int datanodeSocketWriteTimeout;
  private final int ioBufferSize;
  // Checksum settings.
  private final ChecksumOpt defaultChecksumOpt;
  private final ChecksumCombineMode checksumCombineMode;
  private final int checksumEcSocketTimeout;
  // Write-path settings.
  private final int writePacketSize;
  private final int writeMaxPackets;
  private final ByteArrayManager.Conf writeByteArrayManagerConf;
  private final int socketTimeout;
  private final int socketSendBufferSize;
  private final long excludedNodesCacheExpiry;
  /** Wait time window (in msec) if BlockMissingException is caught. */
  private final int timeWindow;
  private final int numCachedConnRetry;
  private final int numBlockWriteRetry;
  private final int numBlockWriteLocateFollowingRetry;
  private final int blockWriteLocateFollowingInitialDelayMs;
  private final int blockWriteLocateFollowingMaxDelayMs;
  private final long defaultBlockSize;
  private final long prefetchSize;
  private final boolean uriCacheEnabled;
  private final short defaultReplication;
  private final String taskId;
  private final FsPermission uMask;
  private final boolean connectToDnViaHostname;
  private final int retryTimesForGetLastBlockLength;
  private final int retryIntervalForGetLastBlockLength;
  private final long datanodeRestartTimeout;
  private final long slowIoWarningThresholdMs;
  private final int markSlowNodeAsBadNodeThreshold;
  /** wait time window before refreshing blocklocation for inputstream. */
  private final long refreshReadBlockLocationsMS;
  private final boolean refreshReadBlockLocationsAutomatically;
  // Short-circuit and hedged-read settings.
  private final ShortCircuitConf shortCircuitConf;
  private final int clientShortCircuitNum;
  private final long hedgedReadThresholdMillis;
  private final int hedgedReadThreadpoolSize;
  private final List<Class<? extends ReplicaAccessorBuilder>>
      replicaAccessorBuilderClasses;
  private final int stripedReadThreadpoolSize;
  private final boolean dataTransferTcpNoDelay;
  private final boolean readUseCachePriority;
  private final boolean deadNodeDetectionEnabled;
  private final long leaseHardLimitPeriod;
  private final boolean recoverLeaseOnCloseException;
public DfsClientConf(Configuration conf) {
// The hdfsTimeout is currently the same as the ipc timeout
hdfsTimeout = Client.getRpcTimeout(conf);
maxRetryAttempts = conf.getInt(
Retry.MAX_ATTEMPTS_KEY,
Retry.MAX_ATTEMPTS_DEFAULT);
timeWindow = conf.getInt(
Retry.WINDOW_BASE_KEY,
Retry.WINDOW_BASE_DEFAULT);
retryTimesForGetLastBlockLength = conf.getInt(
Retry.TIMES_GET_LAST_BLOCK_LENGTH_KEY,
Retry.TIMES_GET_LAST_BLOCK_LENGTH_DEFAULT);
retryIntervalForGetLastBlockLength = conf.getInt(
Retry.INTERVAL_GET_LAST_BLOCK_LENGTH_KEY,
Retry.INTERVAL_GET_LAST_BLOCK_LENGTH_DEFAULT);
maxFailoverAttempts = conf.getInt(
Failover.MAX_ATTEMPTS_KEY,
Failover.MAX_ATTEMPTS_DEFAULT);
failoverSleepBaseMillis = conf.getInt(
Failover.SLEEPTIME_BASE_KEY,
Failover.SLEEPTIME_BASE_DEFAULT);
failoverSleepMaxMillis = conf.getInt(
Failover.SLEEPTIME_MAX_KEY,
Failover.SLEEPTIME_MAX_DEFAULT);
maxBlockAcquireFailures = conf.getInt(
DFS_CLIENT_MAX_BLOCK_ACQUIRE_FAILURES_KEY,
DFS_CLIENT_MAX_BLOCK_ACQUIRE_FAILURES_DEFAULT);
datanodeSocketWriteTimeout = conf.getInt(
DFS_DATANODE_SOCKET_WRITE_TIMEOUT_KEY,
HdfsConstants.WRITE_TIMEOUT);
ioBufferSize = conf.getInt(
CommonConfigurationKeysPublic.IO_FILE_BUFFER_SIZE_KEY,
CommonConfigurationKeysPublic.IO_FILE_BUFFER_SIZE_DEFAULT);
defaultChecksumOpt = getChecksumOptFromConf(conf);
checksumCombineMode = getChecksumCombineModeFromConf(conf);
checksumEcSocketTimeout = conf.getInt(DFS_CHECKSUM_EC_SOCKET_TIMEOUT_KEY,
DFS_CHECKSUM_EC_SOCKET_TIMEOUT_DEFAULT);
dataTransferTcpNoDelay = conf.getBoolean(
DFS_DATA_TRANSFER_CLIENT_TCPNODELAY_KEY,
DFS_DATA_TRANSFER_CLIENT_TCPNODELAY_DEFAULT);
socketTimeout = conf.getInt(DFS_CLIENT_SOCKET_TIMEOUT_KEY,
HdfsConstants.READ_TIMEOUT);
socketSendBufferSize = conf.getInt(DFS_CLIENT_SOCKET_SEND_BUFFER_SIZE_KEY,
DFS_CLIENT_SOCKET_SEND_BUFFER_SIZE_DEFAULT);
/** dfs.write.packet.size is an internal config variable */
writePacketSize = conf.getInt(
DFS_CLIENT_WRITE_PACKET_SIZE_KEY,
DFS_CLIENT_WRITE_PACKET_SIZE_DEFAULT);
writeMaxPackets = conf.getInt(
Write.MAX_PACKETS_IN_FLIGHT_KEY,
Write.MAX_PACKETS_IN_FLIGHT_DEFAULT);
writeByteArrayManagerConf = loadWriteByteArrayManagerConf(conf);
defaultBlockSize = conf.getLongBytes(DFS_BLOCK_SIZE_KEY,
DFS_BLOCK_SIZE_DEFAULT);
defaultReplication = (short) conf.getInt(
DFS_REPLICATION_KEY, DFS_REPLICATION_DEFAULT);
taskId = conf.get("mapreduce.task.attempt.id", "NONMAPREDUCE");
excludedNodesCacheExpiry = conf.getLong(
Write.EXCLUDE_NODES_CACHE_EXPIRY_INTERVAL_KEY,
Write.EXCLUDE_NODES_CACHE_EXPIRY_INTERVAL_DEFAULT);
prefetchSize = conf.getLong(Read.PREFETCH_SIZE_KEY,
10 * defaultBlockSize);
uriCacheEnabled = conf.getBoolean(Read.URI_CACHE_KEY,
Read.URI_CACHE_DEFAULT);
numCachedConnRetry = conf.getInt(DFS_CLIENT_CACHED_CONN_RETRY_KEY,
DFS_CLIENT_CACHED_CONN_RETRY_DEFAULT);
numBlockWriteRetry = conf.getInt(
BlockWrite.RETRIES_KEY,
BlockWrite.RETRIES_DEFAULT);
numBlockWriteLocateFollowingRetry = conf.getInt(
BlockWrite.LOCATEFOLLOWINGBLOCK_RETRIES_KEY,
BlockWrite.LOCATEFOLLOWINGBLOCK_RETRIES_DEFAULT);
blockWriteLocateFollowingInitialDelayMs = conf.getInt(
BlockWrite.LOCATEFOLLOWINGBLOCK_INITIAL_DELAY_MS_KEY,
BlockWrite.LOCATEFOLLOWINGBLOCK_INITIAL_DELAY_MS_DEFAULT);
blockWriteLocateFollowingMaxDelayMs = conf.getInt(
BlockWrite.LOCATEFOLLOWINGBLOCK_MAX_DELAY_MS_KEY,
BlockWrite.LOCATEFOLLOWINGBLOCK_MAX_DELAY_MS_DEFAULT);
uMask = FsPermission.getUMask(conf);
connectToDnViaHostname = conf.getBoolean(DFS_CLIENT_USE_DN_HOSTNAME,
DFS_CLIENT_USE_DN_HOSTNAME_DEFAULT);
datanodeRestartTimeout = conf.getTimeDuration(
DFS_CLIENT_DATANODE_RESTART_TIMEOUT_KEY,
DFS_CLIENT_DATANODE_RESTART_TIMEOUT_DEFAULT,
TimeUnit.SECONDS, TimeUnit.MILLISECONDS);
slowIoWarningThresholdMs = conf.getLong(
DFS_CLIENT_SLOW_IO_WARNING_THRESHOLD_KEY,
DFS_CLIENT_SLOW_IO_WARNING_THRESHOLD_DEFAULT);
readUseCachePriority = conf.getBoolean(DFS_CLIENT_READ_USE_CACHE_PRIORITY,
DFS_CLIENT_READ_USE_CACHE_PRIORITY_DEFAULT);
markSlowNodeAsBadNodeThreshold = conf.getInt(
DFS_CLIENT_MARK_SLOWNODE_AS_BADNODE_THRESHOLD_KEY,
DFS_CLIENT_MARK_SLOWNODE_AS_BADNODE_THRESHOLD_DEFAULT);
refreshReadBlockLocationsMS = conf.getLong(
HdfsClientConfigKeys.DFS_CLIENT_REFRESH_READ_BLOCK_LOCATIONS_MS_KEY,
HdfsClientConfigKeys.
DFS_CLIENT_REFRESH_READ_BLOCK_LOCATIONS_MS_DEFAULT);
refreshReadBlockLocationsAutomatically = conf.getBoolean(
HdfsClientConfigKeys.DFS_CLIENT_REFRESH_READ_BLOCK_LOCATIONS_AUTOMATICALLY_KEY,
HdfsClientConfigKeys.DFS_CLIENT_REFRESH_READ_BLOCK_LOCATIONS_AUTOMATICALLY_DEFAULT);
hedgedReadThresholdMillis = conf.getLong(
HedgedRead.THRESHOLD_MILLIS_KEY,
HedgedRead.THRESHOLD_MILLIS_DEFAULT);
hedgedReadThreadpoolSize = conf.getInt(
HdfsClientConfigKeys.HedgedRead.THREADPOOL_SIZE_KEY,
HdfsClientConfigKeys.HedgedRead.THREADPOOL_SIZE_DEFAULT);
deadNodeDetectionEnabled =
conf.getBoolean(DFS_CLIENT_DEAD_NODE_DETECTION_ENABLED_KEY,
DFS_CLIENT_DEAD_NODE_DETECTION_ENABLED_DEFAULT);
stripedReadThreadpoolSize = conf.getInt(
HdfsClientConfigKeys.StripedRead.THREADPOOL_SIZE_KEY,
HdfsClientConfigKeys.StripedRead.THREADPOOL_SIZE_DEFAULT);
Preconditions.checkArgument(stripedReadThreadpoolSize > 0, "The value of " +
HdfsClientConfigKeys.StripedRead.THREADPOOL_SIZE_KEY +
" must be greater than 0.");
replicaAccessorBuilderClasses = loadReplicaAccessorBuilderClasses(conf);
leaseHardLimitPeriod =
conf.getLong(HdfsClientConfigKeys.DFS_LEASE_HARDLIMIT_KEY,
HdfsClientConfigKeys.DFS_LEASE_HARDLIMIT_DEFAULT) * 1000;
shortCircuitConf = new ShortCircuitConf(conf);
clientShortCircuitNum = conf.getInt(
HdfsClientConfigKeys.DFS_CLIENT_SHORT_CIRCUIT_NUM,
HdfsClientConfigKeys.DFS_CLIENT_SHORT_CIRCUIT_NUM_DEFAULT);
Preconditions.checkArgument(clientShortCircuitNum >= 1,
HdfsClientConfigKeys.DFS_CLIENT_SHORT_CIRCUIT_NUM +
"can't be less then 1.");
Preconditions.checkArgument(clientShortCircuitNum <= 5,
HdfsClientConfigKeys.DFS_CLIENT_SHORT_CIRCUIT_NUM +
"can't be more then 5.");
maxPipelineRecoveryRetries = conf.getInt(
HdfsClientConfigKeys.DFS_CLIENT_PIPELINE_RECOVERY_MAX_RETRIES,
HdfsClientConfigKeys.DFS_CLIENT_PIPELINE_RECOVERY_MAX_RETRIES_DEFAULT
);
recoverLeaseOnCloseException = conf.getBoolean(
Write.RECOVER_LEASE_ON_CLOSE_EXCEPTION_KEY,
Write.RECOVER_LEASE_ON_CLOSE_EXCEPTION_DEFAULT
);
}
private ByteArrayManager.Conf loadWriteByteArrayManagerConf(
Configuration conf) {
final boolean byteArrayManagerEnabled = conf.getBoolean(
Write.ByteArrayManager.ENABLED_KEY,
Write.ByteArrayManager.ENABLED_DEFAULT);
if (!byteArrayManagerEnabled) {
return null;
}
final int countThreshold = conf.getInt(
Write.ByteArrayManager.COUNT_THRESHOLD_KEY,
Write.ByteArrayManager.COUNT_THRESHOLD_DEFAULT);
final int countLimit = conf.getInt(
Write.ByteArrayManager.COUNT_LIMIT_KEY,
Write.ByteArrayManager.COUNT_LIMIT_DEFAULT);
final long countResetTimePeriodMs = conf.getLong(
Write.ByteArrayManager.COUNT_RESET_TIME_PERIOD_MS_KEY,
Write.ByteArrayManager.COUNT_RESET_TIME_PERIOD_MS_DEFAULT);
return new ByteArrayManager.Conf(
countThreshold, countLimit, countResetTimePeriodMs);
}
@SuppressWarnings("unchecked")
private List<Class<? extends ReplicaAccessorBuilder>>
loadReplicaAccessorBuilderClasses(Configuration conf) {
String[] classNames = conf.getTrimmedStrings(
HdfsClientConfigKeys.REPLICA_ACCESSOR_BUILDER_CLASSES_KEY);
if (classNames.length == 0) {
return Collections.emptyList();
}
ArrayList<Class<? extends ReplicaAccessorBuilder>> classes =
new ArrayList<>();
ClassLoader classLoader = Thread.currentThread().getContextClassLoader();
for (String className: classNames) {
try {
Class<? extends ReplicaAccessorBuilder> cls =
(Class<? extends ReplicaAccessorBuilder>)
classLoader.loadClass(className);
classes.add(cls);
} catch (Throwable t) {
LOG.warn("Unable to load {}", className, t);
}
}
return classes;
}
private static DataChecksum.Type getChecksumType(Configuration conf) {
final String checksum = conf.get(
DFS_CHECKSUM_TYPE_KEY,
DFS_CHECKSUM_TYPE_DEFAULT);
try {
return DataChecksum.Type.valueOf(checksum);
} catch(IllegalArgumentException iae) {
LOG.warn("Bad checksum type: {}. Using default {}", checksum,
DFS_CHECKSUM_TYPE_DEFAULT);
return DataChecksum.Type.valueOf(
DFS_CHECKSUM_TYPE_DEFAULT);
}
}
private static ChecksumCombineMode getChecksumCombineModeFromConf(
Configuration conf) {
final String mode = conf.get(
DFS_CHECKSUM_COMBINE_MODE_KEY,
DFS_CHECKSUM_COMBINE_MODE_DEFAULT);
try {
return ChecksumCombineMode.valueOf(mode);
} catch(IllegalArgumentException iae) {
LOG.warn("Bad checksum combine mode: {}. Using default {}", mode,
DFS_CHECKSUM_COMBINE_MODE_DEFAULT);
return ChecksumCombineMode.valueOf(
DFS_CHECKSUM_COMBINE_MODE_DEFAULT);
}
}
// Construct a checksum option from conf
public static ChecksumOpt getChecksumOptFromConf(Configuration conf) {
DataChecksum.Type type = getChecksumType(conf);
int bytesPerChecksum = conf.getInt(DFS_BYTES_PER_CHECKSUM_KEY,
DFS_BYTES_PER_CHECKSUM_DEFAULT);
return new ChecksumOpt(type, bytesPerChecksum);
}
/** create a DataChecksum with the given option. */
public DataChecksum createChecksum(ChecksumOpt userOpt) {
// Fill in any missing field with the default.
ChecksumOpt opt = ChecksumOpt.processChecksumOpt(
defaultChecksumOpt, userOpt);
DataChecksum dataChecksum = DataChecksum.newDataChecksum(
opt.getChecksumType(),
opt.getBytesPerChecksum());
if (dataChecksum == null) {
throw new HadoopIllegalArgumentException("Invalid checksum type: userOpt="
+ userOpt + ", default=" + defaultChecksumOpt
+ ", effective=null");
}
return dataChecksum;
}
  /**
   * @return the blockWriteLocateFollowingInitialDelayMs
   */
  @VisibleForTesting
  public int getBlockWriteLocateFollowingInitialDelayMs() {
    return blockWriteLocateFollowingInitialDelayMs;
  }
  /**
   * @return the blockWriteLocateFollowingMaxDelayMs
   */
  public int getBlockWriteLocateFollowingMaxDelayMs() {
    return blockWriteLocateFollowingMaxDelayMs;
  }
  /**
   * @return the hdfsTimeout (shared with the IPC client timeout)
   */
  public int getHdfsTimeout() {
    return hdfsTimeout;
  }
  /**
   * @return the maxFailoverAttempts
   */
  public int getMaxFailoverAttempts() {
    return maxFailoverAttempts;
  }
  /**
   * @return the maxRetryAttempts
   */
  public int getMaxRetryAttempts() {
    return maxRetryAttempts;
  }
  /**
   * @return the failoverSleepBaseMillis
   */
  public int getFailoverSleepBaseMillis() {
    return failoverSleepBaseMillis;
  }
  /**
   * @return the failoverSleepMaxMillis
   */
  public int getFailoverSleepMaxMillis() {
    return failoverSleepMaxMillis;
  }
  /**
   * @return the maxBlockAcquireFailures
   */
  public int getMaxBlockAcquireFailures() {
    return maxBlockAcquireFailures;
  }
  /**
   * @return the datanodeSocketWriteTimeout
   */
  public int getDatanodeSocketWriteTimeout() {
    return datanodeSocketWriteTimeout;
  }
  /**
   * @return the ioBufferSize
   */
  public int getIoBufferSize() {
    return ioBufferSize;
  }
  /**
   * @return the defaultChecksumOpt
   */
  public ChecksumOpt getDefaultChecksumOpt() {
    return defaultChecksumOpt;
  }
  /**
   * @return the checksumCombineMode
   */
  public ChecksumCombineMode getChecksumCombineMode() {
    return checksumCombineMode;
  }
  /**
   * @return the checksumEcSocketTimeout
   */
  public int getChecksumEcSocketTimeout() {
    return checksumEcSocketTimeout;
  }
  /**
   * @return the writePacketSize
   */
  public int getWritePacketSize() {
    return writePacketSize;
  }
  /**
   * @return the writeMaxPackets
   */
  public int getWriteMaxPackets() {
    return writeMaxPackets;
  }
  /**
   * @return the writeByteArrayManagerConf; null when the manager is disabled
   */
  public ByteArrayManager.Conf getWriteByteArrayManagerConf() {
    return writeByteArrayManagerConf;
  }
  /**
   * @return whether TCP_NODELAY should be set on client sockets
   */
  public boolean getDataTransferTcpNoDelay() {
    return dataTransferTcpNoDelay;
  }
  /**
   * @return the socketTimeout
   */
  public int getSocketTimeout() {
    return socketTimeout;
  }
  /**
   * @return the socketSendBufferSize
   */
  public int getSocketSendBufferSize() {
    return socketSendBufferSize;
  }
  /**
   * @return the excludedNodesCacheExpiry
   */
  public long getExcludedNodesCacheExpiry() {
    return excludedNodesCacheExpiry;
  }
  /**
   * @return the timeWindow — wait window (msec) used when a
   *         BlockMissingException is caught
   */
  public int getTimeWindow() {
    return timeWindow;
  }
  /**
   * @return the numCachedConnRetry
   */
  public int getNumCachedConnRetry() {
    return numCachedConnRetry;
  }
  /**
   * @return the numBlockWriteRetry
   */
  public int getNumBlockWriteRetry() {
    return numBlockWriteRetry;
  }
  /**
   * @return the numBlockWriteLocateFollowingRetry
   */
  public int getNumBlockWriteLocateFollowingRetry() {
    return numBlockWriteLocateFollowingRetry;
  }
  /**
   * @return the defaultBlockSize
   */
  public long getDefaultBlockSize() {
    return defaultBlockSize;
  }
  /**
   * @return the prefetchSize
   */
  public long getPrefetchSize() {
    return prefetchSize;
  }
  /**
   * @return the uriCacheEnabled
   */
  public boolean isUriCacheEnabled() {
    return uriCacheEnabled;
  }
  /**
   * @return the defaultReplication
   */
  public short getDefaultReplication() {
    return defaultReplication;
  }
  /**
   * @return the taskId
   */
  public String getTaskId() {
    return taskId;
  }
  /**
   * @return the uMask
   */
  public FsPermission getUMask() {
    return uMask;
  }
  /**
   * @return the connectToDnViaHostname
   */
  public boolean isConnectToDnViaHostname() {
    return connectToDnViaHostname;
  }
  /**
   * @return the retryTimesForGetLastBlockLength
   */
  public int getRetryTimesForGetLastBlockLength() {
    return retryTimesForGetLastBlockLength;
  }
  /**
   * @return the retryIntervalForGetLastBlockLength
   */
  public int getRetryIntervalForGetLastBlockLength() {
    return retryIntervalForGetLastBlockLength;
  }
  /**
   * @return the datanodeRestartTimeout
   */
  public long getDatanodeRestartTimeout() {
    return datanodeRestartTimeout;
  }
  /**
   * @return the slowIoWarningThresholdMs
   */
  public long getSlowIoWarningThresholdMs() {
    return slowIoWarningThresholdMs;
  }
  /**
   * @return the continuous slowNode replies received to mark slowNode as badNode
   */
  public int getMarkSlowNodeAsBadNodeThreshold() {
    return markSlowNodeAsBadNodeThreshold;
  }
  /**
   * @return the clientShortCircuitNum
   */
  public int getClientShortCircuitNum() {
    return clientShortCircuitNum;
  }
  /**
   * @return the hedgedReadThresholdMillis
   */
  public long getHedgedReadThresholdMillis() {
    return hedgedReadThresholdMillis;
  }
  /**
   * @return the hedgedReadThreadpoolSize
   */
  public int getHedgedReadThreadpoolSize() {
    return hedgedReadThreadpoolSize;
  }
  /**
   * @return the stripedReadThreadpoolSize
   */
  public int getStripedReadThreadpoolSize() {
    return stripedReadThreadpoolSize;
  }
  /**
   * @return the deadNodeDetectionEnabled
   */
  public boolean isDeadNodeDetectionEnabled() {
    return deadNodeDetectionEnabled;
  }
  /**
   * @return the leaseHardLimitPeriod, in milliseconds
   */
  public long getleaseHardLimitPeriod() {
    return leaseHardLimitPeriod;
  }
  /**
   * @return the readUseCachePriority
   */
  public boolean isReadUseCachePriority() {
    return readUseCachePriority;
  }
  /**
   * @return the replicaAccessorBuilderClasses
   */
  public List<Class<? extends ReplicaAccessorBuilder>>
      getReplicaAccessorBuilderClasses() {
    return replicaAccessorBuilderClasses;
  }
  /**
   * @return true when a positive refresh interval has been configured
   */
  public boolean isLocatedBlocksRefresherEnabled() {
    return refreshReadBlockLocationsMS > 0;
  }
  /**
   * @return the refreshReadBlockLocationsMS interval
   */
  public long getLocatedBlocksRefresherInterval() {
    return refreshReadBlockLocationsMS;
  }
  /**
   * @return the refreshReadBlockLocationsAutomatically
   */
  public boolean isRefreshReadBlockLocationsAutomatically() {
    return refreshReadBlockLocationsAutomatically;
  }
  /**
   * @return the shortCircuitConf
   */
  public ShortCircuitConf getShortCircuitConf() {
    return shortCircuitConf;
  }
  /**
   * @return the maxPipelineRecoveryRetries
   */
  public int getMaxPipelineRecoveryRetries() {
    return maxPipelineRecoveryRetries;
  }
  /**
   * @return the recoverLeaseOnCloseException
   */
  public boolean getRecoverLeaseOnCloseException() {
    return recoverLeaseOnCloseException;
  }
/**
 * Configuration for short-circuit reads.
 *
 * <p>All values are read once from the supplied {@link Configuration} at
 * construction time; instances are immutable afterwards.
 */
public static class ShortCircuitConf {
  private static final Logger LOG = DfsClientConf.LOG;
  private final int socketCacheCapacity;
  private final long socketCacheExpiry;
  private final boolean useLegacyBlockReaderLocal;
  private final String domainSocketPath;
  private final boolean skipShortCircuitChecksums;
  private final int shortCircuitBufferSize;
  private final boolean shortCircuitLocalReads;
  private final boolean domainSocketDataTraffic;
  private final int shortCircuitStreamsCacheSize;
  private final long shortCircuitStreamsCacheExpiryMs;
  private final int shortCircuitSharedMemoryWatcherInterruptCheckMs;
  // Short Circuit Read Metrics
  private final boolean scrMetricsEnabled;
  private final int scrMetricsSamplingPercentage;
  private final boolean shortCircuitMmapEnabled;
  private final int shortCircuitMmapCacheSize;
  private final long shortCircuitMmapCacheExpiryMs;
  private final long shortCircuitMmapCacheRetryTimeout;
  private final long shortCircuitCacheStaleThresholdMs;
  private final long domainSocketDisableIntervalSeconds;
  private final long keyProviderCacheExpiryMs;

  /**
   * Snapshots all short-circuit related settings from {@code conf}.
   *
   * @param conf source of configuration values; not retained.
   * @throws IllegalArgumentException if the domain socket disable interval
   *     is negative.
   */
  public ShortCircuitConf(Configuration conf) {
    socketCacheCapacity = conf.getInt(
        DFS_CLIENT_SOCKET_CACHE_CAPACITY_KEY,
        DFS_CLIENT_SOCKET_CACHE_CAPACITY_DEFAULT);
    socketCacheExpiry = conf.getLong(
        DFS_CLIENT_SOCKET_CACHE_EXPIRY_MSEC_KEY,
        DFS_CLIENT_SOCKET_CACHE_EXPIRY_MSEC_DEFAULT);
    useLegacyBlockReaderLocal = conf.getBoolean(
        DFS_CLIENT_USE_LEGACY_BLOCKREADERLOCAL,
        DFS_CLIENT_USE_LEGACY_BLOCKREADERLOCAL_DEFAULT);
    shortCircuitLocalReads = conf.getBoolean(
        Read.ShortCircuit.KEY,
        Read.ShortCircuit.DEFAULT);
    // Clamp the metrics sampling percentage into [0, 100]; metrics are
    // disabled entirely when the configured value is non-positive.
    int scrSamplingPercentage = conf.getInt(
        Read.ShortCircuit.METRICS_SAMPLING_PERCENTAGE_KEY,
        Read.ShortCircuit.METRICS_SAMPLING_PERCENTAGE_DEFAULT);
    if (scrSamplingPercentage <= 0) {
      scrMetricsSamplingPercentage = 0;
      scrMetricsEnabled = false;
    } else if (scrSamplingPercentage > 100) {
      scrMetricsSamplingPercentage = 100;
      scrMetricsEnabled = true;
    } else {
      scrMetricsSamplingPercentage = scrSamplingPercentage;
      scrMetricsEnabled = true;
    }
    domainSocketDataTraffic = conf.getBoolean(
        DFS_CLIENT_DOMAIN_SOCKET_DATA_TRAFFIC,
        DFS_CLIENT_DOMAIN_SOCKET_DATA_TRAFFIC_DEFAULT);
    domainSocketPath = conf.getTrimmed(
        DFS_DOMAIN_SOCKET_PATH_KEY,
        DFS_DOMAIN_SOCKET_PATH_DEFAULT);
    // Fully parameterized logging: the original concatenated the key into the
    // format string eagerly, paying the string-build cost even with debug off.
    LOG.debug("{} = {}", DFS_CLIENT_USE_LEGACY_BLOCKREADERLOCAL,
        useLegacyBlockReaderLocal);
    LOG.debug("{} = {}", Read.ShortCircuit.KEY, shortCircuitLocalReads);
    LOG.debug("{} = {}", DFS_CLIENT_DOMAIN_SOCKET_DATA_TRAFFIC,
        domainSocketDataTraffic);
    LOG.debug("{} = {}", DFS_DOMAIN_SOCKET_PATH_KEY, domainSocketPath);
    skipShortCircuitChecksums = conf.getBoolean(
        Read.ShortCircuit.SKIP_CHECKSUM_KEY,
        Read.ShortCircuit.SKIP_CHECKSUM_DEFAULT);
    shortCircuitBufferSize = conf.getInt(
        Read.ShortCircuit.BUFFER_SIZE_KEY,
        Read.ShortCircuit.BUFFER_SIZE_DEFAULT);
    shortCircuitStreamsCacheSize = conf.getInt(
        Read.ShortCircuit.STREAMS_CACHE_SIZE_KEY,
        Read.ShortCircuit.STREAMS_CACHE_SIZE_DEFAULT);
    shortCircuitStreamsCacheExpiryMs = conf.getLong(
        Read.ShortCircuit.STREAMS_CACHE_EXPIRY_MS_KEY,
        Read.ShortCircuit.STREAMS_CACHE_EXPIRY_MS_DEFAULT);
    shortCircuitMmapEnabled = conf.getBoolean(
        Mmap.ENABLED_KEY,
        Mmap.ENABLED_DEFAULT);
    shortCircuitMmapCacheSize = conf.getInt(
        Mmap.CACHE_SIZE_KEY,
        Mmap.CACHE_SIZE_DEFAULT);
    shortCircuitMmapCacheExpiryMs = conf.getLong(
        Mmap.CACHE_TIMEOUT_MS_KEY,
        Mmap.CACHE_TIMEOUT_MS_DEFAULT);
    shortCircuitMmapCacheRetryTimeout = conf.getLong(
        Mmap.RETRY_TIMEOUT_MS_KEY,
        Mmap.RETRY_TIMEOUT_MS_DEFAULT);
    shortCircuitCacheStaleThresholdMs = conf.getLong(
        ShortCircuit.REPLICA_STALE_THRESHOLD_MS_KEY,
        ShortCircuit.REPLICA_STALE_THRESHOLD_MS_DEFAULT);
    shortCircuitSharedMemoryWatcherInterruptCheckMs = conf.getInt(
        DFS_SHORT_CIRCUIT_SHARED_MEMORY_WATCHER_INTERRUPT_CHECK_MS,
        DFS_SHORT_CIRCUIT_SHARED_MEMORY_WATCHER_INTERRUPT_CHECK_MS_DEFAULT);
    domainSocketDisableIntervalSeconds = conf.getLong(
        DFS_DOMAIN_SOCKET_DISABLE_INTERVAL_SECOND_KEY,
        DFS_DOMAIN_SOCKET_DISABLE_INTERVAL_SECOND_DEFAULT);
    // Fixed: the original message was missing the space after the key, so it
    // rendered as "...<key>can't be negative.".
    Preconditions.checkArgument(domainSocketDisableIntervalSeconds >= 0,
        DFS_DOMAIN_SOCKET_DISABLE_INTERVAL_SECOND_KEY + " can't be negative.");
    keyProviderCacheExpiryMs = conf.getLong(
        DFS_CLIENT_KEY_PROVIDER_CACHE_EXPIRY_MS,
        DFS_CLIENT_KEY_PROVIDER_CACHE_EXPIRY_DEFAULT);
  }
  /**
   * @return the socketCacheCapacity
   */
  public int getSocketCacheCapacity() {
    return socketCacheCapacity;
  }
  /**
   * @return the socketCacheExpiry
   */
  public long getSocketCacheExpiry() {
    return socketCacheExpiry;
  }
  /**
   * @return whether the legacy local block reader is in use.
   */
  public boolean isUseLegacyBlockReaderLocal() {
    return useLegacyBlockReaderLocal;
  }
  /**
   * @return the configured domain socket path (trimmed).
   */
  public String getDomainSocketPath() {
    return domainSocketPath;
  }
  /**
   * @return whether short-circuit local reads are enabled.
   */
  public boolean isShortCircuitLocalReads() {
    return shortCircuitLocalReads;
  }
  /**
   * @return whether short-circuit read metrics are enabled (sampling &gt; 0).
   */
  public boolean isScrMetricsEnabled() {
    return scrMetricsEnabled;
  }
  /**
   * @return the short-circuit read metrics sampling percentage, in [0, 100].
   */
  public int getScrMetricsSamplingPercentage() {
    return scrMetricsSamplingPercentage;
  }
  /**
   * @return whether data traffic over the domain socket is enabled.
   */
  public boolean isDomainSocketDataTraffic() {
    return domainSocketDataTraffic;
  }
  /**
   * @return the skipShortCircuitChecksums
   */
  public boolean isSkipShortCircuitChecksums() {
    return skipShortCircuitChecksums;
  }
  /**
   * @return the shortCircuitBufferSize
   */
  public int getShortCircuitBufferSize() {
    return shortCircuitBufferSize;
  }
  /**
   * @return the shortCircuitStreamsCacheSize
   */
  public int getShortCircuitStreamsCacheSize() {
    return shortCircuitStreamsCacheSize;
  }
  /**
   * @return the shortCircuitStreamsCacheExpiryMs
   */
  public long getShortCircuitStreamsCacheExpiryMs() {
    return shortCircuitStreamsCacheExpiryMs;
  }
  /**
   * @return the shortCircuitSharedMemoryWatcherInterruptCheckMs
   */
  public int getShortCircuitSharedMemoryWatcherInterruptCheckMs() {
    return shortCircuitSharedMemoryWatcherInterruptCheckMs;
  }
  /**
   * @return the shortCircuitMmapEnabled
   */
  public boolean isShortCircuitMmapEnabled() {
    return shortCircuitMmapEnabled;
  }
  /**
   * @return the shortCircuitMmapCacheSize
   */
  public int getShortCircuitMmapCacheSize() {
    return shortCircuitMmapCacheSize;
  }
  /**
   * @return the shortCircuitMmapCacheExpiryMs
   */
  public long getShortCircuitMmapCacheExpiryMs() {
    return shortCircuitMmapCacheExpiryMs;
  }
  /**
   * @return the shortCircuitMmapCacheRetryTimeout
   */
  public long getShortCircuitMmapCacheRetryTimeout() {
    return shortCircuitMmapCacheRetryTimeout;
  }
  /**
   * @return the shortCircuitCacheStaleThresholdMs
   */
  public long getShortCircuitCacheStaleThresholdMs() {
    return shortCircuitCacheStaleThresholdMs;
  }
  /**
   * @return the domainSocketDisableIntervalSeconds
   */
  public long getDomainSocketDisableIntervalSeconds() {
    return domainSocketDisableIntervalSeconds;
  }
  /**
   * @return the keyProviderCacheExpiryMs
   */
  public long getKeyProviderCacheExpiryMs() {
    return keyProviderCacheExpiryMs;
  }
  /**
   * @return a human-readable dump of all settings, for diagnostics.
   */
  public String confAsString() {
    return "shortCircuitStreamsCacheSize = "
        + shortCircuitStreamsCacheSize
        + ", shortCircuitStreamsCacheExpiryMs = "
        + shortCircuitStreamsCacheExpiryMs
        + ", shortCircuitMmapCacheSize = "
        + shortCircuitMmapCacheSize
        + ", shortCircuitMmapCacheExpiryMs = "
        + shortCircuitMmapCacheExpiryMs
        + ", shortCircuitMmapCacheRetryTimeout = "
        + shortCircuitMmapCacheRetryTimeout
        + ", shortCircuitCacheStaleThresholdMs = "
        + shortCircuitCacheStaleThresholdMs
        + ", socketCacheCapacity = "
        + socketCacheCapacity
        + ", socketCacheExpiry = "
        + socketCacheExpiry
        + ", shortCircuitLocalReads = "
        + shortCircuitLocalReads
        + ", useLegacyBlockReaderLocal = "
        + useLegacyBlockReaderLocal
        + ", domainSocketDataTraffic = "
        + domainSocketDataTraffic
        + ", shortCircuitSharedMemoryWatcherInterruptCheckMs = "
        + shortCircuitSharedMemoryWatcherInterruptCheckMs
        + ", keyProviderCacheExpiryMs = "
        + keyProviderCacheExpiryMs
        + ", domainSocketDisableIntervalSeconds = "
        + domainSocketDisableIntervalSeconds;
  }
}
}
|
apache/rocketmq | 37,299 | broker/src/main/java/org/apache/rocketmq/broker/schedule/ScheduleMessageService.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.rocketmq.broker.schedule;
import io.opentelemetry.api.common.Attributes;
import java.util.HashMap;
import java.util.Map;
import java.util.Queue;
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;
import java.util.concurrent.ConcurrentSkipListMap;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.LinkedBlockingQueue;
import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.concurrent.atomic.AtomicLong;
import org.apache.rocketmq.broker.BrokerController;
import org.apache.rocketmq.common.ConfigManager;
import org.apache.rocketmq.common.MixAll;
import org.apache.rocketmq.common.ThreadFactoryImpl;
import org.apache.rocketmq.common.TopicFilterType;
import org.apache.rocketmq.common.attribute.TopicMessageType;
import org.apache.rocketmq.common.constant.LoggerName;
import org.apache.rocketmq.common.message.MessageAccessor;
import org.apache.rocketmq.common.message.MessageConst;
import org.apache.rocketmq.common.message.MessageDecoder;
import org.apache.rocketmq.common.message.MessageExt;
import org.apache.rocketmq.common.message.MessageExtBrokerInner;
import org.apache.rocketmq.common.running.RunningStats;
import org.apache.rocketmq.common.topic.TopicValidator;
import org.apache.rocketmq.common.utils.ThreadUtils;
import org.apache.rocketmq.logging.org.slf4j.Logger;
import org.apache.rocketmq.logging.org.slf4j.LoggerFactory;
import org.apache.rocketmq.remoting.protocol.DataVersion;
import org.apache.rocketmq.store.PutMessageResult;
import org.apache.rocketmq.store.PutMessageStatus;
import org.apache.rocketmq.store.config.StorePathConfigHelper;
import org.apache.rocketmq.store.exception.ConsumeQueueException;
import org.apache.rocketmq.store.queue.ConsumeQueueInterface;
import org.apache.rocketmq.store.queue.CqUnit;
import org.apache.rocketmq.store.queue.ReferredIterator;
import static org.apache.rocketmq.broker.metrics.BrokerMetricsConstant.LABEL_CONSUMER_GROUP;
import static org.apache.rocketmq.broker.metrics.BrokerMetricsConstant.LABEL_IS_SYSTEM;
import static org.apache.rocketmq.broker.metrics.BrokerMetricsConstant.LABEL_MESSAGE_TYPE;
import static org.apache.rocketmq.broker.metrics.BrokerMetricsConstant.LABEL_TOPIC;
public class ScheduleMessageService extends ConfigManager {
private static final Logger log = LoggerFactory.getLogger(LoggerName.BROKER_LOGGER_NAME);
// Delay (ms) before the first per-level delivery task fires after start().
private static final long FIRST_DELAY_TIME = 1000L;
// Re-schedule interval (ms) used while polling the schedule consume queue.
private static final long DELAY_FOR_A_WHILE = 100L;
// Back-off (ms) applied after an unexpected delivery error.
private static final long DELAY_FOR_A_PERIOD = 10000L;
// Max time (ms) to await executor termination in stop().
private static final long WAIT_FOR_SHUTDOWN = 5000L;
// Poll interval (ms) of the async put-result handler task.
private static final long DELAY_FOR_A_SLEEP = 10L;
// delay level -> delay duration in milliseconds.
private final ConcurrentSkipListMap<Integer /* level */, Long/* delay timeMillis */> delayLevelTable =
    new ConcurrentSkipListMap<>();
// delay level -> next consume-queue offset to deliver (persisted to disk).
private final ConcurrentMap<Integer /* level */, Long/* offset */> offsetTable =
    new ConcurrentHashMap<>(32);
private final AtomicBoolean started = new AtomicBoolean(false);
private ScheduledExecutorService deliverExecutorService;
private int maxDelayLevel;
private DataVersion dataVersion = new DataVersion();
private boolean enableAsyncDeliver = false;
private ScheduledExecutorService handleExecutorService;
private final ScheduledExecutorService scheduledPersistService;
// delay level -> queued async put results awaiting status handling.
private final Map<Integer /* level */, LinkedBlockingQueue<PutResultProcess>> deliverPendingTable =
    new ConcurrentHashMap<>(32);
private final BrokerController brokerController;
// Counts offset updates so the data version is only bumped every N updates.
private final transient AtomicLong versionChangeCounter = new AtomicLong(0);
/**
 * @param brokerController owning broker; supplies config and the message store.
 */
public ScheduleMessageService(final BrokerController brokerController) {
    this.brokerController = brokerController;
    this.enableAsyncDeliver = brokerController.getMessageStoreConfig().isEnableScheduleAsyncDeliver();
    scheduledPersistService = ThreadUtils.newScheduledThreadPool(1,
        new ThreadFactoryImpl("ScheduleMessageServicePersistThread", true, brokerController.getBrokerConfig()));
}
/**
 * Maps a schedule-topic queue id to its delay level (queue 0 = level 1).
 */
public static int queueId2DelayLevel(final int queueId) {
    return queueId + 1;
}
/**
 * Maps a delay level to its schedule-topic queue id (level 1 = queue 0).
 */
public static int delayLevel2QueueId(final int delayLevel) {
    return delayLevel - 1;
}
/**
 * Adds one "scheduleMessageOffset_&lt;level&gt;" entry per delay level to
 * {@code stats}, formatted as "&lt;delayOffset&gt;,&lt;maxOffsetInQueue&gt;".
 *
 * @param stats destination map, mutated in place.
 * @throws ConsumeQueueException if the max-offset lookup fails.
 */
public void buildRunningStats(HashMap<String, String> stats) throws ConsumeQueueException {
    for (Map.Entry<Integer, Long> next : this.offsetTable.entrySet()) {
        int queueId = delayLevel2QueueId(next.getKey());
        long delayOffset = next.getValue();
        long maxOffset = this.brokerController.getMessageStore().getMaxOffsetInQueue(TopicValidator.RMQ_SYS_SCHEDULE_TOPIC, queueId);
        String value = String.format("%d,%d", delayOffset, maxOffset);
        String key = String.format("%s_%d", RunningStats.scheduleMessageOffset.name(), next.getKey());
        stats.put(key, value);
    }
}
/**
 * Records the delivery offset for a delay level, bumping the data version
 * only every {@code delayOffsetUpdateVersionStep} updates to limit churn.
 */
private void updateOffset(int delayLevel, long offset) {
    this.offsetTable.put(delayLevel, offset);
    if (versionChangeCounter.incrementAndGet() % brokerController.getBrokerConfig().getDelayOffsetUpdateVersionStep() == 0) {
        long stateMachineVersion = brokerController.getMessageStore() != null ? brokerController.getMessageStore().getStateMachineVersion() : 0;
        dataVersion.nextVersion(stateMachineVersion);
    }
}
/**
 * Computes when a message stored at {@code storeTimestamp} becomes due for
 * the given delay level. Unknown levels fall back to a fixed 1s delay.
 */
public long computeDeliverTimestamp(final int delayLevel, final long storeTimestamp) {
    Long delayMillis = this.delayLevelTable.get(delayLevel);
    return storeTimestamp + (delayMillis != null ? delayMillis : 1000);
}
/**
 * Loads offsets/delay levels, then schedules one delivery task per delay
 * level plus a periodic task that persists the offset table to disk.
 * Idempotent: guarded by a CAS on {@code started}, so only the first call
 * after construction (or after stop()) performs the setup.
 */
public void start() {
    if (started.compareAndSet(false, true)) {
        this.load();
        // One timer thread per delay level.
        this.deliverExecutorService = ThreadUtils.newScheduledThreadPool(this.maxDelayLevel, new ThreadFactoryImpl("ScheduleMessageTimerThread_"));
        if (this.enableAsyncDeliver) {
            this.handleExecutorService = ThreadUtils.newScheduledThreadPool(this.maxDelayLevel, new ThreadFactoryImpl("ScheduleMessageExecutorHandleThread_"));
        }
        for (Map.Entry<Integer, Long> entry : this.delayLevelTable.entrySet()) {
            Integer level = entry.getKey();
            Long timeDelay = entry.getValue();
            Long offset = this.offsetTable.get(level);
            if (null == offset) {
                offset = 0L;
            }
            if (timeDelay != null) {
                if (this.enableAsyncDeliver) {
                    this.handleExecutorService.schedule(new HandlePutResultTask(level), FIRST_DELAY_TIME, TimeUnit.MILLISECONDS);
                }
                this.deliverExecutorService.schedule(new DeliverDelayedMessageTimerTask(level, offset), FIRST_DELAY_TIME, TimeUnit.MILLISECONDS);
            }
        }
        // Periodically flush the delay offset table; errors are logged and
        // swallowed so the flush schedule keeps running.
        scheduledPersistService.scheduleAtFixedRate(() -> {
            try {
                ScheduleMessageService.this.persist();
            } catch (Throwable e) {
                log.error("scheduleAtFixedRate flush exception", e);
            }
        }, 10000, this.brokerController.getMessageStoreConfig().getFlushDelayOffsetInterval(), TimeUnit.MILLISECONDS);
    }
}
/**
 * Stops delivery and then tears down the offset-persist executor.
 */
public void shutdown() {
    stop();
    ThreadUtils.shutdown(scheduledPersistService);
}
/**
 * Shuts down the deliver/handle executors (waiting up to WAIT_FOR_SHUTDOWN ms
 * each), logs any still-pending async deliveries, and persists offsets.
 * Only the first call after start() performs the teardown (CAS-guarded).
 *
 * @return always true.
 */
public boolean stop() {
    if (this.started.compareAndSet(true, false) && null != this.deliverExecutorService) {
        this.deliverExecutorService.shutdown();
        try {
            this.deliverExecutorService.awaitTermination(WAIT_FOR_SHUTDOWN, TimeUnit.MILLISECONDS);
        } catch (InterruptedException e) {
            log.error("deliverExecutorService awaitTermination error", e);
        }
        if (this.handleExecutorService != null) {
            this.handleExecutorService.shutdown();
            try {
                this.handleExecutorService.awaitTermination(WAIT_FOR_SHUTDOWN, TimeUnit.MILLISECONDS);
            } catch (InterruptedException e) {
                log.error("handleExecutorService awaitTermination error", e);
            }
        }
        // Levels are 1-based; parseDelayLevel populates keys 1..size.
        for (int i = 1; i <= this.deliverPendingTable.size(); i++) {
            log.warn("deliverPendingTable level: {}, size: {}", i, this.deliverPendingTable.get(i).size());
        }
        this.persist();
    }
    return true;
}
/**
 * @return whether the service is currently started.
 */
public boolean isStarted() {
    return started.get();
}
/**
 * @return the highest configured delay level (1-based).
 */
public int getMaxDelayLevel() {
    return maxDelayLevel;
}
/**
 * @return the data version of the offset table.
 */
public DataVersion getDataVersion() {
    return dataVersion;
}
/**
 * @param dataVersion replaces the current offset-table data version.
 */
public void setDataVersion(DataVersion dataVersion) {
    this.dataVersion = dataVersion;
}
/**
 * Serializes the offset table in compact (non-pretty) JSON form.
 */
@Override
public String encode() {
    return this.encode(false);
}
/**
 * Loads the persisted delay offsets, parses the delay-level config, and
 * corrects any out-of-range offsets.
 *
 * @return true only if all three steps succeed.
 */
@Override
public boolean load() {
    boolean result = super.load();
    result = result && this.parseDelayLevel();
    result = result && this.correctDelayOffset();
    return result;
}
/**
 * Like {@link #load()} but skips offset correction — presumably used when
 * delay offsets are synced from another broker (TODO confirm with callers).
 */
public boolean loadWhenSyncDelayOffset() {
    boolean result = super.load();
    result = result && this.parseDelayLevel();
    return result;
}
/**
 * Clamps each persisted delay offset into the valid [min, max] range of its
 * schedule consume queue, repairing offsets that drifted out of range.
 *
 * @return false only if an unexpected exception occurred during correction.
 */
public boolean correctDelayOffset() {
    try {
        for (int delayLevel : delayLevelTable.keySet()) {
            ConsumeQueueInterface cq =
                brokerController.getMessageStore().findConsumeQueue(TopicValidator.RMQ_SYS_SCHEDULE_TOPIC,
                    delayLevel2QueueId(delayLevel));
            // Levels without a stored offset or without a consume queue need
            // no correction.
            Long currentDelayOffset = offsetTable.get(delayLevel);
            if (currentDelayOffset == null || cq == null) {
                continue;
            }
            long correctDelayOffset = currentDelayOffset;
            long cqMinOffset = cq.getMinOffsetInQueue();
            long cqMaxOffset = cq.getMaxOffsetInQueue();
            // Clamp to the queue's min first, then its max.
            if (currentDelayOffset < cqMinOffset) {
                correctDelayOffset = cqMinOffset;
                log.error("schedule CQ offset invalid. offset={}, cqMinOffset={}, cqMaxOffset={}, queueId={}",
                    currentDelayOffset, cqMinOffset, cqMaxOffset, cq.getQueueId());
            }
            if (currentDelayOffset > cqMaxOffset) {
                correctDelayOffset = cqMaxOffset;
                log.error("schedule CQ offset invalid. offset={}, cqMinOffset={}, cqMaxOffset={}, queueId={}",
                    currentDelayOffset, cqMinOffset, cqMaxOffset, cq.getQueueId());
            }
            if (correctDelayOffset != currentDelayOffset) {
                log.error("correct delay offset [ delayLevel {} ] from {} to {}", delayLevel, currentDelayOffset, correctDelayOffset);
                offsetTable.put(delayLevel, correctDelayOffset);
            }
        }
    } catch (Exception e) {
        log.error("correctDelayOffset exception", e);
        return false;
    }
    return true;
}
/**
 * @return the file path where the delay offset table is persisted.
 */
@Override
public String configFilePath() {
    return StorePathConfigHelper.getDelayOffsetStorePath(this.brokerController.getMessageStore().getMessageStoreConfig()
        .getStorePathRootDir());
}
/**
 * Restores the offset table (and, when present, the data version) from the
 * persisted JSON form. A null or unparsable input leaves state unchanged.
 */
@Override
public void decode(String jsonString) {
    if (jsonString != null) {
        DelayOffsetSerializeWrapper delayOffsetSerializeWrapper =
            DelayOffsetSerializeWrapper.fromJson(jsonString, DelayOffsetSerializeWrapper.class);
        if (delayOffsetSerializeWrapper != null) {
            this.offsetTable.putAll(delayOffsetSerializeWrapper.getOffsetTable());
            // For compatible
            if (delayOffsetSerializeWrapper.getDataVersion() != null) {
                this.dataVersion.assignNewOne(delayOffsetSerializeWrapper.getDataVersion());
            }
        }
    }
}
/**
 * Serializes the offset table plus its data version to JSON.
 *
 * @param prettyFormat whether to pretty-print the output.
 */
@Override
public String encode(final boolean prettyFormat) {
    DelayOffsetSerializeWrapper delayOffsetSerializeWrapper = new DelayOffsetSerializeWrapper();
    delayOffsetSerializeWrapper.setOffsetTable(this.offsetTable);
    delayOffsetSerializeWrapper.setDataVersion(this.dataVersion);
    return delayOffsetSerializeWrapper.toJson(prettyFormat);
}
/**
 * Parses {@code messageDelayLevel} (e.g. "1s 5s 10s ... 2h") into
 * {@code delayLevelTable}. Levels are 1-based in configured order; also
 * updates {@code maxDelayLevel} and, when async deliver is enabled, creates
 * one pending queue per level.
 *
 * @return true on success; false if the config string cannot be parsed
 *     (including an unknown unit suffix, which surfaces as an NPE caught
 *     by the catch-all below).
 */
public boolean parseDelayLevel() {
    HashMap<String, Long> timeUnitTable = new HashMap<>();
    timeUnitTable.put("s", 1000L);
    timeUnitTable.put("m", 1000L * 60);
    timeUnitTable.put("h", 1000L * 60 * 60);
    timeUnitTable.put("d", 1000L * 60 * 60 * 24);
    String levelString = this.brokerController.getMessageStoreConfig().getMessageDelayLevel();
    try {
        String[] levelArray = levelString.split(" ");
        for (int i = 0; i < levelArray.length; i++) {
            String value = levelArray[i];
            // e.g. "10m": the unit suffix is the last char, the count the prefix.
            String ch = value.substring(value.length() - 1);
            Long tu = timeUnitTable.get(ch);
            int level = i + 1;
            if (level > this.maxDelayLevel) {
                this.maxDelayLevel = level;
            }
            long num = Long.parseLong(value.substring(0, value.length() - 1));
            long delayTimeMillis = tu * num;
            this.delayLevelTable.put(level, delayTimeMillis);
            if (this.enableAsyncDeliver) {
                this.deliverPendingTable.put(level, new LinkedBlockingQueue<>());
            }
        }
    } catch (Exception e) {
        log.error("parse message delay level failed. messageDelayLevel = {}", levelString, e);
        return false;
    }
    return true;
}
/**
 * Rebuilds the original message from a schedule-topic entry once its delay
 * has elapsed: copies body/flags/properties, restores the real topic and
 * queue id, and strips the delay/timer properties so it is not rescheduled.
 */
private MessageExtBrokerInner messageTimeUp(MessageExt msgExt) {
    MessageExtBrokerInner msgInner = new MessageExtBrokerInner();
    msgInner.setBody(msgExt.getBody());
    msgInner.setFlag(msgExt.getFlag());
    MessageAccessor.setProperties(msgInner, msgExt.getProperties());
    TopicFilterType topicFilterType = MessageExt.parseTopicFilterType(msgInner.getSysFlag());
    long tagsCodeValue =
        MessageExtBrokerInner.tagsString2tagsCode(topicFilterType, msgInner.getTags());
    msgInner.setTagsCode(tagsCodeValue);
    msgInner.setPropertiesString(MessageDecoder.messageProperties2String(msgExt.getProperties()));
    msgInner.setSysFlag(msgExt.getSysFlag());
    msgInner.setBornTimestamp(msgExt.getBornTimestamp());
    msgInner.setBornHost(msgExt.getBornHost());
    msgInner.setStoreHost(msgExt.getStoreHost());
    msgInner.setReconsumeTimes(msgExt.getReconsumeTimes());
    msgInner.setWaitStoreMsgOK(false);
    // Remove scheduling metadata so the re-put message is delivered normally.
    MessageAccessor.clearProperty(msgInner, MessageConst.PROPERTY_DELAY_TIME_LEVEL);
    MessageAccessor.clearProperty(msgInner, MessageConst.PROPERTY_TIMER_DELIVER_MS);
    MessageAccessor.clearProperty(msgInner, MessageConst.PROPERTY_TIMER_DELAY_SEC);
    // Restore the destination the producer originally targeted.
    msgInner.setTopic(msgInner.getProperty(MessageConst.PROPERTY_REAL_TOPIC));
    String queueIdStr = msgInner.getProperty(MessageConst.PROPERTY_REAL_QUEUE_ID);
    int queueId = Integer.parseInt(queueIdStr);
    msgInner.setQueueId(queueId);
    return msgInner;
}
/**
 * Timer task bound to one delay level: scans that level's consume queue from
 * {@code offset}, delivers every message whose deliver timestamp has passed,
 * and re-schedules itself at the next offset in every exit path.
 */
class DeliverDelayedMessageTimerTask implements Runnable {
    private final int delayLevel;
    // Consume-queue offset this scan starts from.
    private final long offset;
    public DeliverDelayedMessageTimerTask(int delayLevel, long offset) {
        this.delayLevel = delayLevel;
        this.offset = offset;
    }
    @Override
    public void run() {
        try {
            if (isStarted()) {
                this.executeOnTimeUp();
            }
        } catch (Throwable e) {
            // XXX: warn and notify me
            // On unexpected failure, retry from the same offset after a
            // longer back-off (DELAY_FOR_A_PERIOD).
            log.error("ScheduleMessageService, executeOnTimeUp exception", e);
            this.scheduleNextTimerTask(this.offset, DELAY_FOR_A_PERIOD);
        }
    }
    /**
     * Guards against clock/config anomalies: a deliver timestamp further in
     * the future than now + the level's delay is delivered immediately.
     */
    private long correctDeliverTimestamp(final long now, final long deliverTimestamp) {
        long result = deliverTimestamp;
        long maxTimestamp = now + ScheduleMessageService.this.delayLevelTable.get(this.delayLevel);
        if (deliverTimestamp > maxTimestamp) {
            result = now;
        }
        return result;
    }
    /**
     * One scan pass over this level's consume queue: delivers all due
     * messages, stops at the first not-yet-due one (re-scheduling at its
     * offset), and always re-schedules a follow-up task before returning.
     */
    public void executeOnTimeUp() {
        ConsumeQueueInterface cq =
            ScheduleMessageService.this.brokerController.getMessageStore().getConsumeQueue(TopicValidator.RMQ_SYS_SCHEDULE_TOPIC,
                delayLevel2QueueId(delayLevel));
        if (cq == null) {
            this.scheduleNextTimerTask(this.offset, DELAY_FOR_A_WHILE);
            return;
        }
        ReferredIterator<CqUnit> bufferCQ = cq.iterateFrom(this.offset);
        if (bufferCQ == null) {
            // Offset is outside the queue's range: clamp to min/max and retry.
            long resetOffset;
            if ((resetOffset = cq.getMinOffsetInQueue()) > this.offset) {
                log.error("schedule CQ offset invalid. offset={}, cqMinOffset={}, queueId={}",
                    this.offset, resetOffset, cq.getQueueId());
            } else if ((resetOffset = cq.getMaxOffsetInQueue()) < this.offset) {
                log.error("schedule CQ offset invalid. offset={}, cqMaxOffset={}, queueId={}",
                    this.offset, resetOffset, cq.getQueueId());
            } else {
                resetOffset = this.offset;
            }
            this.scheduleNextTimerTask(resetOffset, DELAY_FOR_A_WHILE);
            return;
        }
        long nextOffset = this.offset;
        try {
            while (bufferCQ.hasNext() && isStarted()) {
                CqUnit cqUnit = bufferCQ.next();
                long offsetPy = cqUnit.getPos();
                int sizePy = cqUnit.getSize();
                // For the schedule topic, the tags code stores the deliver
                // timestamp rather than a message tag hash.
                long tagsCode = cqUnit.getTagsCode();
                if (!cqUnit.isTagsCodeValid()) {
                    //can't find ext content.So re compute tags code.
                    log.error("[BUG] can't find consume queue extend file content!addr={}, offsetPy={}, sizePy={}",
                        tagsCode, offsetPy, sizePy);
                    long msgStoreTime = ScheduleMessageService.this.brokerController.getMessageStore().getCommitLog().pickupStoreTimestamp(offsetPy, sizePy);
                    tagsCode = computeDeliverTimestamp(delayLevel, msgStoreTime);
                }
                long now = System.currentTimeMillis();
                long deliverTimestamp = this.correctDeliverTimestamp(now, tagsCode);
                long currOffset = cqUnit.getQueueOffset();
                assert cqUnit.getBatchNum() == 1;
                nextOffset = currOffset + cqUnit.getBatchNum();
                long countdown = deliverTimestamp - now;
                if (countdown > 0) {
                    // Not due yet: remember this offset and come back shortly.
                    this.scheduleNextTimerTask(currOffset, DELAY_FOR_A_WHILE);
                    ScheduleMessageService.this.updateOffset(this.delayLevel, currOffset);
                    return;
                }
                MessageExt msgExt = ScheduleMessageService.this.brokerController.getMessageStore().lookMessageByOffset(offsetPy, sizePy);
                if (msgExt == null) {
                    continue;
                }
                MessageExtBrokerInner msgInner = ScheduleMessageService.this.messageTimeUp(msgExt);
                if (TopicValidator.RMQ_SYS_TRANS_HALF_TOPIC.equals(msgInner.getTopic())) {
                    log.error("[BUG] the real topic of schedule msg is {}, discard the msg. msg={}",
                        msgInner.getTopic(), msgInner);
                    continue;
                }
                boolean deliverSuc;
                if (ScheduleMessageService.this.enableAsyncDeliver) {
                    deliverSuc = this.asyncDeliver(msgInner, msgExt.getMsgId(), currOffset, offsetPy, sizePy);
                } else {
                    deliverSuc = this.syncDeliver(msgInner, msgExt.getMsgId(), currOffset, offsetPy, sizePy);
                }
                if (!deliverSuc) {
                    // Delivery rejected (flow control / failure): retry this
                    // same offset shortly.
                    this.scheduleNextTimerTask(currOffset, DELAY_FOR_A_WHILE);
                    return;
                }
            }
        } catch (Exception e) {
            log.error("ScheduleMessageService, messageTimeUp execute error, offset = {}", nextOffset, e);
        } finally {
            bufferCQ.release();
        }
        this.scheduleNextTimerTask(nextOffset, DELAY_FOR_A_WHILE);
    }
    /**
     * Schedules the follow-up scan of this delay level at {@code offset}.
     */
    public void scheduleNextTimerTask(long offset, long delay) {
        ScheduleMessageService.this.deliverExecutorService.schedule(new DeliverDelayedMessageTimerTask(
            this.delayLevel, offset), delay, TimeUnit.MILLISECONDS);
    }
    /**
     * Puts the message and blocks on the result; advances the level offset
     * only on PUT_OK.
     *
     * @return whether the put succeeded.
     */
    private boolean syncDeliver(MessageExtBrokerInner msgInner, String msgId, long offset, long offsetPy,
        int sizePy) {
        PutResultProcess resultProcess = deliverMessage(msgInner, msgId, offset, offsetPy, sizePy, false);
        PutMessageResult result = resultProcess.get();
        boolean sendStatus = result != null && result.getPutMessageStatus() == PutMessageStatus.PUT_OK;
        if (sendStatus) {
            ScheduleMessageService.this.updateOffset(this.delayLevel, resultProcess.getNextOffset());
        }
        return sendStatus;
    }
    /**
     * Enqueues an asynchronous put for later status handling. Returns false
     * (caller retries) when the pending queue exceeds the flow-control limit
     * or its head is blocked.
     */
    private boolean asyncDeliver(MessageExtBrokerInner msgInner, String msgId, long offset, long offsetPy,
        int sizePy) {
        Queue<PutResultProcess> processesQueue = ScheduleMessageService.this.deliverPendingTable.get(this.delayLevel);
        //Flow Control
        int currentPendingNum = processesQueue.size();
        int maxPendingLimit = brokerController.getMessageStoreConfig()
            .getScheduleAsyncDeliverMaxPendingLimit();
        if (currentPendingNum > maxPendingLimit) {
            log.warn("Asynchronous deliver triggers flow control, " +
                "currentPendingNum={}, maxPendingLimit={}", currentPendingNum, maxPendingLimit);
            return false;
        }
        //Blocked
        PutResultProcess firstProcess = processesQueue.peek();
        if (firstProcess != null && firstProcess.need2Blocked()) {
            log.warn("Asynchronous deliver block. info={}", firstProcess.toString());
            return false;
        }
        PutResultProcess resultProcess = deliverMessage(msgInner, msgId, offset, offsetPy, sizePy, true);
        processesQueue.add(resultProcess);
        return true;
    }
    /**
     * Starts the async put via the escape bridge and wraps it in a
     * {@link PutResultProcess} carrying the source offsets for bookkeeping.
     */
    private PutResultProcess deliverMessage(MessageExtBrokerInner msgInner, String msgId, long offset,
        long offsetPy, int sizePy, boolean autoResend) {
        CompletableFuture<PutMessageResult> future =
            brokerController.getEscapeBridge().asyncPutMessage(msgInner);
        return new PutResultProcess()
            .setTopic(msgInner.getTopic())
            .setDelayLevel(this.delayLevel)
            .setOffset(offset)
            .setPhysicOffset(offsetPy)
            .setPhysicSize(sizePy)
            .setMsgId(msgId)
            .setAutoResend(autoResend)
            .setFuture(future)
            .thenProcess();
    }
}
/**
 * Drains the pending async put-result queue for one delay level: advances the
 * offset for successes, resends failures, drops skips, and re-schedules
 * itself (every DELAY_FOR_A_SLEEP ms) while the service is running.
 */
public class HandlePutResultTask implements Runnable {
    private final int delayLevel;
    public HandlePutResultTask(int delayLevel) {
        this.delayLevel = delayLevel;
    }
    @Override
    public void run() {
        LinkedBlockingQueue<PutResultProcess> pendingQueue =
            ScheduleMessageService.this.deliverPendingTable.get(this.delayLevel);
        PutResultProcess putResultProcess;
        // Results are handled strictly in order: a RUNNING head stalls the
        // whole level until its put completes.
        while ((putResultProcess = pendingQueue.peek()) != null) {
            try {
                switch (putResultProcess.getStatus()) {
                    case SUCCESS:
                        ScheduleMessageService.this.updateOffset(this.delayLevel, putResultProcess.getNextOffset());
                        pendingQueue.remove();
                        break;
                    case RUNNING:
                        scheduleNextTask();
                        return;
                    case EXCEPTION:
                        if (!isStarted()) {
                            log.warn("HandlePutResultTask shutdown, info={}", putResultProcess.toString());
                            return;
                        }
                        log.warn("putResultProcess error, info={}", putResultProcess.toString());
                        putResultProcess.doResend();
                        break;
                    case SKIP:
                        log.warn("putResultProcess skip, info={}", putResultProcess.toString());
                        pendingQueue.remove();
                        break;
                }
            } catch (Exception e) {
                log.error("HandlePutResultTask exception. info={}", putResultProcess.toString(), e);
                putResultProcess.doResend();
            }
        }
        scheduleNextTask();
    }
    // Re-arm this handler unless the service has been stopped.
    private void scheduleNextTask() {
        if (isStarted()) {
            ScheduleMessageService.this.handleExecutorService
                .schedule(new HandlePutResultTask(this.delayLevel), DELAY_FOR_A_SLEEP, TimeUnit.MILLISECONDS);
        }
    }
}
/**
 * Tracks one asynchronous put of a due delayed message: holds the source
 * consume-queue/commit-log offsets, the put future, and a status that
 * {@link HandlePutResultTask} consumes to advance the level offset.
 */
public class PutResultProcess {
    private String topic;
    // Consume-queue offset of the scheduled entry being re-delivered.
    private long offset;
    // Commit-log position and size of the original message.
    private long physicOffset;
    private int physicSize;
    private int delayLevel;
    private String msgId;
    // Whether a failed put should be retried via doResend().
    private boolean autoResend = false;
    private CompletableFuture<PutMessageResult> future;
    // NOTE(review): volatile on an AtomicInteger reference is only needed if
    // the reference itself is reassigned; kept as-is for compatibility.
    private volatile AtomicInteger resendCount = new AtomicInteger(0);
    private volatile ProcessStatus status = ProcessStatus.RUNNING;
    // ---- fluent setters used by deliverMessage() ----
    public PutResultProcess setTopic(String topic) {
        this.topic = topic;
        return this;
    }
    public PutResultProcess setOffset(long offset) {
        this.offset = offset;
        return this;
    }
    public PutResultProcess setPhysicOffset(long physicOffset) {
        this.physicOffset = physicOffset;
        return this;
    }
    public PutResultProcess setPhysicSize(int physicSize) {
        this.physicSize = physicSize;
        return this;
    }
    public PutResultProcess setDelayLevel(int delayLevel) {
        this.delayLevel = delayLevel;
        return this;
    }
    public PutResultProcess setMsgId(String msgId) {
        this.msgId = msgId;
        return this;
    }
    public PutResultProcess setAutoResend(boolean autoResend) {
        this.autoResend = autoResend;
        return this;
    }
    public PutResultProcess setFuture(CompletableFuture<PutMessageResult> future) {
        this.future = future;
        return this;
    }
    public String getTopic() {
        return topic;
    }
    public long getOffset() {
        return offset;
    }
    /**
     * @return the offset after this entry; assumes batch size 1 (asserted in
     *     executeOnTimeUp).
     */
    public long getNextOffset() {
        return offset + 1;
    }
    public long getPhysicOffset() {
        return physicOffset;
    }
    public int getPhysicSize() {
        return physicSize;
    }
    public Integer getDelayLevel() {
        return delayLevel;
    }
    public String getMsgId() {
        return msgId;
    }
    public boolean isAutoResend() {
        return autoResend;
    }
    public CompletableFuture<PutMessageResult> getFuture() {
        return future;
    }
    public AtomicInteger getResendCount() {
        return resendCount;
    }
    /**
     * Attaches success/failure callbacks to the put future so this object's
     * status is updated when the put completes.
     */
    public PutResultProcess thenProcess() {
        this.future.thenAccept(this::handleResult);
        this.future.exceptionally(e -> {
            log.error("ScheduleMessageService put message exceptionally, info: {}",
                PutResultProcess.this.toString(), e);
            onException();
            return null;
        });
        return this;
    }
    // Any result other than PUT_OK is treated as a failure.
    private void handleResult(PutMessageResult result) {
        if (result != null && result.getPutMessageStatus() == PutMessageStatus.PUT_OK) {
            onSuccess(result);
        } else {
            log.warn("ScheduleMessageService put message failed. info: {}.", result);
            onException();
        }
    }
/**
 * Marks this process as SUCCESS and, when schedule-message stats are enabled and
 * the message was stored locally (not escaped to a remote broker), records both
 * the consume-side stats of the system schedule topic and the produce-side stats
 * of the real target topic, plus the matching metrics.
 */
public void onSuccess(PutMessageResult result) {
    this.status = ProcessStatus.SUCCESS;
    if (ScheduleMessageService.this.brokerController.getMessageStore().getMessageStoreConfig().isEnableScheduleMessageStats() && !result.isRemotePut()) {
        // "Get" side: the message was consumed from the system schedule topic.
        // delayLevel is 1-based while the schedule queue id is 0-based, hence -1.
        ScheduleMessageService.this.brokerController.getBrokerStatsManager().incQueueGetNums(MixAll.SCHEDULE_CONSUMER_GROUP, TopicValidator.RMQ_SYS_SCHEDULE_TOPIC, delayLevel - 1, result.getAppendMessageResult().getMsgNum());
        ScheduleMessageService.this.brokerController.getBrokerStatsManager().incQueueGetSize(MixAll.SCHEDULE_CONSUMER_GROUP, TopicValidator.RMQ_SYS_SCHEDULE_TOPIC, delayLevel - 1, result.getAppendMessageResult().getWroteBytes());
        ScheduleMessageService.this.brokerController.getBrokerStatsManager().incGroupGetNums(MixAll.SCHEDULE_CONSUMER_GROUP, TopicValidator.RMQ_SYS_SCHEDULE_TOPIC, result.getAppendMessageResult().getMsgNum());
        ScheduleMessageService.this.brokerController.getBrokerStatsManager().incGroupGetSize(MixAll.SCHEDULE_CONSUMER_GROUP, TopicValidator.RMQ_SYS_SCHEDULE_TOPIC, result.getAppendMessageResult().getWroteBytes());
        Attributes attributes = ScheduleMessageService.this.brokerController.getBrokerMetricsManager().newAttributesBuilder()
            .put(LABEL_TOPIC, TopicValidator.RMQ_SYS_SCHEDULE_TOPIC)
            .put(LABEL_CONSUMER_GROUP, MixAll.SCHEDULE_CONSUMER_GROUP)
            .put(LABEL_IS_SYSTEM, true)
            .build();
        ScheduleMessageService.this.brokerController.getBrokerMetricsManager().getMessagesOutTotal().add(result.getAppendMessageResult().getMsgNum(), attributes);
        ScheduleMessageService.this.brokerController.getBrokerMetricsManager().getThroughputOutTotal().add(result.getAppendMessageResult().getWroteBytes(), attributes);
        // "Put" side: the message was re-delivered into its real target topic.
        ScheduleMessageService.this.brokerController.getBrokerStatsManager().incTopicPutNums(this.topic, result.getAppendMessageResult().getMsgNum(), 1);
        ScheduleMessageService.this.brokerController.getBrokerStatsManager().incTopicPutSize(this.topic, result.getAppendMessageResult().getWroteBytes());
        ScheduleMessageService.this.brokerController.getBrokerStatsManager().incBrokerPutNums(this.topic, result.getAppendMessageResult().getMsgNum());
        attributes = ScheduleMessageService.this.brokerController.getBrokerMetricsManager().newAttributesBuilder()
            .put(LABEL_TOPIC, topic)
            .put(LABEL_MESSAGE_TYPE, TopicMessageType.DELAY.getMetricsValue())
            .put(LABEL_IS_SYSTEM, TopicValidator.isSystemTopic(topic))
            .build();
        ScheduleMessageService.this.brokerController.getBrokerMetricsManager().getMessagesInTotal().add(result.getAppendMessageResult().getMsgNum(), attributes);
        ScheduleMessageService.this.brokerController.getBrokerMetricsManager().getThroughputInTotal().add(result.getAppendMessageResult().getWroteBytes(), attributes);
        // NOTE(review): divides by msgNum — would throw ArithmeticException if an
        // append result ever reported zero messages; confirm msgNum >= 1 here.
        ScheduleMessageService.this.brokerController.getBrokerMetricsManager().getMessageSize().record(result.getAppendMessageResult().getWroteBytes() / result.getAppendMessageResult().getMsgNum(), attributes);
    }
}
/**
 * Records a put failure: the process either waits to be resent (EXCEPTION)
 * or is abandoned (SKIP), depending on whether auto-resend was requested.
 */
public void onException() {
    log.warn("ScheduleMessageService onException, info: {}", this.toString());
    this.status = this.autoResend ? ProcessStatus.EXCEPTION : ProcessStatus.SKIP;
}
/** Returns the current lifecycle state of this put process. */
public ProcessStatus getStatus() {
    return this.status;
}
/**
 * Blocks until the async put completes and returns its result.
 * Any failure (interruption or execution error) is mapped to an
 * UNKNOWN_ERROR result rather than propagated.
 */
public PutMessageResult get() {
    try {
        return this.future.get();
    } catch (InterruptedException e) {
        // Restore the interrupt flag so callers up the stack can observe it;
        // the original multi-catch silently swallowed the interruption.
        Thread.currentThread().interrupt();
        return new PutMessageResult(PutMessageStatus.UNKNOWN_ERROR, null);
    } catch (ExecutionException e) {
        return new PutMessageResult(PutMessageStatus.UNKNOWN_ERROR, null);
    }
}
/**
 * Re-reads the message from the commit log and puts it again.
 * Backs off before each attempt (100ms per attempt, capped at 60s). If the
 * message can no longer be found, the process is marked SKIP once the skip
 * threshold is exceeded, otherwise EXCEPTION so it is retried again.
 */
public void doResend() {
    log.info("Resend message, info: {}", this.toString());
    // Gradually increase the resend interval.
    try {
        Thread.sleep(Math.min(this.resendCount.incrementAndGet() * 100, 60 * 1000));
    } catch (InterruptedException e) {
        // Log through the service logger and restore the interrupt flag instead of
        // printStackTrace(), which wrote to stderr and cleared the flag silently.
        Thread.currentThread().interrupt();
        log.warn("Resend backoff interrupted, info: {}", this.toString(), e);
    }
    try {
        MessageExt msgExt = ScheduleMessageService.this.brokerController.getMessageStore().lookMessageByOffset(this.physicOffset, this.physicSize);
        if (msgExt == null) {
            log.warn("ScheduleMessageService resend not found message. info: {}", this.toString());
            this.status = need2Skip() ? ProcessStatus.SKIP : ProcessStatus.EXCEPTION;
            return;
        }
        MessageExtBrokerInner msgInner = ScheduleMessageService.this.messageTimeUp(msgExt);
        PutMessageResult result = ScheduleMessageService.this.brokerController.getEscapeBridge().putMessage(msgInner);
        this.handleResult(result);
        if (result != null && result.getPutMessageStatus() == PutMessageStatus.PUT_OK) {
            log.info("Resend message success, info: {}", this.toString());
        }
    } catch (Exception e) {
        this.status = ProcessStatus.EXCEPTION;
        log.error("Resend message error, info: {}", this.toString(), e);
    }
}
/**
 * Whether delivery should be blocked: true once the resend attempts exceed
 * the configured maximum.
 */
public boolean need2Blocked() {
    int threshold = ScheduleMessageService.this.brokerController.getMessageStore()
        .getMessageStoreConfig().getScheduleAsyncDeliverMaxResendNum2Blocked();
    return this.resendCount.get() > threshold;
}
/**
 * Whether the message should be abandoned: true once the resend attempts
 * exceed twice the configured blocking maximum.
 */
public boolean need2Skip() {
    int threshold = ScheduleMessageService.this.brokerController.getMessageStore()
        .getMessageStoreConfig().getScheduleAsyncDeliverMaxResendNum2Blocked();
    return this.resendCount.get() > threshold * 2;
}
/** Diagnostic dump of every field; the exact format is relied on only by log lines. */
@Override
public String toString() {
    StringBuilder sb = new StringBuilder("PutResultProcess{");
    sb.append("topic='").append(topic).append('\'');
    sb.append(", offset=").append(offset);
    sb.append(", physicOffset=").append(physicOffset);
    sb.append(", physicSize=").append(physicSize);
    sb.append(", delayLevel=").append(delayLevel);
    sb.append(", msgId='").append(msgId).append('\'');
    sb.append(", autoResend=").append(autoResend);
    sb.append(", resendCount=").append(resendCount);
    sb.append(", status=").append(status);
    return sb.append('}').toString();
}
}
/**
 * Lifecycle states of an asynchronous schedule-message put operation.
 */
public enum ProcessStatus {
    /**
     * In process, the processing result has not yet been returned.
     */
    RUNNING,
    /**
     * Put message success.
     */
    SUCCESS,
    /**
     * Put message exception. When autoResend is true, the message will be resent.
     */
    EXCEPTION,
    /**
     * Skip put message. When the message cannot be looked up, the message will be skipped.
     */
    SKIP,
}
/**
 * Exposes the per-delay-level delivery offset table.
 * NOTE(review): returns the internal mutable map directly — callers can mutate
 * scheduler state; confirm this is intended before wrapping it unmodifiable.
 */
public ConcurrentMap<Integer, Long> getOffsetTable() {
    return offsetTable;
}
}
|
googleapis/google-cloud-java | 37,018 | java-recommender/proto-google-cloud-recommender-v1beta1/src/main/java/com/google/cloud/recommender/v1beta1/UpdateRecommenderConfigRequest.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/recommender/v1beta1/recommender_service.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.recommender.v1beta1;
/**
*
*
* <pre>
* Request for the `UpdateRecommenderConfig` method.
* </pre>
*
* Protobuf type {@code google.cloud.recommender.v1beta1.UpdateRecommenderConfigRequest}
*/
public final class UpdateRecommenderConfigRequest extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.recommender.v1beta1.UpdateRecommenderConfigRequest)
UpdateRecommenderConfigRequestOrBuilder {
private static final long serialVersionUID = 0L;

// Use UpdateRecommenderConfigRequest.newBuilder() to construct.
private UpdateRecommenderConfigRequest(
    com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
  super(builder);
}

// No repeated or string fields, so the no-arg constructor has nothing to initialize.
private UpdateRecommenderConfigRequest() {}
// Reflection hook the protobuf runtime uses to allocate fresh instances.
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
  return new UpdateRecommenderConfigRequest();
}
/** Returns the proto descriptor for this generated message type. */
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
  return com.google.cloud.recommender.v1beta1.RecommenderProto
      .internal_static_google_cloud_recommender_v1beta1_UpdateRecommenderConfigRequest_descriptor;
}
// Maps the descriptor's fields to the generated reflective accessors.
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
    internalGetFieldAccessorTable() {
  return com.google.cloud.recommender.v1beta1.RecommenderProto
      .internal_static_google_cloud_recommender_v1beta1_UpdateRecommenderConfigRequest_fieldAccessorTable
      .ensureFieldAccessorsInitialized(
          com.google.cloud.recommender.v1beta1.UpdateRecommenderConfigRequest.class,
          com.google.cloud.recommender.v1beta1.UpdateRecommenderConfigRequest.Builder.class);
}
// Presence bits for the message-typed fields: 0x1 = recommender_config, 0x2 = update_mask.
private int bitField0_;

public static final int RECOMMENDER_CONFIG_FIELD_NUMBER = 1;
// Null when unset; presence is mirrored in bit 0x1 of bitField0_.
private com.google.cloud.recommender.v1beta1.RecommenderConfig recommenderConfig_;
/**
*
*
* <pre>
* Required. The RecommenderConfig to update.
* </pre>
*
* <code>
* .google.cloud.recommender.v1beta1.RecommenderConfig recommender_config = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return Whether the recommenderConfig field is set.
*/
@java.lang.Override
public boolean hasRecommenderConfig() {
  // Explicit presence is tracked in bit 0x1 of bitField0_.
  return ((bitField0_ & 0x00000001) != 0);
}
/**
*
*
* <pre>
* Required. The RecommenderConfig to update.
* </pre>
*
* <code>
* .google.cloud.recommender.v1beta1.RecommenderConfig recommender_config = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return The recommenderConfig.
*/
@java.lang.Override
public com.google.cloud.recommender.v1beta1.RecommenderConfig getRecommenderConfig() {
  // Never returns null: falls back to the default instance when unset.
  return recommenderConfig_ == null
      ? com.google.cloud.recommender.v1beta1.RecommenderConfig.getDefaultInstance()
      : recommenderConfig_;
}
/**
*
*
* <pre>
* Required. The RecommenderConfig to update.
* </pre>
*
* <code>
* .google.cloud.recommender.v1beta1.RecommenderConfig recommender_config = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
@java.lang.Override
public com.google.cloud.recommender.v1beta1.RecommenderConfigOrBuilder
    getRecommenderConfigOrBuilder() {
  // Same null-safe fallback as getRecommenderConfig(), typed as the OrBuilder view.
  return recommenderConfig_ == null
      ? com.google.cloud.recommender.v1beta1.RecommenderConfig.getDefaultInstance()
      : recommenderConfig_;
}
public static final int UPDATE_MASK_FIELD_NUMBER = 2;
// Null when unset; presence is mirrored in bit 0x2 of bitField0_.
private com.google.protobuf.FieldMask updateMask_;
/**
*
*
* <pre>
* The list of fields to be updated.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2;</code>
*
* @return Whether the updateMask field is set.
*/
@java.lang.Override
public boolean hasUpdateMask() {
  // Explicit presence is tracked in bit 0x2 of bitField0_.
  return ((bitField0_ & 0x00000002) != 0);
}
/**
*
*
* <pre>
* The list of fields to be updated.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2;</code>
*
* @return The updateMask.
*/
@java.lang.Override
public com.google.protobuf.FieldMask getUpdateMask() {
  // Never returns null: falls back to the default instance when unset.
  return updateMask_ == null ? com.google.protobuf.FieldMask.getDefaultInstance() : updateMask_;
}
/**
*
*
* <pre>
* The list of fields to be updated.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2;</code>
*/
@java.lang.Override
public com.google.protobuf.FieldMaskOrBuilder getUpdateMaskOrBuilder() {
  // Same null-safe fallback as getUpdateMask(), typed as the OrBuilder view.
  return updateMask_ == null ? com.google.protobuf.FieldMask.getDefaultInstance() : updateMask_;
}
public static final int VALIDATE_ONLY_FIELD_NUMBER = 3;
// Plain proto3 bool: no presence bit, default false.
private boolean validateOnly_ = false;
/**
*
*
* <pre>
* If true, validate the request and preview the change, but do not actually
* update it.
* </pre>
*
* <code>bool validate_only = 3;</code>
*
* @return The validateOnly.
*/
@java.lang.Override
public boolean getValidateOnly() {
  return validateOnly_;
}
// Memoized result of isInitialized(): -1 unknown, 1 true, 0 false.
private byte memoizedIsInitialized = -1;

@java.lang.Override
public final boolean isInitialized() {
  // Proto3 message with no required fields: always initialized once computed.
  byte isInitialized = memoizedIsInitialized;
  if (isInitialized == 1) return true;
  if (isInitialized == 0) return false;
  memoizedIsInitialized = 1;
  return true;
}
// Serializes only the fields that are set (or non-default for bools),
// in ascending field-number order, then any unknown fields.
@java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
  if (((bitField0_ & 0x00000001) != 0)) {
    output.writeMessage(1, getRecommenderConfig());
  }
  if (((bitField0_ & 0x00000002) != 0)) {
    output.writeMessage(2, getUpdateMask());
  }
  if (validateOnly_ != false) {
    output.writeBool(3, validateOnly_);
  }
  getUnknownFields().writeTo(output);
}
// Computes the wire size with the same set-field logic as writeTo();
// memoized in memoizedSize (-1 means not yet computed).
@java.lang.Override
public int getSerializedSize() {
  int size = memoizedSize;
  if (size != -1) return size;

  size = 0;
  if (((bitField0_ & 0x00000001) != 0)) {
    size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, getRecommenderConfig());
  }
  if (((bitField0_ & 0x00000002) != 0)) {
    size += com.google.protobuf.CodedOutputStream.computeMessageSize(2, getUpdateMask());
  }
  if (validateOnly_ != false) {
    size += com.google.protobuf.CodedOutputStream.computeBoolSize(3, validateOnly_);
  }
  size += getUnknownFields().getSerializedSize();
  memoizedSize = size;
  return size;
}
// Value equality: presence flags, every field, and unknown fields must all match.
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
  if (obj == this) {
    return true;
  }
  if (!(obj instanceof com.google.cloud.recommender.v1beta1.UpdateRecommenderConfigRequest)) {
    return super.equals(obj);
  }
  com.google.cloud.recommender.v1beta1.UpdateRecommenderConfigRequest other =
      (com.google.cloud.recommender.v1beta1.UpdateRecommenderConfigRequest) obj;

  if (hasRecommenderConfig() != other.hasRecommenderConfig()) return false;
  if (hasRecommenderConfig()) {
    if (!getRecommenderConfig().equals(other.getRecommenderConfig())) return false;
  }
  if (hasUpdateMask() != other.hasUpdateMask()) return false;
  if (hasUpdateMask()) {
    if (!getUpdateMask().equals(other.getUpdateMask())) return false;
  }
  if (getValidateOnly() != other.getValidateOnly()) return false;
  if (!getUnknownFields().equals(other.getUnknownFields())) return false;
  return true;
}
// Hash mixes descriptor, each set field's number and value, and unknown fields;
// memoized (0 means not yet computed). Consistent with equals().
@java.lang.Override
public int hashCode() {
  if (memoizedHashCode != 0) {
    return memoizedHashCode;
  }
  int hash = 41;
  hash = (19 * hash) + getDescriptor().hashCode();
  if (hasRecommenderConfig()) {
    hash = (37 * hash) + RECOMMENDER_CONFIG_FIELD_NUMBER;
    hash = (53 * hash) + getRecommenderConfig().hashCode();
  }
  if (hasUpdateMask()) {
    hash = (37 * hash) + UPDATE_MASK_FIELD_NUMBER;
    hash = (53 * hash) + getUpdateMask().hashCode();
  }
  hash = (37 * hash) + VALIDATE_ONLY_FIELD_NUMBER;
  hash = (53 * hash) + com.google.protobuf.Internal.hashBoolean(getValidateOnly());
  hash = (29 * hash) + getUnknownFields().hashCode();
  memoizedHashCode = hash;
  return hash;
}
/** Parses a message from a {@code ByteBuffer}. */
public static com.google.cloud.recommender.v1beta1.UpdateRecommenderConfigRequest parseFrom(
    java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data);
}
/** Parses a message from a {@code ByteBuffer} with an extension registry. */
public static com.google.cloud.recommender.v1beta1.UpdateRecommenderConfigRequest parseFrom(
    java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data, extensionRegistry);
}
/** Parses a message from a {@code ByteString}. */
public static com.google.cloud.recommender.v1beta1.UpdateRecommenderConfigRequest parseFrom(
    com.google.protobuf.ByteString data)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data);
}
/** Parses a message from a {@code ByteString} with an extension registry. */
public static com.google.cloud.recommender.v1beta1.UpdateRecommenderConfigRequest parseFrom(
    com.google.protobuf.ByteString data,
    com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data, extensionRegistry);
}
/** Parses a message from a byte array. */
public static com.google.cloud.recommender.v1beta1.UpdateRecommenderConfigRequest parseFrom(
    byte[] data) throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data);
}
/** Parses a message from a byte array with an extension registry. */
public static com.google.cloud.recommender.v1beta1.UpdateRecommenderConfigRequest parseFrom(
    byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data, extensionRegistry);
}
/** Parses a message by consuming the entire input stream. */
public static com.google.cloud.recommender.v1beta1.UpdateRecommenderConfigRequest parseFrom(
    java.io.InputStream input) throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
/** Parses a message by consuming the entire input stream, with an extension registry. */
public static com.google.cloud.recommender.v1beta1.UpdateRecommenderConfigRequest parseFrom(
    java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
      PARSER, input, extensionRegistry);
}
/** Parses one length-prefixed (delimited) message from the stream. */
public static com.google.cloud.recommender.v1beta1.UpdateRecommenderConfigRequest
    parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
}
/** Parses one length-prefixed (delimited) message from the stream, with a registry. */
public static com.google.cloud.recommender.v1beta1.UpdateRecommenderConfigRequest
    parseDelimitedFrom(
        java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
      PARSER, input, extensionRegistry);
}
/** Parses a message from an existing {@code CodedInputStream}. */
public static com.google.cloud.recommender.v1beta1.UpdateRecommenderConfigRequest parseFrom(
    com.google.protobuf.CodedInputStream input) throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
/** Parses a message from an existing {@code CodedInputStream}, with a registry. */
public static com.google.cloud.recommender.v1beta1.UpdateRecommenderConfigRequest parseFrom(
    com.google.protobuf.CodedInputStream input,
    com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
      PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() {
  return newBuilder();
}
/** Creates a builder initialized to all-default values. */
public static Builder newBuilder() {
  return DEFAULT_INSTANCE.toBuilder();
}
/** Creates a builder pre-populated from {@code prototype}. */
public static Builder newBuilder(
    com.google.cloud.recommender.v1beta1.UpdateRecommenderConfigRequest prototype) {
  return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
// Avoids a useless merge when this is the default instance.
@java.lang.Override
public Builder toBuilder() {
  return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
}
// Creates a builder registered with a parent, used for nested builder plumbing.
@java.lang.Override
protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
  Builder builder = new Builder(parent);
  return builder;
}
/**
*
*
* <pre>
* Request for the `UpdateRecommenderConfig` method.
* </pre>
*
* Protobuf type {@code google.cloud.recommender.v1beta1.UpdateRecommenderConfigRequest}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.cloud.recommender.v1beta1.UpdateRecommenderConfigRequest)
com.google.cloud.recommender.v1beta1.UpdateRecommenderConfigRequestOrBuilder {
/** Returns the proto descriptor shared with the message type. */
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
  return com.google.cloud.recommender.v1beta1.RecommenderProto
      .internal_static_google_cloud_recommender_v1beta1_UpdateRecommenderConfigRequest_descriptor;
}
// Same field-accessor table as the message class.
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
    internalGetFieldAccessorTable() {
  return com.google.cloud.recommender.v1beta1.RecommenderProto
      .internal_static_google_cloud_recommender_v1beta1_UpdateRecommenderConfigRequest_fieldAccessorTable
      .ensureFieldAccessorsInitialized(
          com.google.cloud.recommender.v1beta1.UpdateRecommenderConfigRequest.class,
          com.google.cloud.recommender.v1beta1.UpdateRecommenderConfigRequest.Builder.class);
}
// Construct using
// com.google.cloud.recommender.v1beta1.UpdateRecommenderConfigRequest.newBuilder()
private Builder() {
  maybeForceBuilderInitialization();
}

// Parent-aware constructor used by nested-builder plumbing.
private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
  super(parent);
  maybeForceBuilderInitialization();
}
// Eagerly creates sub-field builders when the runtime requires it (reflection mode).
private void maybeForceBuilderInitialization() {
  if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) {
    getRecommenderConfigFieldBuilder();
    getUpdateMaskFieldBuilder();
  }
}
// Resets every field to its default and releases any sub-field builders.
@java.lang.Override
public Builder clear() {
  super.clear();
  bitField0_ = 0;
  recommenderConfig_ = null;
  if (recommenderConfigBuilder_ != null) {
    recommenderConfigBuilder_.dispose();
    recommenderConfigBuilder_ = null;
  }
  updateMask_ = null;
  if (updateMaskBuilder_ != null) {
    updateMaskBuilder_.dispose();
    updateMaskBuilder_ = null;
  }
  validateOnly_ = false;
  return this;
}
@java.lang.Override
public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
  return com.google.cloud.recommender.v1beta1.RecommenderProto
      .internal_static_google_cloud_recommender_v1beta1_UpdateRecommenderConfigRequest_descriptor;
}
@java.lang.Override
public com.google.cloud.recommender.v1beta1.UpdateRecommenderConfigRequest
    getDefaultInstanceForType() {
  return com.google.cloud.recommender.v1beta1.UpdateRecommenderConfigRequest
      .getDefaultInstance();
}
// Builds and verifies initialization (trivially true for this proto3 message).
@java.lang.Override
public com.google.cloud.recommender.v1beta1.UpdateRecommenderConfigRequest build() {
  com.google.cloud.recommender.v1beta1.UpdateRecommenderConfigRequest result = buildPartial();
  if (!result.isInitialized()) {
    throw newUninitializedMessageException(result);
  }
  return result;
}
// Builds without the initialization check; field copying happens in buildPartial0.
@java.lang.Override
public com.google.cloud.recommender.v1beta1.UpdateRecommenderConfigRequest buildPartial() {
  com.google.cloud.recommender.v1beta1.UpdateRecommenderConfigRequest result =
      new com.google.cloud.recommender.v1beta1.UpdateRecommenderConfigRequest(this);
  if (bitField0_ != 0) {
    buildPartial0(result);
  }
  onBuilt();
  return result;
}
// Copies each set field from the builder into the message, preferring the
// sub-builder's built value when one exists, and transfers the presence bits.
private void buildPartial0(
    com.google.cloud.recommender.v1beta1.UpdateRecommenderConfigRequest result) {
  int from_bitField0_ = bitField0_;
  int to_bitField0_ = 0;
  if (((from_bitField0_ & 0x00000001) != 0)) {
    result.recommenderConfig_ =
        recommenderConfigBuilder_ == null
            ? recommenderConfig_
            : recommenderConfigBuilder_.build();
    to_bitField0_ |= 0x00000001;
  }
  if (((from_bitField0_ & 0x00000002) != 0)) {
    result.updateMask_ = updateMaskBuilder_ == null ? updateMask_ : updateMaskBuilder_.build();
    to_bitField0_ |= 0x00000002;
  }
  if (((from_bitField0_ & 0x00000004) != 0)) {
    result.validateOnly_ = validateOnly_;
  }
  result.bitField0_ |= to_bitField0_;
}
// Straight delegation to GeneratedMessageV3.Builder; kept for covariant return.
@java.lang.Override
public Builder clone() {
  return super.clone();
}
// Reflective setter; delegates to the base class.
@java.lang.Override
public Builder setField(
    com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
  return super.setField(field, value);
}
// Reflective clear; delegates to the base class.
@java.lang.Override
public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
  return super.clearField(field);
}
// Reflective oneof clear; delegates to the base class.
@java.lang.Override
public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
  return super.clearOneof(oneof);
}
// Reflective repeated-field setter; delegates to the base class.
@java.lang.Override
public Builder setRepeatedField(
    com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
  return super.setRepeatedField(field, index, value);
}
// Reflective repeated-field adder; delegates to the base class.
@java.lang.Override
public Builder addRepeatedField(
    com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
  return super.addRepeatedField(field, value);
}
// Dispatches to the typed merge when possible, otherwise reflective merge.
@java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
  if (other instanceof com.google.cloud.recommender.v1beta1.UpdateRecommenderConfigRequest) {
    return mergeFrom(
        (com.google.cloud.recommender.v1beta1.UpdateRecommenderConfigRequest) other);
  } else {
    super.mergeFrom(other);
    return this;
  }
}
// Field-wise merge: fields set in `other` are merged into this builder,
// unset fields are left untouched; unknown fields are merged as well.
public Builder mergeFrom(
    com.google.cloud.recommender.v1beta1.UpdateRecommenderConfigRequest other) {
  if (other
      == com.google.cloud.recommender.v1beta1.UpdateRecommenderConfigRequest
          .getDefaultInstance()) return this;
  if (other.hasRecommenderConfig()) {
    mergeRecommenderConfig(other.getRecommenderConfig());
  }
  if (other.hasUpdateMask()) {
    mergeUpdateMask(other.getUpdateMask());
  }
  if (other.getValidateOnly() != false) {
    setValidateOnly(other.getValidateOnly());
  }
  this.mergeUnknownFields(other.getUnknownFields());
  onChanged();
  return this;
}
// No required fields, so a builder is always initialized.
@java.lang.Override
public final boolean isInitialized() {
  return true;
}
// Wire-format parse loop: each case value is (field_number << 3) | wire_type.
@java.lang.Override
public Builder mergeFrom(
    com.google.protobuf.CodedInputStream input,
    com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws java.io.IOException {
  if (extensionRegistry == null) {
    throw new java.lang.NullPointerException();
  }
  try {
    boolean done = false;
    while (!done) {
      int tag = input.readTag();
      switch (tag) {
        case 0:
          done = true;
          break;
        case 10:
          {
            // field 1 (recommender_config), length-delimited message.
            input.readMessage(
                getRecommenderConfigFieldBuilder().getBuilder(), extensionRegistry);
            bitField0_ |= 0x00000001;
            break;
          } // case 10
        case 18:
          {
            // field 2 (update_mask), length-delimited message.
            input.readMessage(getUpdateMaskFieldBuilder().getBuilder(), extensionRegistry);
            bitField0_ |= 0x00000002;
            break;
          } // case 18
        case 24:
          {
            // field 3 (validate_only), varint bool.
            validateOnly_ = input.readBool();
            bitField0_ |= 0x00000004;
            break;
          } // case 24
        default:
          {
            if (!super.parseUnknownField(input, extensionRegistry, tag)) {
              done = true; // was an endgroup tag
            }
            break;
          } // default:
      } // switch (tag)
    } // while (!done)
  } catch (com.google.protobuf.InvalidProtocolBufferException e) {
    throw e.unwrapIOException();
  } finally {
    onChanged();
  } // finally
  return this;
}
// Presence/modified bits: 0x1 recommender_config, 0x2 update_mask, 0x4 validate_only.
private int bitField0_;

// Field value and its lazily created sub-builder; at most one of the two is live.
private com.google.cloud.recommender.v1beta1.RecommenderConfig recommenderConfig_;
private com.google.protobuf.SingleFieldBuilderV3<
        com.google.cloud.recommender.v1beta1.RecommenderConfig,
        com.google.cloud.recommender.v1beta1.RecommenderConfig.Builder,
        com.google.cloud.recommender.v1beta1.RecommenderConfigOrBuilder>
    recommenderConfigBuilder_;
/**
*
*
* <pre>
* Required. The RecommenderConfig to update.
* </pre>
*
* <code>
* .google.cloud.recommender.v1beta1.RecommenderConfig recommender_config = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return Whether the recommenderConfig field is set.
*/
public boolean hasRecommenderConfig() {
return ((bitField0_ & 0x00000001) != 0);
}
/**
*
*
* <pre>
* Required. The RecommenderConfig to update.
* </pre>
*
* <code>
* .google.cloud.recommender.v1beta1.RecommenderConfig recommender_config = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return The recommenderConfig.
*/
public com.google.cloud.recommender.v1beta1.RecommenderConfig getRecommenderConfig() {
if (recommenderConfigBuilder_ == null) {
return recommenderConfig_ == null
? com.google.cloud.recommender.v1beta1.RecommenderConfig.getDefaultInstance()
: recommenderConfig_;
} else {
return recommenderConfigBuilder_.getMessage();
}
}
/**
*
*
* <pre>
* Required. The RecommenderConfig to update.
* </pre>
*
* <code>
* .google.cloud.recommender.v1beta1.RecommenderConfig recommender_config = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public Builder setRecommenderConfig(
com.google.cloud.recommender.v1beta1.RecommenderConfig value) {
if (recommenderConfigBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
recommenderConfig_ = value;
} else {
recommenderConfigBuilder_.setMessage(value);
}
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. The RecommenderConfig to update.
* </pre>
*
* <code>
* .google.cloud.recommender.v1beta1.RecommenderConfig recommender_config = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public Builder setRecommenderConfig(
com.google.cloud.recommender.v1beta1.RecommenderConfig.Builder builderForValue) {
if (recommenderConfigBuilder_ == null) {
recommenderConfig_ = builderForValue.build();
} else {
recommenderConfigBuilder_.setMessage(builderForValue.build());
}
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. The RecommenderConfig to update.
* </pre>
*
* <code>
* .google.cloud.recommender.v1beta1.RecommenderConfig recommender_config = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
// Merges into an existing non-default value; otherwise replaces wholesale.
public Builder mergeRecommenderConfig(
    com.google.cloud.recommender.v1beta1.RecommenderConfig value) {
  if (recommenderConfigBuilder_ == null) {
    if (((bitField0_ & 0x00000001) != 0)
        && recommenderConfig_ != null
        && recommenderConfig_
            != com.google.cloud.recommender.v1beta1.RecommenderConfig.getDefaultInstance()) {
      getRecommenderConfigBuilder().mergeFrom(value);
    } else {
      recommenderConfig_ = value;
    }
  } else {
    recommenderConfigBuilder_.mergeFrom(value);
  }
  if (recommenderConfig_ != null) {
    bitField0_ |= 0x00000001;
    onChanged();
  }
  return this;
}
/**
*
*
* <pre>
* Required. The RecommenderConfig to update.
* </pre>
*
* <code>
* .google.cloud.recommender.v1beta1.RecommenderConfig recommender_config = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
// Clears the field value, presence bit, and any live sub-builder.
public Builder clearRecommenderConfig() {
  bitField0_ = (bitField0_ & ~0x00000001);
  recommenderConfig_ = null;
  if (recommenderConfigBuilder_ != null) {
    recommenderConfigBuilder_.dispose();
    recommenderConfigBuilder_ = null;
  }
  onChanged();
  return this;
}
/**
*
*
* <pre>
* Required. The RecommenderConfig to update.
* </pre>
*
* <code>
* .google.cloud.recommender.v1beta1.RecommenderConfig recommender_config = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public com.google.cloud.recommender.v1beta1.RecommenderConfig.Builder
getRecommenderConfigBuilder() {
bitField0_ |= 0x00000001;
onChanged();
return getRecommenderConfigFieldBuilder().getBuilder();
}
/**
*
*
* <pre>
* Required. The RecommenderConfig to update.
* </pre>
*
* <code>
* .google.cloud.recommender.v1beta1.RecommenderConfig recommender_config = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public com.google.cloud.recommender.v1beta1.RecommenderConfigOrBuilder
getRecommenderConfigOrBuilder() {
if (recommenderConfigBuilder_ != null) {
return recommenderConfigBuilder_.getMessageOrBuilder();
} else {
return recommenderConfig_ == null
? com.google.cloud.recommender.v1beta1.RecommenderConfig.getDefaultInstance()
: recommenderConfig_;
}
}
/**
*
*
* <pre>
* Required. The RecommenderConfig to update.
* </pre>
*
* <code>
* .google.cloud.recommender.v1beta1.RecommenderConfig recommender_config = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
// Lazily creates the sub-field builder; once created, it owns the field value
// and the plain recommenderConfig_ reference is dropped.
private com.google.protobuf.SingleFieldBuilderV3<
        com.google.cloud.recommender.v1beta1.RecommenderConfig,
        com.google.cloud.recommender.v1beta1.RecommenderConfig.Builder,
        com.google.cloud.recommender.v1beta1.RecommenderConfigOrBuilder>
    getRecommenderConfigFieldBuilder() {
  if (recommenderConfigBuilder_ == null) {
    recommenderConfigBuilder_ =
        new com.google.protobuf.SingleFieldBuilderV3<
            com.google.cloud.recommender.v1beta1.RecommenderConfig,
            com.google.cloud.recommender.v1beta1.RecommenderConfig.Builder,
            com.google.cloud.recommender.v1beta1.RecommenderConfigOrBuilder>(
            getRecommenderConfig(), getParentForChildren(), isClean());
    recommenderConfig_ = null;
  }
  return recommenderConfigBuilder_;
}
    // Generated accessors for the update_mask field (tag 2).
    // Presence is tracked by bit 0x00000002 of bitField0_; the lazily created
    // SingleFieldBuilderV3 takes ownership of the message once any builder-view
    // accessor is used (updateMask_ is then nulled out).
    private com.google.protobuf.FieldMask updateMask_;
    private com.google.protobuf.SingleFieldBuilderV3<
            com.google.protobuf.FieldMask,
            com.google.protobuf.FieldMask.Builder,
            com.google.protobuf.FieldMaskOrBuilder>
        updateMaskBuilder_;
    /**
     *
     *
     * <pre>
     * The list of fields to be updated.
     * </pre>
     *
     * <code>.google.protobuf.FieldMask update_mask = 2;</code>
     *
     * @return Whether the updateMask field is set.
     */
    public boolean hasUpdateMask() {
      return ((bitField0_ & 0x00000002) != 0);
    }
    /**
     *
     *
     * <pre>
     * The list of fields to be updated.
     * </pre>
     *
     * <code>.google.protobuf.FieldMask update_mask = 2;</code>
     *
     * @return The updateMask.
     */
    public com.google.protobuf.FieldMask getUpdateMask() {
      if (updateMaskBuilder_ == null) {
        // No builder yet: read the plain field, defaulting to the empty FieldMask.
        return updateMask_ == null
            ? com.google.protobuf.FieldMask.getDefaultInstance()
            : updateMask_;
      } else {
        return updateMaskBuilder_.getMessage();
      }
    }
    /**
     *
     *
     * <pre>
     * The list of fields to be updated.
     * </pre>
     *
     * <code>.google.protobuf.FieldMask update_mask = 2;</code>
     */
    public Builder setUpdateMask(com.google.protobuf.FieldMask value) {
      if (updateMaskBuilder_ == null) {
        if (value == null) {
          throw new NullPointerException();
        }
        updateMask_ = value;
      } else {
        updateMaskBuilder_.setMessage(value);
      }
      bitField0_ |= 0x00000002;
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * The list of fields to be updated.
     * </pre>
     *
     * <code>.google.protobuf.FieldMask update_mask = 2;</code>
     */
    public Builder setUpdateMask(com.google.protobuf.FieldMask.Builder builderForValue) {
      if (updateMaskBuilder_ == null) {
        updateMask_ = builderForValue.build();
      } else {
        updateMaskBuilder_.setMessage(builderForValue.build());
      }
      bitField0_ |= 0x00000002;
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * The list of fields to be updated.
     * </pre>
     *
     * <code>.google.protobuf.FieldMask update_mask = 2;</code>
     */
    public Builder mergeUpdateMask(com.google.protobuf.FieldMask value) {
      if (updateMaskBuilder_ == null) {
        // Merge field-by-field only when a non-default value is already present;
        // otherwise adopt the incoming message wholesale.
        if (((bitField0_ & 0x00000002) != 0)
            && updateMask_ != null
            && updateMask_ != com.google.protobuf.FieldMask.getDefaultInstance()) {
          getUpdateMaskBuilder().mergeFrom(value);
        } else {
          updateMask_ = value;
        }
      } else {
        updateMaskBuilder_.mergeFrom(value);
      }
      // Set the presence bit only when a message is actually stored.
      if (updateMask_ != null) {
        bitField0_ |= 0x00000002;
        onChanged();
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * The list of fields to be updated.
     * </pre>
     *
     * <code>.google.protobuf.FieldMask update_mask = 2;</code>
     */
    public Builder clearUpdateMask() {
      bitField0_ = (bitField0_ & ~0x00000002);
      updateMask_ = null;
      if (updateMaskBuilder_ != null) {
        updateMaskBuilder_.dispose();
        updateMaskBuilder_ = null;
      }
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * The list of fields to be updated.
     * </pre>
     *
     * <code>.google.protobuf.FieldMask update_mask = 2;</code>
     */
    public com.google.protobuf.FieldMask.Builder getUpdateMaskBuilder() {
      bitField0_ |= 0x00000002;
      onChanged();
      return getUpdateMaskFieldBuilder().getBuilder();
    }
    /**
     *
     *
     * <pre>
     * The list of fields to be updated.
     * </pre>
     *
     * <code>.google.protobuf.FieldMask update_mask = 2;</code>
     */
    public com.google.protobuf.FieldMaskOrBuilder getUpdateMaskOrBuilder() {
      if (updateMaskBuilder_ != null) {
        return updateMaskBuilder_.getMessageOrBuilder();
      } else {
        return updateMask_ == null
            ? com.google.protobuf.FieldMask.getDefaultInstance()
            : updateMask_;
      }
    }
    /**
     *
     *
     * <pre>
     * The list of fields to be updated.
     * </pre>
     *
     * <code>.google.protobuf.FieldMask update_mask = 2;</code>
     */
    private com.google.protobuf.SingleFieldBuilderV3<
            com.google.protobuf.FieldMask,
            com.google.protobuf.FieldMask.Builder,
            com.google.protobuf.FieldMaskOrBuilder>
        getUpdateMaskFieldBuilder() {
      if (updateMaskBuilder_ == null) {
        // Lazily create the nested builder; from here on updateMask_ stays null
        // and the builder is the single source of truth for the field.
        updateMaskBuilder_ =
            new com.google.protobuf.SingleFieldBuilderV3<
                com.google.protobuf.FieldMask,
                com.google.protobuf.FieldMask.Builder,
                com.google.protobuf.FieldMaskOrBuilder>(
                getUpdateMask(), getParentForChildren(), isClean());
        updateMask_ = null;
      }
      return updateMaskBuilder_;
    }
    // Generated accessors for the validate_only field (tag 3);
    // presence tracked by bit 0x00000004 of bitField0_.
    private boolean validateOnly_;
    /**
     *
     *
     * <pre>
     * If true, validate the request and preview the change, but do not actually
     * update it.
     * </pre>
     *
     * <code>bool validate_only = 3;</code>
     *
     * @return The validateOnly.
     */
    @java.lang.Override
    public boolean getValidateOnly() {
      return validateOnly_;
    }
    /**
     *
     *
     * <pre>
     * If true, validate the request and preview the change, but do not actually
     * update it.
     * </pre>
     *
     * <code>bool validate_only = 3;</code>
     *
     * @param value The validateOnly to set.
     * @return This builder for chaining.
     */
    public Builder setValidateOnly(boolean value) {
      validateOnly_ = value;
      bitField0_ |= 0x00000004;
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * If true, validate the request and preview the change, but do not actually
     * update it.
     * </pre>
     *
     * <code>bool validate_only = 3;</code>
     *
     * @return This builder for chaining.
     */
    public Builder clearValidateOnly() {
      bitField0_ = (bitField0_ & ~0x00000004);
      validateOnly_ = false;
      onChanged();
      return this;
    }
    // Unknown-field handling is delegated unchanged to the generated superclass.
    @java.lang.Override
    public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.setUnknownFields(unknownFields);
    }
    @java.lang.Override
    public final Builder mergeUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.mergeUnknownFields(unknownFields);
    }
// @@protoc_insertion_point(builder_scope:google.cloud.recommender.v1beta1.UpdateRecommenderConfigRequest)
}
  // @@protoc_insertion_point(class_scope:google.cloud.recommender.v1beta1.UpdateRecommenderConfigRequest)
  // Singleton default instance; created eagerly at class-load time.
  private static final com.google.cloud.recommender.v1beta1.UpdateRecommenderConfigRequest
      DEFAULT_INSTANCE;
  static {
    DEFAULT_INSTANCE = new com.google.cloud.recommender.v1beta1.UpdateRecommenderConfigRequest();
  }
  public static com.google.cloud.recommender.v1beta1.UpdateRecommenderConfigRequest
      getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }
  // Wire-format parser. On any failure it attaches the partially parsed message
  // to the thrown InvalidProtocolBufferException so callers can inspect it.
  private static final com.google.protobuf.Parser<UpdateRecommenderConfigRequest> PARSER =
      new com.google.protobuf.AbstractParser<UpdateRecommenderConfigRequest>() {
        @java.lang.Override
        public UpdateRecommenderConfigRequest parsePartialFrom(
            com.google.protobuf.CodedInputStream input,
            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
            throws com.google.protobuf.InvalidProtocolBufferException {
          Builder builder = newBuilder();
          try {
            builder.mergeFrom(input, extensionRegistry);
          } catch (com.google.protobuf.InvalidProtocolBufferException e) {
            throw e.setUnfinishedMessage(builder.buildPartial());
          } catch (com.google.protobuf.UninitializedMessageException e) {
            throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
          } catch (java.io.IOException e) {
            // Wrap plain I/O errors so the parse API surfaces a single exception type.
            throw new com.google.protobuf.InvalidProtocolBufferException(e)
                .setUnfinishedMessage(builder.buildPartial());
          }
          return builder.buildPartial();
        }
      };
  public static com.google.protobuf.Parser<UpdateRecommenderConfigRequest> parser() {
    return PARSER;
  }
  @java.lang.Override
  public com.google.protobuf.Parser<UpdateRecommenderConfigRequest> getParserForType() {
    return PARSER;
  }
  @java.lang.Override
  public com.google.cloud.recommender.v1beta1.UpdateRecommenderConfigRequest
      getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }
}
// ---- End of generated file: UpdateRecommenderConfigRequest.java (repo googleapis/google-cloud-java) ----
// ---- Begin generated file: java-speech/proto-google-cloud-speech-v2/src/main/java/com/google/cloud/speech/v2/ListRecognizersResponse.java (repo googleapis/google-cloud-java, 37,026 bytes) ----
/*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/speech/v2/cloud_speech.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.speech.v2;
/**
*
*
* <pre>
* Response message for the
* [ListRecognizers][google.cloud.speech.v2.Speech.ListRecognizers] method.
* </pre>
*
* Protobuf type {@code google.cloud.speech.v2.ListRecognizersResponse}
*/
public final class ListRecognizersResponse extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.speech.v2.ListRecognizersResponse)
ListRecognizersResponseOrBuilder {
  private static final long serialVersionUID = 0L;
  // Use ListRecognizersResponse.newBuilder() to construct.
  private ListRecognizersResponse(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
    super(builder);
  }
  // No-arg constructor used only for the default instance; initializes
  // repeated/string fields to their empty defaults.
  private ListRecognizersResponse() {
    recognizers_ = java.util.Collections.emptyList();
    nextPageToken_ = "";
  }
  @java.lang.Override
  @SuppressWarnings({"unused"})
  protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
    return new ListRecognizersResponse();
  }
  // Descriptor/reflection plumbing generated from cloud_speech.proto.
  public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
    return com.google.cloud.speech.v2.CloudSpeechProto
        .internal_static_google_cloud_speech_v2_ListRecognizersResponse_descriptor;
  }
  @java.lang.Override
  protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internalGetFieldAccessorTable() {
    return com.google.cloud.speech.v2.CloudSpeechProto
        .internal_static_google_cloud_speech_v2_ListRecognizersResponse_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            com.google.cloud.speech.v2.ListRecognizersResponse.class,
            com.google.cloud.speech.v2.ListRecognizersResponse.Builder.class);
  }
  public static final int RECOGNIZERS_FIELD_NUMBER = 1;
  // Immutable snapshot list produced by the Builder; never null after construction.
  @SuppressWarnings("serial")
  private java.util.List<com.google.cloud.speech.v2.Recognizer> recognizers_;
  /**
   *
   *
   * <pre>
   * The list of requested Recognizers.
   * </pre>
   *
   * <code>repeated .google.cloud.speech.v2.Recognizer recognizers = 1;</code>
   */
  @java.lang.Override
  public java.util.List<com.google.cloud.speech.v2.Recognizer> getRecognizersList() {
    return recognizers_;
  }
  /**
   *
   *
   * <pre>
   * The list of requested Recognizers.
   * </pre>
   *
   * <code>repeated .google.cloud.speech.v2.Recognizer recognizers = 1;</code>
   */
  @java.lang.Override
  public java.util.List<? extends com.google.cloud.speech.v2.RecognizerOrBuilder>
      getRecognizersOrBuilderList() {
    return recognizers_;
  }
  /**
   *
   *
   * <pre>
   * The list of requested Recognizers.
   * </pre>
   *
   * <code>repeated .google.cloud.speech.v2.Recognizer recognizers = 1;</code>
   */
  @java.lang.Override
  public int getRecognizersCount() {
    return recognizers_.size();
  }
  /**
   *
   *
   * <pre>
   * The list of requested Recognizers.
   * </pre>
   *
   * <code>repeated .google.cloud.speech.v2.Recognizer recognizers = 1;</code>
   */
  @java.lang.Override
  public com.google.cloud.speech.v2.Recognizer getRecognizers(int index) {
    return recognizers_.get(index);
  }
  /**
   *
   *
   * <pre>
   * The list of requested Recognizers.
   * </pre>
   *
   * <code>repeated .google.cloud.speech.v2.Recognizer recognizers = 1;</code>
   */
  @java.lang.Override
  public com.google.cloud.speech.v2.RecognizerOrBuilder getRecognizersOrBuilder(int index) {
    return recognizers_.get(index);
  }
  public static final int NEXT_PAGE_TOKEN_FIELD_NUMBER = 2;
  // Stored as Object so the field can hold either a String or a lazily
  // decoded ByteString; getters memoize the converted form in place.
  @SuppressWarnings("serial")
  private volatile java.lang.Object nextPageToken_ = "";
  /**
   *
   *
   * <pre>
   * A token, which can be sent as
   * [page_token][google.cloud.speech.v2.ListRecognizersRequest.page_token] to
   * retrieve the next page. If this field is omitted, there are no subsequent
   * pages. This token expires after 72 hours.
   * </pre>
   *
   * <code>string next_page_token = 2;</code>
   *
   * @return The nextPageToken.
   */
  @java.lang.Override
  public java.lang.String getNextPageToken() {
    java.lang.Object ref = nextPageToken_;
    if (ref instanceof java.lang.String) {
      return (java.lang.String) ref;
    } else {
      // Decode from bytes once and cache the String back into the field.
      com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
      java.lang.String s = bs.toStringUtf8();
      nextPageToken_ = s;
      return s;
    }
  }
  /**
   *
   *
   * <pre>
   * A token, which can be sent as
   * [page_token][google.cloud.speech.v2.ListRecognizersRequest.page_token] to
   * retrieve the next page. If this field is omitted, there are no subsequent
   * pages. This token expires after 72 hours.
   * </pre>
   *
   * <code>string next_page_token = 2;</code>
   *
   * @return The bytes for nextPageToken.
   */
  @java.lang.Override
  public com.google.protobuf.ByteString getNextPageTokenBytes() {
    java.lang.Object ref = nextPageToken_;
    if (ref instanceof java.lang.String) {
      com.google.protobuf.ByteString b =
          com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
      nextPageToken_ = b;
      return b;
    } else {
      return (com.google.protobuf.ByteString) ref;
    }
  }
  // Memoized initialization state: -1 unknown, 0 not initialized, 1 initialized.
  private byte memoizedIsInitialized = -1;
  @java.lang.Override
  public final boolean isInitialized() {
    byte isInitialized = memoizedIsInitialized;
    if (isInitialized == 1) return true;
    if (isInitialized == 0) return false;
    // No required fields in this message, so it is always initialized.
    memoizedIsInitialized = 1;
    return true;
  }
  @java.lang.Override
  public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
    for (int i = 0; i < recognizers_.size(); i++) {
      output.writeMessage(1, recognizers_.get(i));
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 2, nextPageToken_);
    }
    getUnknownFields().writeTo(output);
  }
  @java.lang.Override
  public int getSerializedSize() {
    // Serialized size is memoized; -1 means not yet computed.
    int size = memoizedSize;
    if (size != -1) return size;
    size = 0;
    for (int i = 0; i < recognizers_.size(); i++) {
      size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, recognizers_.get(i));
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, nextPageToken_);
    }
    size += getUnknownFields().getSerializedSize();
    memoizedSize = size;
    return size;
  }
  @java.lang.Override
  public boolean equals(final java.lang.Object obj) {
    if (obj == this) {
      return true;
    }
    if (!(obj instanceof com.google.cloud.speech.v2.ListRecognizersResponse)) {
      return super.equals(obj);
    }
    com.google.cloud.speech.v2.ListRecognizersResponse other =
        (com.google.cloud.speech.v2.ListRecognizersResponse) obj;
    if (!getRecognizersList().equals(other.getRecognizersList())) return false;
    if (!getNextPageToken().equals(other.getNextPageToken())) return false;
    if (!getUnknownFields().equals(other.getUnknownFields())) return false;
    return true;
  }
  @java.lang.Override
  public int hashCode() {
    // Hash is memoized; 0 doubles as the "not computed" sentinel.
    if (memoizedHashCode != 0) {
      return memoizedHashCode;
    }
    int hash = 41;
    hash = (19 * hash) + getDescriptor().hashCode();
    if (getRecognizersCount() > 0) {
      hash = (37 * hash) + RECOGNIZERS_FIELD_NUMBER;
      hash = (53 * hash) + getRecognizersList().hashCode();
    }
    hash = (37 * hash) + NEXT_PAGE_TOKEN_FIELD_NUMBER;
    hash = (53 * hash) + getNextPageToken().hashCode();
    hash = (29 * hash) + getUnknownFields().hashCode();
    memoizedHashCode = hash;
    return hash;
  }
  // Standard generated parse entry points; all delegate to PARSER or to the
  // GeneratedMessageV3 stream helpers.
  public static com.google.cloud.speech.v2.ListRecognizersResponse parseFrom(
      java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.cloud.speech.v2.ListRecognizersResponse parseFrom(
      java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.cloud.speech.v2.ListRecognizersResponse parseFrom(
      com.google.protobuf.ByteString data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.cloud.speech.v2.ListRecognizersResponse parseFrom(
      com.google.protobuf.ByteString data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.cloud.speech.v2.ListRecognizersResponse parseFrom(byte[] data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.cloud.speech.v2.ListRecognizersResponse parseFrom(
      byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.cloud.speech.v2.ListRecognizersResponse parseFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }
  public static com.google.cloud.speech.v2.ListRecognizersResponse parseFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }
  public static com.google.cloud.speech.v2.ListRecognizersResponse parseDelimitedFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
  }
  public static com.google.cloud.speech.v2.ListRecognizersResponse parseDelimitedFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
        PARSER, input, extensionRegistry);
  }
  public static com.google.cloud.speech.v2.ListRecognizersResponse parseFrom(
      com.google.protobuf.CodedInputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }
  public static com.google.cloud.speech.v2.ListRecognizersResponse parseFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }
  // Builder factories: new builders start from the default instance's state.
  @java.lang.Override
  public Builder newBuilderForType() {
    return newBuilder();
  }
  public static Builder newBuilder() {
    return DEFAULT_INSTANCE.toBuilder();
  }
  public static Builder newBuilder(com.google.cloud.speech.v2.ListRecognizersResponse prototype) {
    return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
  }
  @java.lang.Override
  public Builder toBuilder() {
    return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
  }
  @java.lang.Override
  protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
    Builder builder = new Builder(parent);
    return builder;
  }
/**
*
*
* <pre>
* Response message for the
* [ListRecognizers][google.cloud.speech.v2.Speech.ListRecognizers] method.
* </pre>
*
* Protobuf type {@code google.cloud.speech.v2.ListRecognizersResponse}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.cloud.speech.v2.ListRecognizersResponse)
com.google.cloud.speech.v2.ListRecognizersResponseOrBuilder {
    // Descriptor/reflection plumbing for the Builder, mirroring the message class.
    public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
      return com.google.cloud.speech.v2.CloudSpeechProto
          .internal_static_google_cloud_speech_v2_ListRecognizersResponse_descriptor;
    }
    @java.lang.Override
    protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return com.google.cloud.speech.v2.CloudSpeechProto
          .internal_static_google_cloud_speech_v2_ListRecognizersResponse_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              com.google.cloud.speech.v2.ListRecognizersResponse.class,
              com.google.cloud.speech.v2.ListRecognizersResponse.Builder.class);
    }
    // Construct using com.google.cloud.speech.v2.ListRecognizersResponse.newBuilder()
    private Builder() {}
    private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
      super(parent);
    }
    @java.lang.Override
    public Builder clear() {
      super.clear();
      bitField0_ = 0;
      if (recognizersBuilder_ == null) {
        recognizers_ = java.util.Collections.emptyList();
      } else {
        recognizers_ = null;
        recognizersBuilder_.clear();
      }
      bitField0_ = (bitField0_ & ~0x00000001);
      nextPageToken_ = "";
      return this;
    }
    @java.lang.Override
    public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
      return com.google.cloud.speech.v2.CloudSpeechProto
          .internal_static_google_cloud_speech_v2_ListRecognizersResponse_descriptor;
    }
    @java.lang.Override
    public com.google.cloud.speech.v2.ListRecognizersResponse getDefaultInstanceForType() {
      return com.google.cloud.speech.v2.ListRecognizersResponse.getDefaultInstance();
    }
    @java.lang.Override
    public com.google.cloud.speech.v2.ListRecognizersResponse build() {
      com.google.cloud.speech.v2.ListRecognizersResponse result = buildPartial();
      if (!result.isInitialized()) {
        throw newUninitializedMessageException(result);
      }
      return result;
    }
    @java.lang.Override
    public com.google.cloud.speech.v2.ListRecognizersResponse buildPartial() {
      com.google.cloud.speech.v2.ListRecognizersResponse result =
          new com.google.cloud.speech.v2.ListRecognizersResponse(this);
      buildPartialRepeatedFields(result);
      if (bitField0_ != 0) {
        buildPartial0(result);
      }
      onBuilt();
      return result;
    }
    // Transfers the repeated recognizers field into the result, freezing the
    // locally owned list as unmodifiable when no nested builder is in use.
    private void buildPartialRepeatedFields(
        com.google.cloud.speech.v2.ListRecognizersResponse result) {
      if (recognizersBuilder_ == null) {
        if (((bitField0_ & 0x00000001) != 0)) {
          recognizers_ = java.util.Collections.unmodifiableList(recognizers_);
          bitField0_ = (bitField0_ & ~0x00000001);
        }
        result.recognizers_ = recognizers_;
      } else {
        result.recognizers_ = recognizersBuilder_.build();
      }
    }
    // Copies singular fields whose presence bits are set.
    private void buildPartial0(com.google.cloud.speech.v2.ListRecognizersResponse result) {
      int from_bitField0_ = bitField0_;
      if (((from_bitField0_ & 0x00000002) != 0)) {
        result.nextPageToken_ = nextPageToken_;
      }
    }
    // Reflection-based mutators simply delegate to the generated superclass.
    @java.lang.Override
    public Builder clone() {
      return super.clone();
    }
    @java.lang.Override
    public Builder setField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.setField(field, value);
    }
    @java.lang.Override
    public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
      return super.clearField(field);
    }
    @java.lang.Override
    public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
      return super.clearOneof(oneof);
    }
    @java.lang.Override
    public Builder setRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
      return super.setRepeatedField(field, index, value);
    }
    @java.lang.Override
    public Builder addRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.addRepeatedField(field, value);
    }
    @java.lang.Override
    public Builder mergeFrom(com.google.protobuf.Message other) {
      if (other instanceof com.google.cloud.speech.v2.ListRecognizersResponse) {
        return mergeFrom((com.google.cloud.speech.v2.ListRecognizersResponse) other);
      } else {
        super.mergeFrom(other);
        return this;
      }
    }
    // Type-specific merge: appends other's recognizers and adopts its
    // next_page_token when non-empty.
    public Builder mergeFrom(com.google.cloud.speech.v2.ListRecognizersResponse other) {
      if (other == com.google.cloud.speech.v2.ListRecognizersResponse.getDefaultInstance())
        return this;
      if (recognizersBuilder_ == null) {
        if (!other.recognizers_.isEmpty()) {
          if (recognizers_.isEmpty()) {
            // Share other's immutable list directly instead of copying.
            recognizers_ = other.recognizers_;
            bitField0_ = (bitField0_ & ~0x00000001);
          } else {
            ensureRecognizersIsMutable();
            recognizers_.addAll(other.recognizers_);
          }
          onChanged();
        }
      } else {
        if (!other.recognizers_.isEmpty()) {
          if (recognizersBuilder_.isEmpty()) {
            // Drop the empty nested builder and adopt other's list, re-creating
            // the builder only when the runtime always uses field builders.
            recognizersBuilder_.dispose();
            recognizersBuilder_ = null;
            recognizers_ = other.recognizers_;
            bitField0_ = (bitField0_ & ~0x00000001);
            recognizersBuilder_ =
                com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders
                    ? getRecognizersFieldBuilder()
                    : null;
          } else {
            recognizersBuilder_.addAllMessages(other.recognizers_);
          }
        }
      }
      if (!other.getNextPageToken().isEmpty()) {
        nextPageToken_ = other.nextPageToken_;
        bitField0_ |= 0x00000002;
        onChanged();
      }
      this.mergeUnknownFields(other.getUnknownFields());
      onChanged();
      return this;
    }
    @java.lang.Override
    public final boolean isInitialized() {
      return true;
    }
    // Stream merge: dispatches on wire tags (10 = recognizers, 18 = next_page_token).
    @java.lang.Override
    public Builder mergeFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      if (extensionRegistry == null) {
        throw new java.lang.NullPointerException();
      }
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              done = true;
              break;
            case 10:
              {
                com.google.cloud.speech.v2.Recognizer m =
                    input.readMessage(
                        com.google.cloud.speech.v2.Recognizer.parser(), extensionRegistry);
                if (recognizersBuilder_ == null) {
                  ensureRecognizersIsMutable();
                  recognizers_.add(m);
                } else {
                  recognizersBuilder_.addMessage(m);
                }
                break;
              } // case 10
            case 18:
              {
                nextPageToken_ = input.readStringRequireUtf8();
                bitField0_ |= 0x00000002;
                break;
              } // case 18
            default:
              {
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
          } // switch (tag)
        } // while (!done)
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.unwrapIOException();
      } finally {
        onChanged();
      } // finally
      return this;
    }
    private int bitField0_;
    // Repeated recognizers field: bit 0x00000001 of bitField0_ means the local
    // list is mutable and owned by this builder; otherwise it may be a shared
    // immutable list. Once recognizersBuilder_ exists it owns the elements.
    private java.util.List<com.google.cloud.speech.v2.Recognizer> recognizers_ =
        java.util.Collections.emptyList();
    private void ensureRecognizersIsMutable() {
      if (!((bitField0_ & 0x00000001) != 0)) {
        recognizers_ = new java.util.ArrayList<com.google.cloud.speech.v2.Recognizer>(recognizers_);
        bitField0_ |= 0x00000001;
      }
    }
    private com.google.protobuf.RepeatedFieldBuilderV3<
            com.google.cloud.speech.v2.Recognizer,
            com.google.cloud.speech.v2.Recognizer.Builder,
            com.google.cloud.speech.v2.RecognizerOrBuilder>
        recognizersBuilder_;
    /**
     *
     *
     * <pre>
     * The list of requested Recognizers.
     * </pre>
     *
     * <code>repeated .google.cloud.speech.v2.Recognizer recognizers = 1;</code>
     */
    public java.util.List<com.google.cloud.speech.v2.Recognizer> getRecognizersList() {
      if (recognizersBuilder_ == null) {
        return java.util.Collections.unmodifiableList(recognizers_);
      } else {
        return recognizersBuilder_.getMessageList();
      }
    }
    /**
     *
     *
     * <pre>
     * The list of requested Recognizers.
     * </pre>
     *
     * <code>repeated .google.cloud.speech.v2.Recognizer recognizers = 1;</code>
     */
    public int getRecognizersCount() {
      if (recognizersBuilder_ == null) {
        return recognizers_.size();
      } else {
        return recognizersBuilder_.getCount();
      }
    }
    /**
     *
     *
     * <pre>
     * The list of requested Recognizers.
     * </pre>
     *
     * <code>repeated .google.cloud.speech.v2.Recognizer recognizers = 1;</code>
     */
    public com.google.cloud.speech.v2.Recognizer getRecognizers(int index) {
      if (recognizersBuilder_ == null) {
        return recognizers_.get(index);
      } else {
        return recognizersBuilder_.getMessage(index);
      }
    }
    /**
     *
     *
     * <pre>
     * The list of requested Recognizers.
     * </pre>
     *
     * <code>repeated .google.cloud.speech.v2.Recognizer recognizers = 1;</code>
     */
    public Builder setRecognizers(int index, com.google.cloud.speech.v2.Recognizer value) {
      if (recognizersBuilder_ == null) {
        if (value == null) {
          throw new NullPointerException();
        }
        ensureRecognizersIsMutable();
        recognizers_.set(index, value);
        onChanged();
      } else {
        recognizersBuilder_.setMessage(index, value);
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * The list of requested Recognizers.
     * </pre>
     *
     * <code>repeated .google.cloud.speech.v2.Recognizer recognizers = 1;</code>
     */
    public Builder setRecognizers(
        int index, com.google.cloud.speech.v2.Recognizer.Builder builderForValue) {
      if (recognizersBuilder_ == null) {
        ensureRecognizersIsMutable();
        recognizers_.set(index, builderForValue.build());
        onChanged();
      } else {
        recognizersBuilder_.setMessage(index, builderForValue.build());
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * The list of requested Recognizers.
     * </pre>
     *
     * <code>repeated .google.cloud.speech.v2.Recognizer recognizers = 1;</code>
     */
    public Builder addRecognizers(com.google.cloud.speech.v2.Recognizer value) {
      if (recognizersBuilder_ == null) {
        if (value == null) {
          throw new NullPointerException();
        }
        ensureRecognizersIsMutable();
        recognizers_.add(value);
        onChanged();
      } else {
        recognizersBuilder_.addMessage(value);
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * The list of requested Recognizers.
     * </pre>
     *
     * <code>repeated .google.cloud.speech.v2.Recognizer recognizers = 1;</code>
     */
    public Builder addRecognizers(int index, com.google.cloud.speech.v2.Recognizer value) {
      if (recognizersBuilder_ == null) {
        if (value == null) {
          throw new NullPointerException();
        }
        ensureRecognizersIsMutable();
        recognizers_.add(index, value);
        onChanged();
      } else {
        recognizersBuilder_.addMessage(index, value);
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * The list of requested Recognizers.
     * </pre>
     *
     * <code>repeated .google.cloud.speech.v2.Recognizer recognizers = 1;</code>
     */
    public Builder addRecognizers(com.google.cloud.speech.v2.Recognizer.Builder builderForValue) {
      if (recognizersBuilder_ == null) {
        ensureRecognizersIsMutable();
        recognizers_.add(builderForValue.build());
        onChanged();
      } else {
        recognizersBuilder_.addMessage(builderForValue.build());
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * The list of requested Recognizers.
     * </pre>
     *
     * <code>repeated .google.cloud.speech.v2.Recognizer recognizers = 1;</code>
     */
    public Builder addRecognizers(
        int index, com.google.cloud.speech.v2.Recognizer.Builder builderForValue) {
      if (recognizersBuilder_ == null) {
        ensureRecognizersIsMutable();
        recognizers_.add(index, builderForValue.build());
        onChanged();
      } else {
        recognizersBuilder_.addMessage(index, builderForValue.build());
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * The list of requested Recognizers.
     * </pre>
     *
     * <code>repeated .google.cloud.speech.v2.Recognizer recognizers = 1;</code>
     */
    public Builder addAllRecognizers(
        java.lang.Iterable<? extends com.google.cloud.speech.v2.Recognizer> values) {
      if (recognizersBuilder_ == null) {
        ensureRecognizersIsMutable();
        com.google.protobuf.AbstractMessageLite.Builder.addAll(values, recognizers_);
        onChanged();
      } else {
        recognizersBuilder_.addAllMessages(values);
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * The list of requested Recognizers.
     * </pre>
     *
     * <code>repeated .google.cloud.speech.v2.Recognizer recognizers = 1;</code>
     */
    public Builder clearRecognizers() {
      if (recognizersBuilder_ == null) {
        recognizers_ = java.util.Collections.emptyList();
        bitField0_ = (bitField0_ & ~0x00000001);
        onChanged();
      } else {
        recognizersBuilder_.clear();
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * The list of requested Recognizers.
     * </pre>
     *
     * <code>repeated .google.cloud.speech.v2.Recognizer recognizers = 1;</code>
     */
    public Builder removeRecognizers(int index) {
      if (recognizersBuilder_ == null) {
        ensureRecognizersIsMutable();
        recognizers_.remove(index);
        onChanged();
      } else {
        recognizersBuilder_.remove(index);
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * The list of requested Recognizers.
     * </pre>
     *
     * <code>repeated .google.cloud.speech.v2.Recognizer recognizers = 1;</code>
     */
    public com.google.cloud.speech.v2.Recognizer.Builder getRecognizersBuilder(int index) {
      return getRecognizersFieldBuilder().getBuilder(index);
    }
    /**
     *
     *
     * <pre>
     * The list of requested Recognizers.
     * </pre>
     *
     * <code>repeated .google.cloud.speech.v2.Recognizer recognizers = 1;</code>
     */
    public com.google.cloud.speech.v2.RecognizerOrBuilder getRecognizersOrBuilder(int index) {
      if (recognizersBuilder_ == null) {
        return recognizers_.get(index);
      } else {
        return recognizersBuilder_.getMessageOrBuilder(index);
      }
    }
    /**
     *
     *
     * <pre>
     * The list of requested Recognizers.
     * </pre>
     *
     * <code>repeated .google.cloud.speech.v2.Recognizer recognizers = 1;</code>
     */
    public java.util.List<? extends com.google.cloud.speech.v2.RecognizerOrBuilder>
        getRecognizersOrBuilderList() {
      if (recognizersBuilder_ != null) {
        return recognizersBuilder_.getMessageOrBuilderList();
      } else {
        return java.util.Collections.unmodifiableList(recognizers_);
      }
    }
    /**
     *
     *
     * <pre>
     * The list of requested Recognizers.
     * </pre>
     *
     * <code>repeated .google.cloud.speech.v2.Recognizer recognizers = 1;</code>
     */
    public com.google.cloud.speech.v2.Recognizer.Builder addRecognizersBuilder() {
      return getRecognizersFieldBuilder()
          .addBuilder(com.google.cloud.speech.v2.Recognizer.getDefaultInstance());
    }
    /**
     *
     *
     * <pre>
     * The list of requested Recognizers.
     * </pre>
     *
     * <code>repeated .google.cloud.speech.v2.Recognizer recognizers = 1;</code>
     */
    public com.google.cloud.speech.v2.Recognizer.Builder addRecognizersBuilder(int index) {
      return getRecognizersFieldBuilder()
          .addBuilder(index, com.google.cloud.speech.v2.Recognizer.getDefaultInstance());
    }
    /**
     *
     *
     * <pre>
     * The list of requested Recognizers.
     * </pre>
     *
     * <code>repeated .google.cloud.speech.v2.Recognizer recognizers = 1;</code>
     */
    public java.util.List<com.google.cloud.speech.v2.Recognizer.Builder>
        getRecognizersBuilderList() {
      return getRecognizersFieldBuilder().getBuilderList();
    }
    // Lazily creates the RepeatedFieldBuilderV3; afterwards recognizers_ is
    // nulled out and the nested builder owns the element list.
    private com.google.protobuf.RepeatedFieldBuilderV3<
            com.google.cloud.speech.v2.Recognizer,
            com.google.cloud.speech.v2.Recognizer.Builder,
            com.google.cloud.speech.v2.RecognizerOrBuilder>
        getRecognizersFieldBuilder() {
      if (recognizersBuilder_ == null) {
        recognizersBuilder_ =
            new com.google.protobuf.RepeatedFieldBuilderV3<
                com.google.cloud.speech.v2.Recognizer,
                com.google.cloud.speech.v2.Recognizer.Builder,
                com.google.cloud.speech.v2.RecognizerOrBuilder>(
                recognizers_, ((bitField0_ & 0x00000001) != 0), getParentForChildren(), isClean());
        recognizers_ = null;
      }
      return recognizersBuilder_;
    }
private java.lang.Object nextPageToken_ = "";
/**
*
*
* <pre>
* A token, which can be sent as
* [page_token][google.cloud.speech.v2.ListRecognizersRequest.page_token] to
* retrieve the next page. If this field is omitted, there are no subsequent
* pages. This token expires after 72 hours.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @return The nextPageToken.
*/
public java.lang.String getNextPageToken() {
  java.lang.Object ref = nextPageToken_;
  if (!(ref instanceof java.lang.String)) {
    // Field currently holds a ByteString (e.g. via setNextPageTokenBytes);
    // decode once and cache the String back into the field so later calls
    // skip the UTF-8 decode.
    com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
    java.lang.String s = bs.toStringUtf8();
    nextPageToken_ = s;
    return s;
  } else {
    return (java.lang.String) ref;
  }
}
/**
*
*
* <pre>
* A token, which can be sent as
* [page_token][google.cloud.speech.v2.ListRecognizersRequest.page_token] to
* retrieve the next page. If this field is omitted, there are no subsequent
* pages. This token expires after 72 hours.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @return The bytes for nextPageToken.
*/
public com.google.protobuf.ByteString getNextPageTokenBytes() {
  java.lang.Object ref = nextPageToken_;
  if (ref instanceof String) {
    // Field currently holds a String; encode once and cache the ByteString
    // in its place so repeated byte accesses do not re-encode.
    com.google.protobuf.ByteString b =
        com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
    nextPageToken_ = b;
    return b;
  } else {
    return (com.google.protobuf.ByteString) ref;
  }
}
/**
*
*
* <pre>
* A token, which can be sent as
* [page_token][google.cloud.speech.v2.ListRecognizersRequest.page_token] to
* retrieve the next page. If this field is omitted, there are no subsequent
* pages. This token expires after 72 hours.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @param value The nextPageToken to set.
* @return This builder for chaining.
*/
public Builder setNextPageToken(java.lang.String value) {
  if (value == null) {
    throw new NullPointerException();
  }
  nextPageToken_ = value;
  // Bit 0x2 marks next_page_token as explicitly set in this builder.
  bitField0_ |= 0x00000002;
  onChanged();
  return this;
}
/**
*
*
* <pre>
* A token, which can be sent as
* [page_token][google.cloud.speech.v2.ListRecognizersRequest.page_token] to
* retrieve the next page. If this field is omitted, there are no subsequent
* pages. This token expires after 72 hours.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @return This builder for chaining.
*/
public Builder clearNextPageToken() {
  // Restore the default value and clear the presence bit (0x2).
  nextPageToken_ = getDefaultInstance().getNextPageToken();
  bitField0_ = (bitField0_ & ~0x00000002);
  onChanged();
  return this;
}
/**
*
*
* <pre>
* A token, which can be sent as
* [page_token][google.cloud.speech.v2.ListRecognizersRequest.page_token] to
* retrieve the next page. If this field is omitted, there are no subsequent
* pages. This token expires after 72 hours.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @param value The bytes for nextPageToken to set.
* @return This builder for chaining.
*/
public Builder setNextPageTokenBytes(com.google.protobuf.ByteString value) {
  if (value == null) {
    throw new NullPointerException();
  }
  // Reject invalid UTF-8 up front: proto3 `string` fields must be valid UTF-8.
  checkByteStringIsUtf8(value);
  nextPageToken_ = value;
  bitField0_ |= 0x00000002;
  onChanged();
  return this;
}
@java.lang.Override
public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:google.cloud.speech.v2.ListRecognizersResponse)
}
// @@protoc_insertion_point(class_scope:google.cloud.speech.v2.ListRecognizersResponse)
// Singleton default instance shared by all callers; created eagerly at class load.
private static final com.google.cloud.speech.v2.ListRecognizersResponse DEFAULT_INSTANCE;

static {
  DEFAULT_INSTANCE = new com.google.cloud.speech.v2.ListRecognizersResponse();
}

public static com.google.cloud.speech.v2.ListRecognizersResponse getDefaultInstance() {
  return DEFAULT_INSTANCE;
}
// Wire parser: delegates to Builder.mergeFrom and normalizes every failure
// into InvalidProtocolBufferException.
private static final com.google.protobuf.Parser<ListRecognizersResponse> PARSER =
    new com.google.protobuf.AbstractParser<ListRecognizersResponse>() {
      @java.lang.Override
      public ListRecognizersResponse parsePartialFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        Builder builder = newBuilder();
        try {
          builder.mergeFrom(input, extensionRegistry);
        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
          // Attach whatever was parsed so far so callers can inspect the partial message.
          throw e.setUnfinishedMessage(builder.buildPartial());
        } catch (com.google.protobuf.UninitializedMessageException e) {
          throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
        } catch (java.io.IOException e) {
          // Wrap plain I/O failures in the protobuf exception type callers expect.
          throw new com.google.protobuf.InvalidProtocolBufferException(e)
              .setUnfinishedMessage(builder.buildPartial());
        }
        return builder.buildPartial();
      }
    };
public static com.google.protobuf.Parser<ListRecognizersResponse> parser() {
return PARSER;
}
@java.lang.Override
public com.google.protobuf.Parser<ListRecognizersResponse> getParserForType() {
return PARSER;
}
@java.lang.Override
public com.google.cloud.speech.v2.ListRecognizersResponse getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
/*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/datacatalog/v1beta1/datacatalog.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.datacatalog.v1beta1;
/**
*
*
* <pre>
* Response message for
* [ListEntryGroups][google.cloud.datacatalog.v1beta1.DataCatalog.ListEntryGroups].
* </pre>
*
* Protobuf type {@code google.cloud.datacatalog.v1beta1.ListEntryGroupsResponse}
*/
public final class ListEntryGroupsResponse extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.datacatalog.v1beta1.ListEntryGroupsResponse)
ListEntryGroupsResponseOrBuilder {
private static final long serialVersionUID = 0L;
// Use ListEntryGroupsResponse.newBuilder() to construct.
private ListEntryGroupsResponse(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private ListEntryGroupsResponse() {
entryGroups_ = java.util.Collections.emptyList();
nextPageToken_ = "";
}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
return new ListEntryGroupsResponse();
}
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.datacatalog.v1beta1.Datacatalog
.internal_static_google_cloud_datacatalog_v1beta1_ListEntryGroupsResponse_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.datacatalog.v1beta1.Datacatalog
.internal_static_google_cloud_datacatalog_v1beta1_ListEntryGroupsResponse_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.datacatalog.v1beta1.ListEntryGroupsResponse.class,
com.google.cloud.datacatalog.v1beta1.ListEntryGroupsResponse.Builder.class);
}
public static final int ENTRY_GROUPS_FIELD_NUMBER = 1;
@SuppressWarnings("serial")
private java.util.List<com.google.cloud.datacatalog.v1beta1.EntryGroup> entryGroups_;
/**
*
*
* <pre>
* EntryGroup details.
* </pre>
*
* <code>repeated .google.cloud.datacatalog.v1beta1.EntryGroup entry_groups = 1;</code>
*/
@java.lang.Override
public java.util.List<com.google.cloud.datacatalog.v1beta1.EntryGroup> getEntryGroupsList() {
return entryGroups_;
}
/**
*
*
* <pre>
* EntryGroup details.
* </pre>
*
* <code>repeated .google.cloud.datacatalog.v1beta1.EntryGroup entry_groups = 1;</code>
*/
@java.lang.Override
public java.util.List<? extends com.google.cloud.datacatalog.v1beta1.EntryGroupOrBuilder>
getEntryGroupsOrBuilderList() {
return entryGroups_;
}
/**
*
*
* <pre>
* EntryGroup details.
* </pre>
*
* <code>repeated .google.cloud.datacatalog.v1beta1.EntryGroup entry_groups = 1;</code>
*/
@java.lang.Override
public int getEntryGroupsCount() {
return entryGroups_.size();
}
/**
*
*
* <pre>
* EntryGroup details.
* </pre>
*
* <code>repeated .google.cloud.datacatalog.v1beta1.EntryGroup entry_groups = 1;</code>
*/
@java.lang.Override
public com.google.cloud.datacatalog.v1beta1.EntryGroup getEntryGroups(int index) {
return entryGroups_.get(index);
}
/**
*
*
* <pre>
* EntryGroup details.
* </pre>
*
* <code>repeated .google.cloud.datacatalog.v1beta1.EntryGroup entry_groups = 1;</code>
*/
@java.lang.Override
public com.google.cloud.datacatalog.v1beta1.EntryGroupOrBuilder getEntryGroupsOrBuilder(
int index) {
return entryGroups_.get(index);
}
public static final int NEXT_PAGE_TOKEN_FIELD_NUMBER = 2;

@SuppressWarnings("serial")
// volatile: the accessors below may swap the cached representation
// (String <-> ByteString) after construction; volatile keeps that
// publication safe across threads.
private volatile java.lang.Object nextPageToken_ = "";

/**
 *
 *
 * <pre>
 * Token to retrieve the next page of results. It is set to empty if no items
 * remain in results.
 * </pre>
 *
 * <code>string next_page_token = 2;</code>
 *
 * @return The nextPageToken.
 */
@java.lang.Override
public java.lang.String getNextPageToken() {
  java.lang.Object ref = nextPageToken_;
  if (ref instanceof java.lang.String) {
    return (java.lang.String) ref;
  } else {
    // Stored as a ByteString; decode once and cache the String in its place.
    com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
    java.lang.String s = bs.toStringUtf8();
    nextPageToken_ = s;
    return s;
  }
}
/**
*
*
* <pre>
* Token to retrieve the next page of results. It is set to empty if no items
* remain in results.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @return The bytes for nextPageToken.
*/
@java.lang.Override
public com.google.protobuf.ByteString getNextPageTokenBytes() {
java.lang.Object ref = nextPageToken_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
nextPageToken_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
// Memoized initialization state: -1 = not yet computed, 1 = initialized, 0 = not.
private byte memoizedIsInitialized = -1;

@java.lang.Override
public final boolean isInitialized() {
  byte isInitialized = memoizedIsInitialized;
  if (isInitialized == 1) return true;
  if (isInitialized == 0) return false;
  // No required fields in this proto3 message, so it is always initialized.
  memoizedIsInitialized = 1;
  return true;
}
@java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
  // Field 1: each EntryGroup as a length-delimited message.
  for (int i = 0; i < entryGroups_.size(); i++) {
    output.writeMessage(1, entryGroups_.get(i));
  }
  // Field 2: omitted entirely when empty (proto3 default value is not serialized).
  if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) {
    com.google.protobuf.GeneratedMessageV3.writeString(output, 2, nextPageToken_);
  }
  // Round-trip any fields this binary does not know about.
  getUnknownFields().writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
  // memoizedSize caches the computed size; -1 marks "not yet computed".
  int size = memoizedSize;
  if (size != -1) return size;

  size = 0;
  for (int i = 0; i < entryGroups_.size(); i++) {
    size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, entryGroups_.get(i));
  }
  // Must mirror writeTo(): the token only contributes when non-empty.
  if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) {
    size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, nextPageToken_);
  }
  size += getUnknownFields().getSerializedSize();
  memoizedSize = size;
  return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
  if (obj == this) {
    return true;
  }
  if (!(obj instanceof com.google.cloud.datacatalog.v1beta1.ListEntryGroupsResponse)) {
    return super.equals(obj);
  }
  com.google.cloud.datacatalog.v1beta1.ListEntryGroupsResponse other =
      (com.google.cloud.datacatalog.v1beta1.ListEntryGroupsResponse) obj;

  // Field-by-field comparison; unknown fields participate so two messages are
  // equal only when their full wire content matches.
  if (!getEntryGroupsList().equals(other.getEntryGroupsList())) return false;
  if (!getNextPageToken().equals(other.getNextPageToken())) return false;
  if (!getUnknownFields().equals(other.getUnknownFields())) return false;
  return true;
}
@java.lang.Override
public int hashCode() {
  // 0 doubles as the "not yet computed" sentinel for the cached hash.
  if (memoizedHashCode != 0) {
    return memoizedHashCode;
  }
  int hash = 41;
  hash = (19 * hash) + getDescriptor().hashCode();
  // The repeated field only contributes when non-empty, so default instances
  // hash consistently regardless of list representation.
  if (getEntryGroupsCount() > 0) {
    hash = (37 * hash) + ENTRY_GROUPS_FIELD_NUMBER;
    hash = (53 * hash) + getEntryGroupsList().hashCode();
  }
  hash = (37 * hash) + NEXT_PAGE_TOKEN_FIELD_NUMBER;
  hash = (53 * hash) + getNextPageToken().hashCode();
  hash = (29 * hash) + getUnknownFields().hashCode();
  memoizedHashCode = hash;
  return hash;
}
public static com.google.cloud.datacatalog.v1beta1.ListEntryGroupsResponse parseFrom(
java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.datacatalog.v1beta1.ListEntryGroupsResponse parseFrom(
java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.datacatalog.v1beta1.ListEntryGroupsResponse parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.datacatalog.v1beta1.ListEntryGroupsResponse parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.datacatalog.v1beta1.ListEntryGroupsResponse parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.datacatalog.v1beta1.ListEntryGroupsResponse parseFrom(
byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.datacatalog.v1beta1.ListEntryGroupsResponse parseFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.datacatalog.v1beta1.ListEntryGroupsResponse parseFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.datacatalog.v1beta1.ListEntryGroupsResponse parseDelimitedFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
}
public static com.google.cloud.datacatalog.v1beta1.ListEntryGroupsResponse parseDelimitedFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.datacatalog.v1beta1.ListEntryGroupsResponse parseFrom(
com.google.protobuf.CodedInputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.datacatalog.v1beta1.ListEntryGroupsResponse parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() {
return newBuilder();
}
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(
com.google.cloud.datacatalog.v1beta1.ListEntryGroupsResponse prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
*
*
* <pre>
* Response message for
* [ListEntryGroups][google.cloud.datacatalog.v1beta1.DataCatalog.ListEntryGroups].
* </pre>
*
* Protobuf type {@code google.cloud.datacatalog.v1beta1.ListEntryGroupsResponse}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.cloud.datacatalog.v1beta1.ListEntryGroupsResponse)
com.google.cloud.datacatalog.v1beta1.ListEntryGroupsResponseOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.datacatalog.v1beta1.Datacatalog
.internal_static_google_cloud_datacatalog_v1beta1_ListEntryGroupsResponse_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.datacatalog.v1beta1.Datacatalog
.internal_static_google_cloud_datacatalog_v1beta1_ListEntryGroupsResponse_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.datacatalog.v1beta1.ListEntryGroupsResponse.class,
com.google.cloud.datacatalog.v1beta1.ListEntryGroupsResponse.Builder.class);
}
// Construct using com.google.cloud.datacatalog.v1beta1.ListEntryGroupsResponse.newBuilder()
private Builder() {}
private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
}
@java.lang.Override
public Builder clear() {
  super.clear();
  bitField0_ = 0;
  // The repeated field lives either in the plain list or in the field builder,
  // never both; reset whichever representation is currently active.
  if (entryGroupsBuilder_ == null) {
    entryGroups_ = java.util.Collections.emptyList();
  } else {
    entryGroups_ = null;
    entryGroupsBuilder_.clear();
  }
  bitField0_ = (bitField0_ & ~0x00000001);
  nextPageToken_ = "";
  return this;
}
@java.lang.Override
public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
return com.google.cloud.datacatalog.v1beta1.Datacatalog
.internal_static_google_cloud_datacatalog_v1beta1_ListEntryGroupsResponse_descriptor;
}
@java.lang.Override
public com.google.cloud.datacatalog.v1beta1.ListEntryGroupsResponse
getDefaultInstanceForType() {
return com.google.cloud.datacatalog.v1beta1.ListEntryGroupsResponse.getDefaultInstance();
}
@java.lang.Override
public com.google.cloud.datacatalog.v1beta1.ListEntryGroupsResponse build() {
  com.google.cloud.datacatalog.v1beta1.ListEntryGroupsResponse result = buildPartial();
  // Proto3 messages have no required fields, so this check cannot normally fail.
  if (!result.isInitialized()) {
    throw newUninitializedMessageException(result);
  }
  return result;
}

@java.lang.Override
public com.google.cloud.datacatalog.v1beta1.ListEntryGroupsResponse buildPartial() {
  com.google.cloud.datacatalog.v1beta1.ListEntryGroupsResponse result =
      new com.google.cloud.datacatalog.v1beta1.ListEntryGroupsResponse(this);
  buildPartialRepeatedFields(result);
  // Copy scalar fields only when at least one presence bit is set.
  if (bitField0_ != 0) {
    buildPartial0(result);
  }
  onBuilt();
  return result;
}
// Moves the repeated entry_groups field into the result message. When the
// builder still owns the plain list, it is frozen (unmodifiable) and the
// mutability bit cleared so a reused builder copies-on-write before mutating.
private void buildPartialRepeatedFields(
    com.google.cloud.datacatalog.v1beta1.ListEntryGroupsResponse result) {
  if (entryGroupsBuilder_ == null) {
    if (((bitField0_ & 0x00000001) != 0)) {
      entryGroups_ = java.util.Collections.unmodifiableList(entryGroups_);
      bitField0_ = (bitField0_ & ~0x00000001);
    }
    result.entryGroups_ = entryGroups_;
  } else {
    result.entryGroups_ = entryGroupsBuilder_.build();
  }
}

// Copies scalar fields guarded by presence bits (0x2 = next_page_token).
private void buildPartial0(
    com.google.cloud.datacatalog.v1beta1.ListEntryGroupsResponse result) {
  int from_bitField0_ = bitField0_;
  if (((from_bitField0_ & 0x00000002) != 0)) {
    result.nextPageToken_ = nextPageToken_;
  }
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
  // Fast path for the concrete type; otherwise fall back to reflective merge.
  if (other instanceof com.google.cloud.datacatalog.v1beta1.ListEntryGroupsResponse) {
    return mergeFrom((com.google.cloud.datacatalog.v1beta1.ListEntryGroupsResponse) other);
  } else {
    super.mergeFrom(other);
    return this;
  }
}

public Builder mergeFrom(com.google.cloud.datacatalog.v1beta1.ListEntryGroupsResponse other) {
  // Merging the default instance is a no-op.
  if (other
      == com.google.cloud.datacatalog.v1beta1.ListEntryGroupsResponse.getDefaultInstance())
    return this;
  if (entryGroupsBuilder_ == null) {
    if (!other.entryGroups_.isEmpty()) {
      if (entryGroups_.isEmpty()) {
        // Share other's list directly instead of copying; clearing the
        // mutability bit (0x1) forces a copy-on-write before any mutation.
        entryGroups_ = other.entryGroups_;
        bitField0_ = (bitField0_ & ~0x00000001);
      } else {
        ensureEntryGroupsIsMutable();
        entryGroups_.addAll(other.entryGroups_);
      }
      onChanged();
    }
  } else {
    if (!other.entryGroups_.isEmpty()) {
      if (entryGroupsBuilder_.isEmpty()) {
        // Field builder holds nothing yet: drop it and adopt other's list
        // wholesale, recreating the builder only if the runtime is configured
        // to always use field builders.
        entryGroupsBuilder_.dispose();
        entryGroupsBuilder_ = null;
        entryGroups_ = other.entryGroups_;
        bitField0_ = (bitField0_ & ~0x00000001);
        entryGroupsBuilder_ =
            com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders
                ? getEntryGroupsFieldBuilder()
                : null;
      } else {
        entryGroupsBuilder_.addAllMessages(other.entryGroups_);
      }
    }
  }
  // Proto3 string merge semantics: only a non-empty value overwrites.
  if (!other.getNextPageToken().isEmpty()) {
    nextPageToken_ = other.nextPageToken_;
    bitField0_ |= 0x00000002;
    onChanged();
  }
  this.mergeUnknownFields(other.getUnknownFields());
  onChanged();
  return this;
}
@java.lang.Override
public final boolean isInitialized() {
return true;
}
@java.lang.Override
public Builder mergeFrom(
    com.google.protobuf.CodedInputStream input,
    com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws java.io.IOException {
  if (extensionRegistry == null) {
    throw new java.lang.NullPointerException();
  }
  try {
    boolean done = false;
    while (!done) {
      int tag = input.readTag();
      switch (tag) {
        case 0:
          // Tag 0 signals end of input.
          done = true;
          break;
        case 10:
          // Field 1 (entry_groups), wire type 2: length-delimited EntryGroup.
          {
            com.google.cloud.datacatalog.v1beta1.EntryGroup m =
                input.readMessage(
                    com.google.cloud.datacatalog.v1beta1.EntryGroup.parser(),
                    extensionRegistry);
            if (entryGroupsBuilder_ == null) {
              ensureEntryGroupsIsMutable();
              entryGroups_.add(m);
            } else {
              entryGroupsBuilder_.addMessage(m);
            }
            break;
          } // case 10
        case 18:
          // Field 2 (next_page_token), wire type 2: UTF-8 validated string.
          {
            nextPageToken_ = input.readStringRequireUtf8();
            bitField0_ |= 0x00000002;
            break;
          } // case 18
        default:
          {
            // Unknown field: preserve it for round-tripping unless it is an
            // end-group tag, which terminates the loop.
            if (!super.parseUnknownField(input, extensionRegistry, tag)) {
              done = true; // was an endgroup tag
            }
            break;
          } // default:
      } // switch (tag)
    } // while (!done)
  } catch (com.google.protobuf.InvalidProtocolBufferException e) {
    throw e.unwrapIOException();
  } finally {
    // Notify parent builders even on failure, since state may have changed.
    onChanged();
  } // finally
  return this;
}
// Bit 0x1: entryGroups_ list is privately owned and mutable.
// Bit 0x2: next_page_token has been explicitly set.
private int bitField0_;

// Working copy of the repeated entry_groups field. Only meaningful while
// entryGroupsBuilder_ is null; once the field builder exists it owns the data.
private java.util.List<com.google.cloud.datacatalog.v1beta1.EntryGroup> entryGroups_ =
    java.util.Collections.emptyList();

// Copies the list before the first mutation unless bit 0x1 already marks it
// as privately owned (copy-on-write over lists shared via mergeFrom).
private void ensureEntryGroupsIsMutable() {
  if (!((bitField0_ & 0x00000001) != 0)) {
    entryGroups_ =
        new java.util.ArrayList<com.google.cloud.datacatalog.v1beta1.EntryGroup>(entryGroups_);
    bitField0_ |= 0x00000001;
  }
}

private com.google.protobuf.RepeatedFieldBuilderV3<
        com.google.cloud.datacatalog.v1beta1.EntryGroup,
        com.google.cloud.datacatalog.v1beta1.EntryGroup.Builder,
        com.google.cloud.datacatalog.v1beta1.EntryGroupOrBuilder>
    entryGroupsBuilder_;
/**
*
*
* <pre>
* EntryGroup details.
* </pre>
*
* <code>repeated .google.cloud.datacatalog.v1beta1.EntryGroup entry_groups = 1;</code>
*/
public java.util.List<com.google.cloud.datacatalog.v1beta1.EntryGroup> getEntryGroupsList() {
if (entryGroupsBuilder_ == null) {
return java.util.Collections.unmodifiableList(entryGroups_);
} else {
return entryGroupsBuilder_.getMessageList();
}
}
/**
*
*
* <pre>
* EntryGroup details.
* </pre>
*
* <code>repeated .google.cloud.datacatalog.v1beta1.EntryGroup entry_groups = 1;</code>
*/
public int getEntryGroupsCount() {
if (entryGroupsBuilder_ == null) {
return entryGroups_.size();
} else {
return entryGroupsBuilder_.getCount();
}
}
/**
*
*
* <pre>
* EntryGroup details.
* </pre>
*
* <code>repeated .google.cloud.datacatalog.v1beta1.EntryGroup entry_groups = 1;</code>
*/
public com.google.cloud.datacatalog.v1beta1.EntryGroup getEntryGroups(int index) {
if (entryGroupsBuilder_ == null) {
return entryGroups_.get(index);
} else {
return entryGroupsBuilder_.getMessage(index);
}
}
/**
*
*
* <pre>
* EntryGroup details.
* </pre>
*
* <code>repeated .google.cloud.datacatalog.v1beta1.EntryGroup entry_groups = 1;</code>
*/
public Builder setEntryGroups(
int index, com.google.cloud.datacatalog.v1beta1.EntryGroup value) {
if (entryGroupsBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureEntryGroupsIsMutable();
entryGroups_.set(index, value);
onChanged();
} else {
entryGroupsBuilder_.setMessage(index, value);
}
return this;
}
/**
*
*
* <pre>
* EntryGroup details.
* </pre>
*
* <code>repeated .google.cloud.datacatalog.v1beta1.EntryGroup entry_groups = 1;</code>
*/
public Builder setEntryGroups(
int index, com.google.cloud.datacatalog.v1beta1.EntryGroup.Builder builderForValue) {
if (entryGroupsBuilder_ == null) {
ensureEntryGroupsIsMutable();
entryGroups_.set(index, builderForValue.build());
onChanged();
} else {
entryGroupsBuilder_.setMessage(index, builderForValue.build());
}
return this;
}
/**
*
*
* <pre>
* EntryGroup details.
* </pre>
*
* <code>repeated .google.cloud.datacatalog.v1beta1.EntryGroup entry_groups = 1;</code>
*/
public Builder addEntryGroups(com.google.cloud.datacatalog.v1beta1.EntryGroup value) {
if (entryGroupsBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureEntryGroupsIsMutable();
entryGroups_.add(value);
onChanged();
} else {
entryGroupsBuilder_.addMessage(value);
}
return this;
}
/**
*
*
* <pre>
* EntryGroup details.
* </pre>
*
* <code>repeated .google.cloud.datacatalog.v1beta1.EntryGroup entry_groups = 1;</code>
*/
public Builder addEntryGroups(
int index, com.google.cloud.datacatalog.v1beta1.EntryGroup value) {
if (entryGroupsBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureEntryGroupsIsMutable();
entryGroups_.add(index, value);
onChanged();
} else {
entryGroupsBuilder_.addMessage(index, value);
}
return this;
}
/**
*
*
* <pre>
* EntryGroup details.
* </pre>
*
* <code>repeated .google.cloud.datacatalog.v1beta1.EntryGroup entry_groups = 1;</code>
*/
public Builder addEntryGroups(
com.google.cloud.datacatalog.v1beta1.EntryGroup.Builder builderForValue) {
if (entryGroupsBuilder_ == null) {
ensureEntryGroupsIsMutable();
entryGroups_.add(builderForValue.build());
onChanged();
} else {
entryGroupsBuilder_.addMessage(builderForValue.build());
}
return this;
}
/**
*
*
* <pre>
* EntryGroup details.
* </pre>
*
* <code>repeated .google.cloud.datacatalog.v1beta1.EntryGroup entry_groups = 1;</code>
*/
public Builder addEntryGroups(
int index, com.google.cloud.datacatalog.v1beta1.EntryGroup.Builder builderForValue) {
if (entryGroupsBuilder_ == null) {
ensureEntryGroupsIsMutable();
entryGroups_.add(index, builderForValue.build());
onChanged();
} else {
entryGroupsBuilder_.addMessage(index, builderForValue.build());
}
return this;
}
/**
*
*
* <pre>
* EntryGroup details.
* </pre>
*
* <code>repeated .google.cloud.datacatalog.v1beta1.EntryGroup entry_groups = 1;</code>
*/
public Builder addAllEntryGroups(
java.lang.Iterable<? extends com.google.cloud.datacatalog.v1beta1.EntryGroup> values) {
if (entryGroupsBuilder_ == null) {
ensureEntryGroupsIsMutable();
com.google.protobuf.AbstractMessageLite.Builder.addAll(values, entryGroups_);
onChanged();
} else {
entryGroupsBuilder_.addAllMessages(values);
}
return this;
}
/**
*
*
* <pre>
* EntryGroup details.
* </pre>
*
* <code>repeated .google.cloud.datacatalog.v1beta1.EntryGroup entry_groups = 1;</code>
*/
public Builder clearEntryGroups() {
if (entryGroupsBuilder_ == null) {
entryGroups_ = java.util.Collections.emptyList();
bitField0_ = (bitField0_ & ~0x00000001);
onChanged();
} else {
entryGroupsBuilder_.clear();
}
return this;
}
/**
*
*
* <pre>
* EntryGroup details.
* </pre>
*
* <code>repeated .google.cloud.datacatalog.v1beta1.EntryGroup entry_groups = 1;</code>
*/
public Builder removeEntryGroups(int index) {
if (entryGroupsBuilder_ == null) {
ensureEntryGroupsIsMutable();
entryGroups_.remove(index);
onChanged();
} else {
entryGroupsBuilder_.remove(index);
}
return this;
}
/**
*
*
* <pre>
* EntryGroup details.
* </pre>
*
* <code>repeated .google.cloud.datacatalog.v1beta1.EntryGroup entry_groups = 1;</code>
*/
public com.google.cloud.datacatalog.v1beta1.EntryGroup.Builder getEntryGroupsBuilder(
int index) {
return getEntryGroupsFieldBuilder().getBuilder(index);
}
/**
*
*
* <pre>
* EntryGroup details.
* </pre>
*
* <code>repeated .google.cloud.datacatalog.v1beta1.EntryGroup entry_groups = 1;</code>
*/
public com.google.cloud.datacatalog.v1beta1.EntryGroupOrBuilder getEntryGroupsOrBuilder(
int index) {
if (entryGroupsBuilder_ == null) {
return entryGroups_.get(index);
} else {
return entryGroupsBuilder_.getMessageOrBuilder(index);
}
}
/**
*
*
* <pre>
* EntryGroup details.
* </pre>
*
* <code>repeated .google.cloud.datacatalog.v1beta1.EntryGroup entry_groups = 1;</code>
*/
public java.util.List<? extends com.google.cloud.datacatalog.v1beta1.EntryGroupOrBuilder>
getEntryGroupsOrBuilderList() {
if (entryGroupsBuilder_ != null) {
return entryGroupsBuilder_.getMessageOrBuilderList();
} else {
return java.util.Collections.unmodifiableList(entryGroups_);
}
}
/**
*
*
* <pre>
* EntryGroup details.
* </pre>
*
* <code>repeated .google.cloud.datacatalog.v1beta1.EntryGroup entry_groups = 1;</code>
*/
public com.google.cloud.datacatalog.v1beta1.EntryGroup.Builder addEntryGroupsBuilder() {
return getEntryGroupsFieldBuilder()
.addBuilder(com.google.cloud.datacatalog.v1beta1.EntryGroup.getDefaultInstance());
}
/**
*
*
* <pre>
* EntryGroup details.
* </pre>
*
* <code>repeated .google.cloud.datacatalog.v1beta1.EntryGroup entry_groups = 1;</code>
*/
public com.google.cloud.datacatalog.v1beta1.EntryGroup.Builder addEntryGroupsBuilder(
int index) {
return getEntryGroupsFieldBuilder()
.addBuilder(index, com.google.cloud.datacatalog.v1beta1.EntryGroup.getDefaultInstance());
}
/**
*
*
* <pre>
* EntryGroup details.
* </pre>
*
* <code>repeated .google.cloud.datacatalog.v1beta1.EntryGroup entry_groups = 1;</code>
*/
public java.util.List<com.google.cloud.datacatalog.v1beta1.EntryGroup.Builder>
getEntryGroupsBuilderList() {
return getEntryGroupsFieldBuilder().getBuilderList();
}
private com.google.protobuf.RepeatedFieldBuilderV3<
        com.google.cloud.datacatalog.v1beta1.EntryGroup,
        com.google.cloud.datacatalog.v1beta1.EntryGroup.Builder,
        com.google.cloud.datacatalog.v1beta1.EntryGroupOrBuilder>
    getEntryGroupsFieldBuilder() {
  // Lazily creates the repeated-field builder on first use. Ownership of the
  // backing list transfers into the builder, so entryGroups_ is nulled out to
  // make any further direct use of the plain list fail fast.
  if (entryGroupsBuilder_ == null) {
    entryGroupsBuilder_ =
        new com.google.protobuf.RepeatedFieldBuilderV3<
            com.google.cloud.datacatalog.v1beta1.EntryGroup,
            com.google.cloud.datacatalog.v1beta1.EntryGroup.Builder,
            com.google.cloud.datacatalog.v1beta1.EntryGroupOrBuilder>(
            entryGroups_, ((bitField0_ & 0x00000001) != 0), getParentForChildren(), isClean());
    entryGroups_ = null;
  }
  return entryGroupsBuilder_;
}
// Stored either as a String or as a lazily-decoded ByteString (standard protobuf pattern).
private java.lang.Object nextPageToken_ = "";
/**
 *
 *
 * <pre>
 * Token to retrieve the next page of results. It is set to empty if no items
 * remain in results.
 * </pre>
 *
 * <code>string next_page_token = 2;</code>
 *
 * @return The nextPageToken.
 */
public java.lang.String getNextPageToken() {
java.lang.Object ref = nextPageToken_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
// Cache the decoded String so subsequent calls skip the UTF-8 conversion.
nextPageToken_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
 *
 *
 * <pre>
 * Token to retrieve the next page of results. It is set to empty if no items
 * remain in results.
 * </pre>
 *
 * <code>string next_page_token = 2;</code>
 *
 * @return The bytes for nextPageToken.
 */
public com.google.protobuf.ByteString getNextPageTokenBytes() {
java.lang.Object ref = nextPageToken_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
// Cache the encoded ByteString for later byte-level reads.
nextPageToken_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
 *
 *
 * <pre>
 * Token to retrieve the next page of results. It is set to empty if no items
 * remain in results.
 * </pre>
 *
 * <code>string next_page_token = 2;</code>
 *
 * @param value The nextPageToken to set.
 * @return This builder for chaining.
 */
public Builder setNextPageToken(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
nextPageToken_ = value;
// Mark field 2 as explicitly set in the builder's presence bitfield.
bitField0_ |= 0x00000002;
onChanged();
return this;
}
/**
 *
 *
 * <pre>
 * Token to retrieve the next page of results. It is set to empty if no items
 * remain in results.
 * </pre>
 *
 * <code>string next_page_token = 2;</code>
 *
 * @return This builder for chaining.
 */
public Builder clearNextPageToken() {
nextPageToken_ = getDefaultInstance().getNextPageToken();
// Clear the presence bit for field 2.
bitField0_ = (bitField0_ & ~0x00000002);
onChanged();
return this;
}
/**
 *
 *
 * <pre>
 * Token to retrieve the next page of results. It is set to empty if no items
 * remain in results.
 * </pre>
 *
 * <code>string next_page_token = 2;</code>
 *
 * @param value The bytes for nextPageToken to set.
 * @return This builder for chaining.
 */
public Builder setNextPageTokenBytes(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
// Proto3 strings must be valid UTF-8; rejects malformed input up front.
checkByteStringIsUtf8(value);
nextPageToken_ = value;
bitField0_ |= 0x00000002;
onChanged();
return this;
}
// Unknown-field handling is delegated unchanged to the generated superclass.
@java.lang.Override
public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:google.cloud.datacatalog.v1beta1.ListEntryGroupsResponse)
}
// @@protoc_insertion_point(class_scope:google.cloud.datacatalog.v1beta1.ListEntryGroupsResponse)
// Singleton default instance returned by getDefaultInstance()/getDefaultInstanceForType().
private static final com.google.cloud.datacatalog.v1beta1.ListEntryGroupsResponse
DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new com.google.cloud.datacatalog.v1beta1.ListEntryGroupsResponse();
}
public static com.google.cloud.datacatalog.v1beta1.ListEntryGroupsResponse getDefaultInstance() {
return DEFAULT_INSTANCE;
}
private static final com.google.protobuf.Parser<ListEntryGroupsResponse> PARSER =
new com.google.protobuf.AbstractParser<ListEntryGroupsResponse>() {
@java.lang.Override
public ListEntryGroupsResponse parsePartialFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
Builder builder = newBuilder();
try {
builder.mergeFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
// Attach whatever was parsed so far so callers can inspect the partial message.
throw e.setUnfinishedMessage(builder.buildPartial());
} catch (com.google.protobuf.UninitializedMessageException e) {
throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
} catch (java.io.IOException e) {
// Wrap raw I/O failures in the protobuf exception type expected by callers.
throw new com.google.protobuf.InvalidProtocolBufferException(e)
.setUnfinishedMessage(builder.buildPartial());
}
return builder.buildPartial();
}
};
public static com.google.protobuf.Parser<ListEntryGroupsResponse> parser() {
return PARSER;
}
// Instance-level accessors mirroring the static parser()/getDefaultInstance() above.
@java.lang.Override
public com.google.protobuf.Parser<ListEntryGroupsResponse> getParserForType() {
return PARSER;
}
@java.lang.Override
public com.google.cloud.datacatalog.v1beta1.ListEntryGroupsResponse getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
/*
* Copyright (C) 2007 The Guava Authors
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
* in compliance with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and limitations under
* the License.
*/
package com.google.common.collect;
import static com.google.common.base.Preconditions.checkArgument;
import static com.google.common.collect.NullnessCasts.uncheckedCastNullableTToT;
import static com.google.common.collect.NullnessCasts.unsafeNull;
import com.google.common.annotations.GwtCompatible;
import com.google.common.annotations.GwtIncompatible;
import com.google.common.annotations.J2ktIncompatible;
import com.google.errorprone.annotations.CanIgnoreReturnValue;
import com.google.errorprone.annotations.concurrent.LazyInit;
import com.google.j2objc.annotations.RetainedWith;
import java.io.IOException;
import java.io.ObjectInputStream;
import java.io.ObjectOutputStream;
import java.io.Serializable;
import java.util.AbstractMap;
import java.util.AbstractSet;
import java.util.Arrays;
import java.util.ConcurrentModificationException;
import java.util.Iterator;
import java.util.Map;
import java.util.NoSuchElementException;
import java.util.Objects;
import java.util.Set;
import org.jspecify.annotations.Nullable;
/**
* A {@link BiMap} backed by two hash tables. This implementation allows null keys and values. A
* {@code HashBiMap} and its inverse are both serializable.
*
* <p>This implementation guarantees insertion-based iteration order of its keys.
*
* <p>See the Guava User Guide article on <a href=
* "https://github.com/google/guava/wiki/NewCollectionTypesExplained#bimap">{@code BiMap} </a>.
*
* @author Louis Wasserman
* @author Mike Bostock
* @since 2.0
*/
@GwtCompatible
public final class HashBiMap<K extends @Nullable Object, V extends @Nullable Object>
extends AbstractMap<K, V> implements BiMap<K, V>, Serializable {
/** Returns a new, empty {@code HashBiMap} with the default initial capacity (16). */
public static <K extends @Nullable Object, V extends @Nullable Object> HashBiMap<K, V> create() {
  // 16 matches the documented default capacity.
  return new HashBiMap<>(16);
}
/**
 * Constructs a new, empty bimap with the specified expected size.
 *
 * @param expectedSize the expected number of entries
 * @throws IllegalArgumentException if the specified expected size is negative
 */
public static <K extends @Nullable Object, V extends @Nullable Object> HashBiMap<K, V> create(
int expectedSize) {
// Negative sizes are rejected by init(), called from the constructor.
return new HashBiMap<>(expectedSize);
}
/**
 * Constructs a new bimap containing initial values from {@code map}. The bimap is created with an
 * initial capacity sufficient to hold the mappings in the specified map.
 */
public static <K extends @Nullable Object, V extends @Nullable Object> HashBiMap<K, V> create(
    Map<? extends K, ? extends V> map) {
  HashBiMap<K, V> result = create(map.size());
  result.putAll(map);
  return result;
}
// Sentinel for "no entry" in the hash chains and link arrays.
private static final int ABSENT = -1;
// Sentinel marking the head/tail of the insertion-order linked list.
private static final int ENDPOINT = -2;
/** Maps an "entry" to the key of that entry. */
transient @Nullable K[] keys;
/** Maps an "entry" to the value of that entry. */
transient @Nullable V[] values;
// Number of live entries; entries occupy indexes [0, size).
transient int size;
// Incremented on structural modification; used for fail-fast iteration.
transient int modCount;
/** Key-to-value table: maps a key bucket to the "entry" of its first element. */
private transient int[] hashTableKToV;
/** Value-to-key table: maps a value bucket to the "entry" of its first element. */
private transient int[] hashTableVToK;
/** Maps an "entry" to the "entry" that follows it in its key bucket. */
private transient int[] nextInBucketKToV;
/** Maps an "entry" to the "entry" that follows it in its value bucket. */
private transient int[] nextInBucketVToK;
/** The "entry" of the first element in insertion order. */
private transient int firstInInsertionOrder;
/** The "entry" of the last element in insertion order. */
private transient int lastInInsertionOrder;
/** Maps an "entry" to the "entry" that precedes it in insertion order. */
private transient int[] prevInInsertionOrder;
/** Maps an "entry" to the "entry" that follows it in insertion order. */
private transient int[] nextInInsertionOrder;
/** Creates a bimap sized for {@code expectedSize} entries; all array setup happens in init(). */
private HashBiMap(int expectedSize) {
init(expectedSize);
}
// Allocates all backing arrays for an empty map with capacity for expectedSize entries.
@SuppressWarnings("unchecked")
void init(int expectedSize) {
CollectPreconditions.checkNonnegative(expectedSize, "expectedSize")
int tableSize = Hashing.closedTableSize(expectedSize, 1.0);
size = 0;
keys = (K[]) new Object[expectedSize];
values = (V[]) new Object[expectedSize];
// Bucket tables are sized by tableSize; per-entry arrays by expectedSize.
hashTableKToV = createFilledWithAbsent(tableSize);
hashTableVToK = createFilledWithAbsent(tableSize);
nextInBucketKToV = createFilledWithAbsent(expectedSize);
nextInBucketVToK = createFilledWithAbsent(expectedSize);
// Empty insertion-order list: both ends point at the ENDPOINT sentinel.
firstInInsertionOrder = ENDPOINT;
lastInInsertionOrder = ENDPOINT;
prevInInsertionOrder = createFilledWithAbsent(expectedSize);
nextInInsertionOrder = createFilledWithAbsent(expectedSize);
}
/** Returns a fresh int array of length {@code size} with every slot set to ABSENT. */
private static int[] createFilledWithAbsent(int size) {
  int[] slots = new int[size];
  Arrays.fill(slots, ABSENT);
  return slots;
}
/** Like {@code Arrays.copyOf(array, newSize)}, except the newly added slots hold ABSENT. */
private static int[] expandAndFillWithAbsent(int[] array, int newSize) {
  int[] grown = Arrays.copyOf(array, newSize);
  // copyOf zero-fills the tail; overwrite that region with the ABSENT sentinel.
  Arrays.fill(grown, array.length, newSize, ABSENT);
  return grown;
}
// Number of key/value pairs currently in the bimap.
@Override
public int size() {
return size;
}
/**
 * Ensures that all of the internal structures in the HashBiMap are ready for this many elements.
 */
private void ensureCapacity(int minCapacity) {
// Grow the per-entry arrays if the entry store is too small.
if (nextInBucketKToV.length < minCapacity) {
int oldCapacity = nextInBucketKToV.length;
int newCapacity = ImmutableCollection.Builder.expandedCapacity(oldCapacity, minCapacity);
keys = Arrays.copyOf(keys, newCapacity);
values = Arrays.copyOf(values, newCapacity);
nextInBucketKToV = expandAndFillWithAbsent(nextInBucketKToV, newCapacity);
nextInBucketVToK = expandAndFillWithAbsent(nextInBucketVToK, newCapacity);
prevInInsertionOrder = expandAndFillWithAbsent(prevInInsertionOrder, newCapacity);
nextInInsertionOrder = expandAndFillWithAbsent(nextInInsertionOrder, newCapacity);
}
// Grow the bucket tables if needed; this requires rebuilding both chains from scratch.
if (hashTableKToV.length < minCapacity) {
int newTableSize = Hashing.closedTableSize(minCapacity, 1.0);
hashTableKToV = createFilledWithAbsent(newTableSize);
hashTableVToK = createFilledWithAbsent(newTableSize);
// Rehash every live entry into the new (larger) bucket tables, prepending to each chain.
for (int entryToRehash = 0; entryToRehash < size; entryToRehash++) {
int keyHash = Hashing.smearedHash(keys[entryToRehash]);
int keyBucket = bucket(keyHash);
nextInBucketKToV[entryToRehash] = hashTableKToV[keyBucket];
hashTableKToV[keyBucket] = entryToRehash;
int valueHash = Hashing.smearedHash(values[entryToRehash]);
int valueBucket = bucket(valueHash);
nextInBucketVToK[entryToRehash] = hashTableVToK[valueBucket];
hashTableVToK[valueBucket] = entryToRehash;
}
}
}
/**
 * Returns the bucket (in either the K-to-V or V-to-K tables) where elements with the specified
 * hash could be found, if present, or could be inserted.
 */
private int bucket(int hash) {
  // Masking assumes the table length is a power of two (see Hashing.closedTableSize) —
  // both tables always have the same length.
  int mask = hashTableKToV.length - 1;
  return hash & mask;
}
/** Given a key, returns the index of the entry in the tables, or ABSENT if not found. */
int findEntryByKey(@Nullable Object key) {
  return findEntryByKey(key, Hashing.smearedHash(key));
}

/** Given a key and its precomputed hash, returns the entry index, or ABSENT if not found. */
int findEntryByKey(@Nullable Object key, int keyHash) {
  return findEntry(key, keyHash, hashTableKToV, nextInBucketKToV, keys);
}

/** Given a value, returns the index of the entry in the tables, or ABSENT if not found. */
int findEntryByValue(@Nullable Object value) {
  return findEntryByValue(value, Hashing.smearedHash(value));
}

/** Given a value and its precomputed hash, returns the entry index, or ABSENT if not found. */
int findEntryByValue(@Nullable Object value, int valueHash) {
  return findEntry(value, valueHash, hashTableVToK, nextInBucketVToK, values);
}

/** Walks one bucket chain looking for {@code o}, returning its entry index or ABSENT. */
int findEntry(
    @Nullable Object o,
    int oHash,
    int[] hashTable,
    int[] nextInBucket,
    @Nullable Object[] array) {
  int entry = hashTable[bucket(oHash)];
  while (entry != ABSENT) {
    if (Objects.equals(array[entry], o)) {
      return entry;
    }
    entry = nextInBucket[entry];
  }
  return ABSENT;
}
@Override
public boolean containsKey(@Nullable Object key) {
  int entry = findEntryByKey(key);
  return entry != ABSENT;
}

/**
 * Returns {@code true} if this BiMap contains an entry whose value is equal to {@code value} (or,
 * equivalently, if this inverse view contains a key that is equal to {@code value}).
 *
 * <p>Due to the property that values in a BiMap are unique, this will tend to execute in
 * faster-than-linear time.
 *
 * @param value the object to search for in the values of this BiMap
 * @return true if a mapping exists from a key to the specified value
 */
@Override
public boolean containsValue(@Nullable Object value) {
  int entry = findEntryByValue(value);
  return entry != ABSENT;
}

@Override
public @Nullable V get(@Nullable Object key) {
  int entry = findEntryByKey(key);
  if (entry == ABSENT) {
    return null;
  }
  return values[entry];
}

/** Looks up the key mapped to {@code value}, or null if the value is not present. */
@Nullable K getInverse(@Nullable Object value) {
  int entry = findEntryByValue(value);
  if (entry == ABSENT) {
    return null;
  }
  return keys[entry];
}
@Override
@CanIgnoreReturnValue
public @Nullable V put(@ParametricNullness K key, @ParametricNullness V value) {
return put(key, value, false);
}
/**
 * Maps {@code key} to {@code value}. If {@code force}, any existing entry for {@code value} is
 * evicted first; otherwise a duplicate value throws. Returns the previous value for {@code key},
 * or null if there was none.
 */
@Nullable V put(@ParametricNullness K key, @ParametricNullness V value, boolean force) {
int keyHash = Hashing.smearedHash(key);
int entryForKey = findEntryByKey(key, keyHash);
// Key already present: swap the value in place; insertion order is unchanged.
if (entryForKey != ABSENT) {
V oldValue = values[entryForKey];
if (Objects.equals(oldValue, value)) {
return value;
} else {
replaceValueInEntry(entryForKey, value, force);
return oldValue;
}
}
int valueHash = Hashing.smearedHash(value);
int valueEntry = findEntryByValue(value, valueHash);
if (force) {
// forcePut semantics: evict whichever entry currently owns this value.
if (valueEntry != ABSENT) {
removeEntryValueHashKnown(valueEntry, valueHash);
}
} else {
checkArgument(valueEntry == ABSENT, "Value already present: %s", value);
}
// Append the new entry at index `size` and link it at the end of insertion order.
ensureCapacity(size + 1);
keys[size] = key;
values[size] = value;
insertIntoTableKToV(size, keyHash);
insertIntoTableVToK(size, valueHash);
setSucceeds(lastInInsertionOrder, size);
setSucceeds(size, ENDPOINT);
size++;
modCount++;
return null;
}
@Override
@CanIgnoreReturnValue
public @Nullable V forcePut(@ParametricNullness K key, @ParametricNullness V value) {
return put(key, value, true);
}
/**
 * Inverse-direction put: maps {@code value} to {@code key}. With {@code force}, an existing
 * entry for {@code key} is evicted and the new entry takes over its insertion-order position.
 * Returns the previous key for {@code value}, or null if there was none.
 */
@CanIgnoreReturnValue
@Nullable K putInverse(@ParametricNullness V value, @ParametricNullness K key, boolean force) {
int valueHash = Hashing.smearedHash(value);
int entryForValue = findEntryByValue(value, valueHash);
// Value already present: swap the key in place.
if (entryForValue != ABSENT) {
K oldKey = keys[entryForValue];
if (Objects.equals(oldKey, key)) {
return key;
} else {
replaceKeyInEntry(entryForValue, key, force);
return oldKey;
}
}
int predecessor = lastInInsertionOrder;
int keyHash = Hashing.smearedHash(key);
int keyEntry = findEntryByKey(key, keyHash);
if (force) {
if (keyEntry != ABSENT) {
// Remember where the evicted entry sat so the new one can take its position.
predecessor = prevInInsertionOrder[keyEntry];
removeEntryKeyHashKnown(keyEntry, keyHash);
}
} else {
checkArgument(keyEntry == ABSENT, "Key already present: %s", key);
}
// insertion point for new entry is after predecessor
// note predecessor must still be a valid entry: either we deleted an entry that was *not*
// predecessor, or we didn't delete anything
ensureCapacity(size + 1);
keys[size] = key;
values[size] = value;
insertIntoTableKToV(size, keyHash);
insertIntoTableVToK(size, valueHash);
int successor =
(predecessor == ENDPOINT) ? firstInInsertionOrder : nextInInsertionOrder[predecessor];
setSucceeds(predecessor, size);
setSucceeds(size, successor);
size++;
modCount++;
return null;
}
/**
 * Links {@code next} directly after {@code prev} in the insertion-order list. {@code ENDPOINT}
 * stands in for the (virtual) node before the first entry or after the last one.
 */
private void setSucceeds(int prev, int next) {
  if (prev != ENDPOINT) {
    nextInInsertionOrder[prev] = next;
  } else {
    // Linking after the virtual head: `next` becomes the first entry.
    firstInInsertionOrder = next;
  }
  if (next != ENDPOINT) {
    prevInInsertionOrder[next] = prev;
  } else {
    // Linking before the virtual tail: `prev` becomes the last entry.
    lastInInsertionOrder = prev;
  }
}
/**
 * Adds the entry at the specified index (assumed not yet present) to the K-to-V hash table by
 * pushing it onto the head of its key bucket's chain.
 */
private void insertIntoTableKToV(int entry, int keyHash) {
  checkArgument(entry != ABSENT);
  int slot = bucket(keyHash);
  nextInBucketKToV[entry] = hashTableKToV[slot];
  hashTableKToV[slot] = entry;
}

/**
 * Adds the entry at the specified index (assumed not yet present) to the V-to-K hash table by
 * pushing it onto the head of its value bucket's chain.
 */
private void insertIntoTableVToK(int entry, int valueHash) {
  checkArgument(entry != ABSENT);
  int slot = bucket(valueHash);
  nextInBucketVToK[entry] = hashTableVToK[slot];
  hashTableVToK[slot] = entry;
}
/**
 * Updates the K-to-V hash table to remove the entry at the specified index, which is assumed to
 * be present. Does not update any other data structures.
 */
private void deleteFromTableKToV(int entry, int keyHash) {
checkArgument(entry != ABSENT);
int keyBucket = bucket(keyHash);
// Fast path: the entry is the head of its bucket's chain.
if (hashTableKToV[keyBucket] == entry) {
hashTableKToV[keyBucket] = nextInBucketKToV[entry];
nextInBucketKToV[entry] = ABSENT;
return;
}
// Otherwise scan the chain, keeping the previous node so we can splice the entry out.
int prevInBucket = hashTableKToV[keyBucket];
for (int entryInBucket = nextInBucketKToV[prevInBucket];
entryInBucket != ABSENT;
entryInBucket = nextInBucketKToV[entryInBucket]) {
if (entryInBucket == entry) {
nextInBucketKToV[prevInBucket] = nextInBucketKToV[entry];
nextInBucketKToV[entry] = ABSENT;
return;
}
prevInBucket = entryInBucket;
}
// Unreachable if the caller's precondition (entry present) holds.
throw new AssertionError("Expected to find entry with key " + keys[entry]);
}
/**
 * Updates the V-to-K hash table to remove the entry at the specified index, which is assumed to
 * be present. Does not update any other data structures.
 */
private void deleteFromTableVToK(int entry, int valueHash) {
checkArgument(entry != ABSENT);
int valueBucket = bucket(valueHash);
// Fast path: the entry is the head of its bucket's chain.
if (hashTableVToK[valueBucket] == entry) {
hashTableVToK[valueBucket] = nextInBucketVToK[entry];
nextInBucketVToK[entry] = ABSENT;
return;
}
// Otherwise scan the chain, keeping the previous node so we can splice the entry out.
int prevInBucket = hashTableVToK[valueBucket];
for (int entryInBucket = nextInBucketVToK[prevInBucket];
entryInBucket != ABSENT;
entryInBucket = nextInBucketVToK[entryInBucket]) {
if (entryInBucket == entry) {
nextInBucketVToK[prevInBucket] = nextInBucketVToK[entry];
nextInBucketVToK[entry] = ABSENT;
return;
}
prevInBucket = entryInBucket;
}
// Unreachable if the caller's precondition (entry present) holds.
throw new AssertionError("Expected to find entry with value " + values[entry]);
}
/**
 * Updates the specified entry to point to the new value: removes the old value from the V-to-K
 * mapping and puts the new one in. The entry does not move in the insertion order of the bimap.
 */
private void replaceValueInEntry(int entry, @ParametricNullness V newValue, boolean force) {
checkArgument(entry != ABSENT);
int newValueHash = Hashing.smearedHash(newValue);
int newValueIndex = findEntryByValue(newValue, newValueHash);
if (newValueIndex != ABSENT) {
if (force) {
removeEntryValueHashKnown(newValueIndex, newValueHash);
// removeEntry compacts by relocating the last entry; if our entry was last (its old
// index equals the new, decremented `size`), it now lives at newValueIndex.
if (entry == size) { // this entry got moved to newValueIndex
entry = newValueIndex;
}
} else {
throw new IllegalArgumentException("Value already present in map: " + newValue);
}
}
// we do *not* update insertion order, and it isn't a structural modification!
deleteFromTableVToK(entry, Hashing.smearedHash(values[entry]));
values[entry] = newValue;
insertIntoTableVToK(entry, newValueHash);
}
/**
 * Updates the specified entry to point to the new value: removes the old value from the V-to-K
 * mapping and puts the new one in. The entry is moved to the end of the insertion order, or to
 * the position of the new key if it was previously present.
 */
private void replaceKeyInEntry(int entry, @ParametricNullness K newKey, boolean force) {
checkArgument(entry != ABSENT);
int newKeyHash = Hashing.smearedHash(newKey);
int newKeyIndex = findEntryByKey(newKey, newKeyHash);
// Default destination in insertion order: the end of the list.
int newPredecessor = lastInInsertionOrder;
int newSuccessor = ENDPOINT;
if (newKeyIndex != ABSENT) {
if (force) {
// Take over the evicted key's insertion-order position.
newPredecessor = prevInInsertionOrder[newKeyIndex];
newSuccessor = nextInInsertionOrder[newKeyIndex];
removeEntryKeyHashKnown(newKeyIndex, newKeyHash);
// removeEntry compacts by relocating the last entry; fix up `entry` if it was the one moved.
if (entry == size) { // this entry got moved to newKeyIndex
entry = newKeyIndex;
}
} else {
throw new IllegalArgumentException("Key already present in map: " + newKey);
}
}
// The neighbors captured above may themselves have been invalidated by the removal:
// either they referred to `entry` itself, or to the relocated last entry (old index == size).
if (newPredecessor == entry) {
newPredecessor = prevInInsertionOrder[entry];
} else if (newPredecessor == size) {
newPredecessor = newKeyIndex;
}
if (newSuccessor == entry) {
newSuccessor = nextInInsertionOrder[entry];
} else if (newSuccessor == size) {
newSuccessor = newKeyIndex;
}
// Unlink from the current insertion-order position, rekey, then relink at the destination.
int oldPredecessor = prevInInsertionOrder[entry];
int oldSuccessor = nextInInsertionOrder[entry];
setSucceeds(oldPredecessor, oldSuccessor); // remove from insertion order linked list
deleteFromTableKToV(entry, Hashing.smearedHash(keys[entry]));
keys[entry] = newKey;
insertIntoTableKToV(entry, Hashing.smearedHash(newKey));
// insert into insertion order linked list, usually at the end
setSucceeds(newPredecessor, entry);
setSucceeds(entry, newSuccessor);
}
@Override
@CanIgnoreReturnValue
public @Nullable V remove(@Nullable Object key) {
  int keyHash = Hashing.smearedHash(key);
  int entry = findEntryByKey(key, keyHash);
  if (entry == ABSENT) {
    return null;
  }
  // Capture the value before the entry is unlinked and compacted away.
  V oldValue = values[entry];
  removeEntryKeyHashKnown(entry, keyHash);
  return oldValue;
}

/** Removes the entry for {@code value}, returning the key it was mapped from (or null). */
@Nullable K removeInverse(@Nullable Object value) {
  int valueHash = Hashing.smearedHash(value);
  int entry = findEntryByValue(value, valueHash);
  if (entry == ABSENT) {
    return null;
  }
  // Capture the key before the entry is unlinked and compacted away.
  K oldKey = keys[entry];
  removeEntryValueHashKnown(entry, valueHash);
  return oldKey;
}
/** Removes the entry at the specified index with no additional data. */
void removeEntry(int entry) {
removeEntryKeyHashKnown(entry, Hashing.smearedHash(keys[entry]));
}
/** Removes the entry at the specified index, given the hash of its key and value. */
private void removeEntry(int entry, int keyHash, int valueHash) {
checkArgument(entry != ABSENT);
// Unlink from both hash tables and from the insertion-order list.
deleteFromTableKToV(entry, keyHash);
deleteFromTableVToK(entry, valueHash);
int oldPredecessor = prevInInsertionOrder[entry];
int oldSuccessor = nextInInsertionOrder[entry];
setSucceeds(oldPredecessor, oldSuccessor);
// Keep entries dense: relocate the last entry into the vacated slot, then clear the last slot.
moveEntryToIndex(size - 1, entry);
keys[size - 1] = null;
values[size - 1] = null;
size--;
// Structural modification: fail-fast iterators must notice.
modCount++;
}
/** Removes the entry at the specified index, given the hash of its key. */
void removeEntryKeyHashKnown(int entry, int keyHash) {
removeEntry(entry, keyHash, Hashing.smearedHash(values[entry]));
}
/** Removes the entry at the specified index, given the hash of its value. */
void removeEntryValueHashKnown(int entry, int valueHash) {
removeEntry(entry, Hashing.smearedHash(keys[entry]), valueHash);
}
/**
 * Moves the entry previously positioned at {@code src} to {@code dest}. Assumes the entry
 * previously at {@code src} has already been removed from the data structures.
 */
private void moveEntryToIndex(int src, int dest) {
if (src == dest) {
return;
}
// Re-link the insertion-order neighbors of src to point at dest instead.
int predecessor = prevInInsertionOrder[src];
int successor = nextInInsertionOrder[src];
setSucceeds(predecessor, dest);
setSucceeds(dest, successor);
K key = keys[src];
V value = values[src];
keys[dest] = key;
values[dest] = value;
// update pointers in hashTableKToV
int keyHash = Hashing.smearedHash(key);
int keyBucket = bucket(keyHash);
if (hashTableKToV[keyBucket] == src) {
hashTableKToV[keyBucket] = dest;
} else {
// src is mid-chain: scan for its predecessor and redirect it to dest.
int prevInBucket = hashTableKToV[keyBucket];
for (int entryInBucket = nextInBucketKToV[prevInBucket];
/* should never reach end */ ;
entryInBucket = nextInBucketKToV[entryInBucket]) {
if (entryInBucket == src) {
nextInBucketKToV[prevInBucket] = dest;
break;
}
prevInBucket = entryInBucket;
}
}
nextInBucketKToV[dest] = nextInBucketKToV[src];
nextInBucketKToV[src] = ABSENT;
// update pointers in hashTableVToK
int valueHash = Hashing.smearedHash(value);
int valueBucket = bucket(valueHash);
if (hashTableVToK[valueBucket] == src) {
hashTableVToK[valueBucket] = dest;
} else {
// src is mid-chain: scan for its predecessor and redirect it to dest.
int prevInBucket = hashTableVToK[valueBucket];
for (int entryInBucket = nextInBucketVToK[prevInBucket];
/* should never reach end*/ ;
entryInBucket = nextInBucketVToK[entryInBucket]) {
if (entryInBucket == src) {
nextInBucketVToK[prevInBucket] = dest;
break;
}
prevInBucket = entryInBucket;
}
}
nextInBucketVToK[dest] = nextInBucketVToK[src];
nextInBucketVToK[src] = ABSENT;
}
// Empties the map. The array fills below read `size`, so size must be reset only afterwards.
@Override
public void clear() {
// Null out key/value references so they can be garbage collected.
Arrays.fill(keys, 0, size, null);
Arrays.fill(values, 0, size, null);
// Reset both bucket tables entirely, and the per-entry link arrays for live entries.
Arrays.fill(hashTableKToV, ABSENT);
Arrays.fill(hashTableVToK, ABSENT);
Arrays.fill(nextInBucketKToV, 0, size, ABSENT);
Arrays.fill(nextInBucketVToK, 0, size, ABSENT);
Arrays.fill(prevInInsertionOrder, 0, size, ABSENT);
Arrays.fill(nextInInsertionOrder, 0, size, ABSENT);
size = 0;
firstInInsertionOrder = ENDPOINT;
lastInInsertionOrder = ENDPOINT;
modCount++;
}
/** Shared supertype of keySet, values, entrySet, and inverse.entrySet. */
abstract static class View<
K extends @Nullable Object, V extends @Nullable Object, T extends @Nullable Object>
extends AbstractSet<T> {
final HashBiMap<K, V> biMap;
View(HashBiMap<K, V> biMap) {
this.biMap = biMap;
}
// Converts an entry index into the element type this view exposes (key, value, or entry).
@ParametricNullness
abstract T forEntry(int entry);
@Override
public Iterator<T> iterator() {
return new Iterator<T>() {
// Walks the insertion-order linked list, starting at the first entry.
private int index = biMap.firstInInsertionOrder;
// Entry returned by the most recent next(); ABSENT when remove() is not currently legal.
private int indexToRemove = ABSENT;
private int expectedModCount = biMap.modCount;
// Calls to setValue on inverse entries can move already-visited entries to the end.
// Make sure we don't visit those.
private int remaining = biMap.size;
private void checkForComodification() {
if (biMap.modCount != expectedModCount) {
throw new ConcurrentModificationException();
}
}
@Override
public boolean hasNext() {
checkForComodification();
return index != ENDPOINT && remaining > 0;
}
@Override
@ParametricNullness
public T next() {
if (!hasNext()) {
throw new NoSuchElementException();
}
T result = forEntry(index);
indexToRemove = index;
index = biMap.nextInInsertionOrder[index];
remaining--;
return result;
}
@Override
public void remove() {
checkForComodification();
CollectPreconditions.checkRemove(indexToRemove != ABSENT);
biMap.removeEntry(indexToRemove);
// removeEntry compacts by moving the last entry into the removed slot; if that slot is
// the entry we were about to visit next, follow it to its new position.
if (index == biMap.size) {
index = indexToRemove;
}
indexToRemove = ABSENT;
// Our own removal is an expected modification, not a comodification.
expectedModCount = biMap.modCount;
}
};
}
@Override
public int size() {
return biMap.size;
}
@Override
public void clear() {
biMap.clear();
}
}
@LazyInit private transient Set<K> keySet;

@Override
public Set<K> keySet() {
  // Lazily created view (see @LazyInit); reuse the cached instance once built.
  Set<K> result = keySet;
  if (result == null) {
    result = new KeySet();
    keySet = result;
  }
  return result;
}
/** View of the bimap's keys, in insertion order. */
final class KeySet extends View<K, V, K> {
  KeySet() {
    super(HashBiMap.this);
  }

  @Override
  @ParametricNullness
  K forEntry(int entry) {
    // Safe: forEntry is only invoked for indexes holding live entries.
    return uncheckedCastNullableTToT(keys[entry]);
  }

  @Override
  public boolean contains(@Nullable Object o) {
    return HashBiMap.this.containsKey(o);
  }

  @Override
  public boolean remove(@Nullable Object o) {
    int hash = Hashing.smearedHash(o);
    int entry = findEntryByKey(o, hash);
    if (entry == ABSENT) {
      return false;
    }
    removeEntryKeyHashKnown(entry, hash);
    return true;
  }
}
@LazyInit private transient Set<V> valueSet;

@Override
public Set<V> values() {
  // Lazily created view (see @LazyInit); reuse the cached instance once built.
  Set<V> result = valueSet;
  if (result == null) {
    result = new ValueSet();
    valueSet = result;
  }
  return result;
}
/** View of the bimap's values, in insertion order; values are unique, hence a Set. */
final class ValueSet extends View<K, V, V> {
  ValueSet() {
    super(HashBiMap.this);
  }

  @Override
  @ParametricNullness
  V forEntry(int entry) {
    // Safe: forEntry is only invoked for indexes holding live entries.
    return uncheckedCastNullableTToT(values[entry]);
  }

  @Override
  public boolean contains(@Nullable Object o) {
    return HashBiMap.this.containsValue(o);
  }

  @Override
  public boolean remove(@Nullable Object o) {
    int hash = Hashing.smearedHash(o);
    int entry = findEntryByValue(o, hash);
    if (entry == ABSENT) {
      return false;
    }
    removeEntryValueHashKnown(entry, hash);
    return true;
  }
}
@LazyInit private transient Set<Entry<K, V>> entrySet;

@Override
public Set<Entry<K, V>> entrySet() {
  // Lazily created view (see @LazyInit); reuse the cached instance once built.
  Set<Entry<K, V>> result = entrySet;
  if (result == null) {
    result = new EntrySet();
    entrySet = result;
  }
  return result;
}
/** View of the bimap's entries, in insertion order. */
final class EntrySet extends View<K, V, Entry<K, V>> {
  EntrySet() {
    super(HashBiMap.this);
  }

  @Override
  public boolean contains(@Nullable Object o) {
    if (!(o instanceof Entry)) {
      return false;
    }
    Entry<?, ?> e = (Entry<?, ?>) o;
    Object k = e.getKey();
    Object v = e.getValue();
    int index = findEntryByKey(k);
    // Present only if the key exists AND maps to exactly this value.
    return index != ABSENT && Objects.equals(values[index], v);
  }

  @Override
  @CanIgnoreReturnValue
  public boolean remove(@Nullable Object o) {
    if (!(o instanceof Entry)) {
      return false;
    }
    Entry<?, ?> e = (Entry<?, ?>) o;
    Object k = e.getKey();
    Object v = e.getValue();
    int hash = Hashing.smearedHash(k);
    int index = findEntryByKey(k, hash);
    // Remove only the exact key/value pairing described by the entry.
    if (index != ABSENT && Objects.equals(values[index], v)) {
      removeEntryKeyHashKnown(index, hash);
      return true;
    }
    return false;
  }

  @Override
  Entry<K, V> forEntry(int entry) {
    return new EntryForKey(entry);
  }
}
/**
 * An {@code Entry} implementation that attempts to follow its key around the map -- that is, if
 * the key is moved, deleted, or reinserted, it will account for that -- while not doing any extra
 * work if the key has not moved. One quirk: The {@link #getValue()} method can return {@code
 * null} even for a map which supposedly does not contain null elements, if the key is not present
 * when {@code getValue()} is called.
 */
final class EntryForKey extends AbstractMapEntry<K, V> {
@ParametricNullness final K key;
// Cached guess at the key's current entry index; refreshed by updateIndex().
int index;
EntryForKey(int index) {
// The cast is safe because we call forEntry only for indexes that contain entries.
this.key = uncheckedCastNullableTToT(keys[index]);
this.index = index;
}
// Re-resolves `index` if the cached slot no longer holds this entry's key (the entry may have
// been moved by compaction, removed, or reinserted since the last call).
void updateIndex() {
if (index == ABSENT || index > size || !Objects.equals(keys[index], key)) {
index = findEntryByKey(key);
}
}
@Override
@ParametricNullness
public K getKey() {
return key;
}
@Override
@ParametricNullness
public V getValue() {
updateIndex();
/*
 * If the entry has been removed from the map, we return null, even though that might not be a
 * valid value. That's the best we can do, short of holding a reference to the most recently
 * seen value. And while we *could* do that, we aren't required to: Map.Entry explicitly says
 * that behavior is undefined when the backing map is modified through another API. (It even
 * permits us to throw IllegalStateException. Maybe we should have done that, but we probably
 * shouldn't change now for fear of breaking people.)
 *
 * If the entry is still in the map, then updateIndex ensured that `index` points to the right
 * element. Because that element is present, uncheckedCastNullableTToT is safe.
 */
return (index == ABSENT) ? unsafeNull() : uncheckedCastNullableTToT(values[index]);
}
@Override
@ParametricNullness
public V setValue(@ParametricNullness V value) {
updateIndex();
// Key no longer present: re-insert it with the new value rather than failing.
if (index == ABSENT) {
HashBiMap.this.put(key, value);
return unsafeNull(); // See the discussion in getValue().
}
/*
 * The cast is safe because updateIndex found the entry for this key. (If it hadn't, then we
 * would have returned above.) Thus, we know that it and its corresponding value are in
 * position `index`.
 */
V oldValue = uncheckedCastNullableTToT(values[index]);
if (Objects.equals(oldValue, value)) {
return value;
}
replaceValueInEntry(index, value, false);
return oldValue;
}
}
@LazyInit @RetainedWith private transient @Nullable BiMap<V, K> inverse;

@Override
public BiMap<V, K> inverse() {
  // Lazily created inverse view (see @LazyInit); reuse the cached instance once built.
  BiMap<V, K> result = inverse;
  if (result == null) {
    result = new Inverse<>(this);
    inverse = result;
  }
  return result;
}
/**
 * The inverse view: every operation delegates to the forward {@link HashBiMap} with the roles of
 * keys and values swapped. Serializable, like the forward map.
 */
private static final class Inverse<K extends @Nullable Object, V extends @Nullable Object>
    extends AbstractMap<V, K> implements BiMap<V, K>, Serializable {
  private final HashBiMap<K, V> forward;

  Inverse(HashBiMap<K, V> forward) {
    this.forward = forward;
  }

  @Override
  public int size() {
    return forward.size;
  }

  @Override
  public boolean containsKey(@Nullable Object key) {
    // A key of this view is a value of the forward map.
    return forward.containsValue(key);
  }

  @Override
  public @Nullable K get(@Nullable Object key) {
    return forward.getInverse(key);
  }

  @Override
  public boolean containsValue(@Nullable Object value) {
    return forward.containsKey(value);
  }

  @Override
  @CanIgnoreReturnValue
  public @Nullable K put(@ParametricNullness V value, @ParametricNullness K key) {
    return forward.putInverse(value, key, false);
  }

  @Override
  @CanIgnoreReturnValue
  public @Nullable K forcePut(@ParametricNullness V value, @ParametricNullness K key) {
    return forward.putInverse(value, key, true);
  }

  @Override
  public BiMap<K, V> inverse() {
    return forward;
  }

  @Override
  @CanIgnoreReturnValue
  public @Nullable K remove(@Nullable Object value) {
    return forward.removeInverse(value);
  }

  @Override
  public void clear() {
    forward.clear();
  }

  @Override
  public Set<V> keySet() {
    return forward.values();
  }

  @Override
  public Set<K> values() {
    return forward.keySet();
  }

  private transient Set<Entry<V, K>> inverseEntrySet;

  @Override
  public Set<Entry<V, K>> entrySet() {
    // Lazily created, cached after the first call.
    Set<Entry<V, K>> result = inverseEntrySet;
    if (result == null) {
      result = new InverseEntrySet<K, V>(forward);
      inverseEntrySet = result;
    }
    return result;
  }

  @GwtIncompatible("serialization")
  private void readObject(ObjectInputStream in) throws ClassNotFoundException, IOException {
    in.defaultReadObject();
    // Restore the forward map's back-pointer so forward.inverse() yields this same view.
    this.forward.inverse = this;
  }
}
private static final class InverseEntrySet<K extends @Nullable Object, V extends @Nullable Object>
extends View<K, V, Entry<V, K>> {
InverseEntrySet(HashBiMap<K, V> biMap) {
super(biMap);
}
@Override
public boolean contains(@Nullable Object o) {
if (o instanceof Entry) {
Entry<?, ?> e = (Entry<?, ?>) o;
Object v = e.getKey();
Object k = e.getValue();
int eIndex = biMap.findEntryByValue(v);
return eIndex != ABSENT && Objects.equals(biMap.keys[eIndex], k);
}
return false;
}
@Override
public boolean remove(@Nullable Object o) {
if (o instanceof Entry) {
Entry<?, ?> e = (Entry<?, ?>) o;
Object v = e.getKey();
Object k = e.getValue();
int vHash = Hashing.smearedHash(v);
int eIndex = biMap.findEntryByValue(v, vHash);
if (eIndex != ABSENT && Objects.equals(biMap.keys[eIndex], k)) {
biMap.removeEntryValueHashKnown(eIndex, vHash);
return true;
}
}
return false;
}
@Override
Entry<V, K> forEntry(int entry) {
return new EntryForValue<K, V>(biMap, entry);
}
}
  /**
   * An {@code Entry} implementation that attempts to follow its value around the map -- that is, if
   * the value is moved, deleted, or reinserted, it will account for that -- while not doing any
   * extra work if the value has not moved.
   *
   * <p>This is the inverse-view counterpart of {@code EntryForKey}: here {@code value} (a
   * forward-map value) plays the role of the entry's key, and the forward-map key is the entry's
   * value.
   */
  static final class EntryForValue<K extends @Nullable Object, V extends @Nullable Object>
      extends AbstractMapEntry<V, K> {
    final HashBiMap<K, V> biMap;
    // The forward-map value this entry tracks; fixed for the lifetime of the entry.
    @ParametricNullness final V value;
    // Last known slot of `value` in the bimap's parallel arrays; revalidated by updateIndex().
    int index;

    EntryForValue(HashBiMap<K, V> biMap, int index) {
      this.biMap = biMap;
      // The cast is safe because we call forEntry only for indexes that contain entries.
      this.value = uncheckedCastNullableTToT(biMap.values[index]);
      this.index = index;
    }

    // Re-resolves `index` when the cached slot no longer holds `value` (the entry may have been
    // moved, removed, or reinserted since we last looked). If the value is gone entirely,
    // findEntryByValue leaves `index` at ABSENT.
    private void updateIndex() {
      if (index == ABSENT || index > biMap.size || !Objects.equals(value, biMap.values[index])) {
        index = biMap.findEntryByValue(value);
      }
    }

    @Override
    @ParametricNullness
    public V getKey() {
      return value;
    }

    @Override
    @ParametricNullness
    public K getValue() {
      updateIndex();
      // For discussion of unsafeNull() and uncheckedCastNullableTToT(), see EntryForKey.getValue().
      return (index == ABSENT) ? unsafeNull() : uncheckedCastNullableTToT(biMap.keys[index]);
    }

    @Override
    @ParametricNullness
    public K setValue(@ParametricNullness K key) {
      updateIndex();
      if (index == ABSENT) {
        // Entry no longer present: re-insert the mapping via the inverse-put path.
        biMap.putInverse(value, key, false);
        return unsafeNull(); // see EntryForKey.setValue()
      }
      K oldKey = uncheckedCastNullableTToT(biMap.keys[index]); // see EntryForKey.setValue()
      if (Objects.equals(oldKey, key)) {
        return key;
      }
      biMap.replaceKeyInEntry(index, key, false);
      return oldKey;
    }
  }
  /**
   * @serialData the number of entries, first key, first value, second key, second value, and so on.
   */
  @GwtIncompatible
  @J2ktIncompatible
  private void writeObject(ObjectOutputStream stream) throws IOException {
    // Write non-transient fields first, then the entries in iteration order.
    stream.defaultWriteObject();
    Serialization.writeMap(this, stream);
  }
  // Reconstructs the bimap from the stream written by writeObject: restores default fields, reads
  // the entry count, re-initializes the tables, and replays the entries through put().
  @GwtIncompatible
  @J2ktIncompatible
  private void readObject(ObjectInputStream stream) throws IOException, ClassNotFoundException {
    stream.defaultReadObject();
    int size = Serialization.readCount(stream);
    init(16); // resist hostile attempts to allocate gratuitous heap
    Serialization.populateMap(this, stream, size);
  }
// TODO(cpovirk): Should we have a serialVersionUID here?
}
/*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/redis/cluster/v1/cloud_redis_cluster.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.redis.cluster.v1;
public interface ClusterOrBuilder
extends
// @@protoc_insertion_point(interface_extends:google.cloud.redis.cluster.v1.Cluster)
com.google.protobuf.MessageOrBuilder {
/**
*
*
* <pre>
* Optional. Backups stored in Cloud Storage buckets.
* The Cloud Storage buckets need to be the same region as the clusters.
* Read permission is required to import from the provided Cloud Storage
* objects.
* </pre>
*
* <code>
* .google.cloud.redis.cluster.v1.Cluster.GcsBackupSource gcs_source = 34 [(.google.api.field_behavior) = OPTIONAL];
* </code>
*
* @return Whether the gcsSource field is set.
*/
boolean hasGcsSource();
/**
*
*
* <pre>
* Optional. Backups stored in Cloud Storage buckets.
* The Cloud Storage buckets need to be the same region as the clusters.
* Read permission is required to import from the provided Cloud Storage
* objects.
* </pre>
*
* <code>
* .google.cloud.redis.cluster.v1.Cluster.GcsBackupSource gcs_source = 34 [(.google.api.field_behavior) = OPTIONAL];
* </code>
*
* @return The gcsSource.
*/
com.google.cloud.redis.cluster.v1.Cluster.GcsBackupSource getGcsSource();
/**
*
*
* <pre>
* Optional. Backups stored in Cloud Storage buckets.
* The Cloud Storage buckets need to be the same region as the clusters.
* Read permission is required to import from the provided Cloud Storage
* objects.
* </pre>
*
* <code>
* .google.cloud.redis.cluster.v1.Cluster.GcsBackupSource gcs_source = 34 [(.google.api.field_behavior) = OPTIONAL];
* </code>
*/
com.google.cloud.redis.cluster.v1.Cluster.GcsBackupSourceOrBuilder getGcsSourceOrBuilder();
/**
*
*
* <pre>
* Optional. Backups generated and managed by memorystore service.
* </pre>
*
* <code>
* .google.cloud.redis.cluster.v1.Cluster.ManagedBackupSource managed_backup_source = 35 [(.google.api.field_behavior) = OPTIONAL];
* </code>
*
* @return Whether the managedBackupSource field is set.
*/
boolean hasManagedBackupSource();
/**
*
*
* <pre>
* Optional. Backups generated and managed by memorystore service.
* </pre>
*
* <code>
* .google.cloud.redis.cluster.v1.Cluster.ManagedBackupSource managed_backup_source = 35 [(.google.api.field_behavior) = OPTIONAL];
* </code>
*
* @return The managedBackupSource.
*/
com.google.cloud.redis.cluster.v1.Cluster.ManagedBackupSource getManagedBackupSource();
/**
*
*
* <pre>
* Optional. Backups generated and managed by memorystore service.
* </pre>
*
* <code>
* .google.cloud.redis.cluster.v1.Cluster.ManagedBackupSource managed_backup_source = 35 [(.google.api.field_behavior) = OPTIONAL];
* </code>
*/
com.google.cloud.redis.cluster.v1.Cluster.ManagedBackupSourceOrBuilder
getManagedBackupSourceOrBuilder();
/**
*
*
* <pre>
* Required. Identifier. Unique name of the resource in this scope including
* project and location using the form:
* `projects/{project_id}/locations/{location_id}/clusters/{cluster_id}`
* </pre>
*
* <code>
* string name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.field_behavior) = IDENTIFIER];
* </code>
*
* @return The name.
*/
java.lang.String getName();
/**
*
*
* <pre>
* Required. Identifier. Unique name of the resource in this scope including
* project and location using the form:
* `projects/{project_id}/locations/{location_id}/clusters/{cluster_id}`
* </pre>
*
* <code>
* string name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.field_behavior) = IDENTIFIER];
* </code>
*
* @return The bytes for name.
*/
com.google.protobuf.ByteString getNameBytes();
/**
*
*
* <pre>
* Output only. The timestamp associated with the cluster creation request.
* </pre>
*
* <code>.google.protobuf.Timestamp create_time = 3 [(.google.api.field_behavior) = OUTPUT_ONLY];
* </code>
*
* @return Whether the createTime field is set.
*/
boolean hasCreateTime();
/**
*
*
* <pre>
* Output only. The timestamp associated with the cluster creation request.
* </pre>
*
* <code>.google.protobuf.Timestamp create_time = 3 [(.google.api.field_behavior) = OUTPUT_ONLY];
* </code>
*
* @return The createTime.
*/
com.google.protobuf.Timestamp getCreateTime();
/**
*
*
* <pre>
* Output only. The timestamp associated with the cluster creation request.
* </pre>
*
* <code>.google.protobuf.Timestamp create_time = 3 [(.google.api.field_behavior) = OUTPUT_ONLY];
* </code>
*/
com.google.protobuf.TimestampOrBuilder getCreateTimeOrBuilder();
/**
*
*
* <pre>
* Output only. The current state of this cluster.
* Can be CREATING, READY, UPDATING, DELETING and SUSPENDED
* </pre>
*
* <code>
* .google.cloud.redis.cluster.v1.Cluster.State state = 4 [(.google.api.field_behavior) = OUTPUT_ONLY];
* </code>
*
* @return The enum numeric value on the wire for state.
*/
int getStateValue();
/**
*
*
* <pre>
* Output only. The current state of this cluster.
* Can be CREATING, READY, UPDATING, DELETING and SUSPENDED
* </pre>
*
* <code>
* .google.cloud.redis.cluster.v1.Cluster.State state = 4 [(.google.api.field_behavior) = OUTPUT_ONLY];
* </code>
*
* @return The state.
*/
com.google.cloud.redis.cluster.v1.Cluster.State getState();
/**
*
*
* <pre>
* Output only. System assigned, unique identifier for the cluster.
* </pre>
*
* <code>string uid = 5 [(.google.api.field_behavior) = OUTPUT_ONLY];</code>
*
* @return The uid.
*/
java.lang.String getUid();
/**
*
*
* <pre>
* Output only. System assigned, unique identifier for the cluster.
* </pre>
*
* <code>string uid = 5 [(.google.api.field_behavior) = OUTPUT_ONLY];</code>
*
* @return The bytes for uid.
*/
com.google.protobuf.ByteString getUidBytes();
/**
*
*
* <pre>
* Optional. The number of replica nodes per shard.
* </pre>
*
* <code>optional int32 replica_count = 8 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @return Whether the replicaCount field is set.
*/
boolean hasReplicaCount();
/**
*
*
* <pre>
* Optional. The number of replica nodes per shard.
* </pre>
*
* <code>optional int32 replica_count = 8 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @return The replicaCount.
*/
int getReplicaCount();
/**
*
*
* <pre>
* Optional. The authorization mode of the Redis cluster.
* If not provided, auth feature is disabled for the cluster.
* </pre>
*
* <code>
* .google.cloud.redis.cluster.v1.AuthorizationMode authorization_mode = 11 [(.google.api.field_behavior) = OPTIONAL];
* </code>
*
* @return The enum numeric value on the wire for authorizationMode.
*/
int getAuthorizationModeValue();
/**
*
*
* <pre>
* Optional. The authorization mode of the Redis cluster.
* If not provided, auth feature is disabled for the cluster.
* </pre>
*
* <code>
* .google.cloud.redis.cluster.v1.AuthorizationMode authorization_mode = 11 [(.google.api.field_behavior) = OPTIONAL];
* </code>
*
* @return The authorizationMode.
*/
com.google.cloud.redis.cluster.v1.AuthorizationMode getAuthorizationMode();
/**
*
*
* <pre>
* Optional. The in-transit encryption for the Redis cluster.
* If not provided, encryption is disabled for the cluster.
* </pre>
*
* <code>
* .google.cloud.redis.cluster.v1.TransitEncryptionMode transit_encryption_mode = 12 [(.google.api.field_behavior) = OPTIONAL];
* </code>
*
* @return The enum numeric value on the wire for transitEncryptionMode.
*/
int getTransitEncryptionModeValue();
/**
*
*
* <pre>
* Optional. The in-transit encryption for the Redis cluster.
* If not provided, encryption is disabled for the cluster.
* </pre>
*
* <code>
* .google.cloud.redis.cluster.v1.TransitEncryptionMode transit_encryption_mode = 12 [(.google.api.field_behavior) = OPTIONAL];
* </code>
*
* @return The transitEncryptionMode.
*/
com.google.cloud.redis.cluster.v1.TransitEncryptionMode getTransitEncryptionMode();
/**
*
*
* <pre>
* Output only. Redis memory size in GB for the entire cluster rounded up to
* the next integer.
* </pre>
*
* <code>optional int32 size_gb = 13 [(.google.api.field_behavior) = OUTPUT_ONLY];</code>
*
* @return Whether the sizeGb field is set.
*/
boolean hasSizeGb();
/**
*
*
* <pre>
* Output only. Redis memory size in GB for the entire cluster rounded up to
* the next integer.
* </pre>
*
* <code>optional int32 size_gb = 13 [(.google.api.field_behavior) = OUTPUT_ONLY];</code>
*
* @return The sizeGb.
*/
int getSizeGb();
/**
*
*
* <pre>
* Optional. Number of shards for the Redis cluster.
* </pre>
*
* <code>optional int32 shard_count = 14 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @return Whether the shardCount field is set.
*/
boolean hasShardCount();
/**
*
*
* <pre>
* Optional. Number of shards for the Redis cluster.
* </pre>
*
* <code>optional int32 shard_count = 14 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @return The shardCount.
*/
int getShardCount();
/**
*
*
* <pre>
* Optional. Each PscConfig configures the consumer network where IPs will
* be designated to the cluster for client access through Private Service
* Connect Automation. Currently, only one PscConfig is supported.
* </pre>
*
* <code>
* repeated .google.cloud.redis.cluster.v1.PscConfig psc_configs = 15 [(.google.api.field_behavior) = OPTIONAL];
* </code>
*/
java.util.List<com.google.cloud.redis.cluster.v1.PscConfig> getPscConfigsList();
/**
*
*
* <pre>
* Optional. Each PscConfig configures the consumer network where IPs will
* be designated to the cluster for client access through Private Service
* Connect Automation. Currently, only one PscConfig is supported.
* </pre>
*
* <code>
* repeated .google.cloud.redis.cluster.v1.PscConfig psc_configs = 15 [(.google.api.field_behavior) = OPTIONAL];
* </code>
*/
com.google.cloud.redis.cluster.v1.PscConfig getPscConfigs(int index);
/**
*
*
* <pre>
* Optional. Each PscConfig configures the consumer network where IPs will
* be designated to the cluster for client access through Private Service
* Connect Automation. Currently, only one PscConfig is supported.
* </pre>
*
* <code>
* repeated .google.cloud.redis.cluster.v1.PscConfig psc_configs = 15 [(.google.api.field_behavior) = OPTIONAL];
* </code>
*/
int getPscConfigsCount();
/**
*
*
* <pre>
* Optional. Each PscConfig configures the consumer network where IPs will
* be designated to the cluster for client access through Private Service
* Connect Automation. Currently, only one PscConfig is supported.
* </pre>
*
* <code>
* repeated .google.cloud.redis.cluster.v1.PscConfig psc_configs = 15 [(.google.api.field_behavior) = OPTIONAL];
* </code>
*/
java.util.List<? extends com.google.cloud.redis.cluster.v1.PscConfigOrBuilder>
getPscConfigsOrBuilderList();
/**
*
*
* <pre>
* Optional. Each PscConfig configures the consumer network where IPs will
* be designated to the cluster for client access through Private Service
* Connect Automation. Currently, only one PscConfig is supported.
* </pre>
*
* <code>
* repeated .google.cloud.redis.cluster.v1.PscConfig psc_configs = 15 [(.google.api.field_behavior) = OPTIONAL];
* </code>
*/
com.google.cloud.redis.cluster.v1.PscConfigOrBuilder getPscConfigsOrBuilder(int index);
/**
*
*
* <pre>
* Output only. Endpoints created on each given network, for Redis clients to
* connect to the cluster. Currently only one discovery endpoint is supported.
* </pre>
*
* <code>
* repeated .google.cloud.redis.cluster.v1.DiscoveryEndpoint discovery_endpoints = 16 [(.google.api.field_behavior) = OUTPUT_ONLY];
* </code>
*/
java.util.List<com.google.cloud.redis.cluster.v1.DiscoveryEndpoint> getDiscoveryEndpointsList();
/**
*
*
* <pre>
* Output only. Endpoints created on each given network, for Redis clients to
* connect to the cluster. Currently only one discovery endpoint is supported.
* </pre>
*
* <code>
* repeated .google.cloud.redis.cluster.v1.DiscoveryEndpoint discovery_endpoints = 16 [(.google.api.field_behavior) = OUTPUT_ONLY];
* </code>
*/
com.google.cloud.redis.cluster.v1.DiscoveryEndpoint getDiscoveryEndpoints(int index);
/**
*
*
* <pre>
* Output only. Endpoints created on each given network, for Redis clients to
* connect to the cluster. Currently only one discovery endpoint is supported.
* </pre>
*
* <code>
* repeated .google.cloud.redis.cluster.v1.DiscoveryEndpoint discovery_endpoints = 16 [(.google.api.field_behavior) = OUTPUT_ONLY];
* </code>
*/
int getDiscoveryEndpointsCount();
/**
*
*
* <pre>
* Output only. Endpoints created on each given network, for Redis clients to
* connect to the cluster. Currently only one discovery endpoint is supported.
* </pre>
*
* <code>
* repeated .google.cloud.redis.cluster.v1.DiscoveryEndpoint discovery_endpoints = 16 [(.google.api.field_behavior) = OUTPUT_ONLY];
* </code>
*/
java.util.List<? extends com.google.cloud.redis.cluster.v1.DiscoveryEndpointOrBuilder>
getDiscoveryEndpointsOrBuilderList();
/**
*
*
* <pre>
* Output only. Endpoints created on each given network, for Redis clients to
* connect to the cluster. Currently only one discovery endpoint is supported.
* </pre>
*
* <code>
* repeated .google.cloud.redis.cluster.v1.DiscoveryEndpoint discovery_endpoints = 16 [(.google.api.field_behavior) = OUTPUT_ONLY];
* </code>
*/
com.google.cloud.redis.cluster.v1.DiscoveryEndpointOrBuilder getDiscoveryEndpointsOrBuilder(
int index);
/**
*
*
* <pre>
* Output only. The list of PSC connections that are auto-created through
* service connectivity automation.
* </pre>
*
* <code>
* repeated .google.cloud.redis.cluster.v1.PscConnection psc_connections = 17 [(.google.api.field_behavior) = OUTPUT_ONLY];
* </code>
*/
java.util.List<com.google.cloud.redis.cluster.v1.PscConnection> getPscConnectionsList();
/**
*
*
* <pre>
* Output only. The list of PSC connections that are auto-created through
* service connectivity automation.
* </pre>
*
* <code>
* repeated .google.cloud.redis.cluster.v1.PscConnection psc_connections = 17 [(.google.api.field_behavior) = OUTPUT_ONLY];
* </code>
*/
com.google.cloud.redis.cluster.v1.PscConnection getPscConnections(int index);
/**
*
*
* <pre>
* Output only. The list of PSC connections that are auto-created through
* service connectivity automation.
* </pre>
*
* <code>
* repeated .google.cloud.redis.cluster.v1.PscConnection psc_connections = 17 [(.google.api.field_behavior) = OUTPUT_ONLY];
* </code>
*/
int getPscConnectionsCount();
/**
*
*
* <pre>
* Output only. The list of PSC connections that are auto-created through
* service connectivity automation.
* </pre>
*
* <code>
* repeated .google.cloud.redis.cluster.v1.PscConnection psc_connections = 17 [(.google.api.field_behavior) = OUTPUT_ONLY];
* </code>
*/
java.util.List<? extends com.google.cloud.redis.cluster.v1.PscConnectionOrBuilder>
getPscConnectionsOrBuilderList();
/**
*
*
* <pre>
* Output only. The list of PSC connections that are auto-created through
* service connectivity automation.
* </pre>
*
* <code>
* repeated .google.cloud.redis.cluster.v1.PscConnection psc_connections = 17 [(.google.api.field_behavior) = OUTPUT_ONLY];
* </code>
*/
com.google.cloud.redis.cluster.v1.PscConnectionOrBuilder getPscConnectionsOrBuilder(int index);
/**
*
*
* <pre>
* Output only. Additional information about the current state of the cluster.
* </pre>
*
* <code>
* .google.cloud.redis.cluster.v1.Cluster.StateInfo state_info = 18 [(.google.api.field_behavior) = OUTPUT_ONLY];
* </code>
*
* @return Whether the stateInfo field is set.
*/
boolean hasStateInfo();
/**
*
*
* <pre>
* Output only. Additional information about the current state of the cluster.
* </pre>
*
* <code>
* .google.cloud.redis.cluster.v1.Cluster.StateInfo state_info = 18 [(.google.api.field_behavior) = OUTPUT_ONLY];
* </code>
*
* @return The stateInfo.
*/
com.google.cloud.redis.cluster.v1.Cluster.StateInfo getStateInfo();
/**
*
*
* <pre>
* Output only. Additional information about the current state of the cluster.
* </pre>
*
* <code>
* .google.cloud.redis.cluster.v1.Cluster.StateInfo state_info = 18 [(.google.api.field_behavior) = OUTPUT_ONLY];
* </code>
*/
com.google.cloud.redis.cluster.v1.Cluster.StateInfoOrBuilder getStateInfoOrBuilder();
/**
*
*
* <pre>
* Optional. The type of a redis node in the cluster. NodeType determines the
* underlying machine-type of a redis node.
* </pre>
*
* <code>
* .google.cloud.redis.cluster.v1.NodeType node_type = 19 [(.google.api.field_behavior) = OPTIONAL];
* </code>
*
* @return The enum numeric value on the wire for nodeType.
*/
int getNodeTypeValue();
/**
*
*
* <pre>
* Optional. The type of a redis node in the cluster. NodeType determines the
* underlying machine-type of a redis node.
* </pre>
*
* <code>
* .google.cloud.redis.cluster.v1.NodeType node_type = 19 [(.google.api.field_behavior) = OPTIONAL];
* </code>
*
* @return The nodeType.
*/
com.google.cloud.redis.cluster.v1.NodeType getNodeType();
/**
*
*
* <pre>
* Optional. Persistence config (RDB, AOF) for the cluster.
* </pre>
*
* <code>
* .google.cloud.redis.cluster.v1.ClusterPersistenceConfig persistence_config = 20 [(.google.api.field_behavior) = OPTIONAL];
* </code>
*
* @return Whether the persistenceConfig field is set.
*/
boolean hasPersistenceConfig();
/**
*
*
* <pre>
* Optional. Persistence config (RDB, AOF) for the cluster.
* </pre>
*
* <code>
* .google.cloud.redis.cluster.v1.ClusterPersistenceConfig persistence_config = 20 [(.google.api.field_behavior) = OPTIONAL];
* </code>
*
* @return The persistenceConfig.
*/
com.google.cloud.redis.cluster.v1.ClusterPersistenceConfig getPersistenceConfig();
/**
*
*
* <pre>
* Optional. Persistence config (RDB, AOF) for the cluster.
* </pre>
*
* <code>
* .google.cloud.redis.cluster.v1.ClusterPersistenceConfig persistence_config = 20 [(.google.api.field_behavior) = OPTIONAL];
* </code>
*/
com.google.cloud.redis.cluster.v1.ClusterPersistenceConfigOrBuilder
getPersistenceConfigOrBuilder();
/**
*
*
* <pre>
* Optional. Key/Value pairs of customer overrides for mutable Redis Configs
* </pre>
*
* <code>map<string, string> redis_configs = 21 [(.google.api.field_behavior) = OPTIONAL];
* </code>
*/
int getRedisConfigsCount();
/**
*
*
* <pre>
* Optional. Key/Value pairs of customer overrides for mutable Redis Configs
* </pre>
*
* <code>map<string, string> redis_configs = 21 [(.google.api.field_behavior) = OPTIONAL];
* </code>
*/
boolean containsRedisConfigs(java.lang.String key);
/** Use {@link #getRedisConfigsMap()} instead. */
@java.lang.Deprecated
java.util.Map<java.lang.String, java.lang.String> getRedisConfigs();
/**
*
*
* <pre>
* Optional. Key/Value pairs of customer overrides for mutable Redis Configs
* </pre>
*
* <code>map<string, string> redis_configs = 21 [(.google.api.field_behavior) = OPTIONAL];
* </code>
*/
java.util.Map<java.lang.String, java.lang.String> getRedisConfigsMap();
/**
*
*
* <pre>
* Optional. Key/Value pairs of customer overrides for mutable Redis Configs
* </pre>
*
* <code>map<string, string> redis_configs = 21 [(.google.api.field_behavior) = OPTIONAL];
* </code>
*/
/* nullable */
java.lang.String getRedisConfigsOrDefault(
java.lang.String key,
/* nullable */
java.lang.String defaultValue);
/**
*
*
* <pre>
* Optional. Key/Value pairs of customer overrides for mutable Redis Configs
* </pre>
*
* <code>map<string, string> redis_configs = 21 [(.google.api.field_behavior) = OPTIONAL];
* </code>
*/
java.lang.String getRedisConfigsOrThrow(java.lang.String key);
/**
*
*
* <pre>
* Output only. Precise value of redis memory size in GB for the entire
* cluster.
* </pre>
*
* <code>optional double precise_size_gb = 22 [(.google.api.field_behavior) = OUTPUT_ONLY];</code>
*
* @return Whether the preciseSizeGb field is set.
*/
boolean hasPreciseSizeGb();
/**
*
*
* <pre>
* Output only. Precise value of redis memory size in GB for the entire
* cluster.
* </pre>
*
* <code>optional double precise_size_gb = 22 [(.google.api.field_behavior) = OUTPUT_ONLY];</code>
*
* @return The preciseSizeGb.
*/
double getPreciseSizeGb();
/**
*
*
* <pre>
* Optional. This config will be used to determine how the customer wants us
* to distribute cluster resources within the region.
* </pre>
*
* <code>
* .google.cloud.redis.cluster.v1.ZoneDistributionConfig zone_distribution_config = 23 [(.google.api.field_behavior) = OPTIONAL];
* </code>
*
* @return Whether the zoneDistributionConfig field is set.
*/
boolean hasZoneDistributionConfig();
/**
*
*
* <pre>
* Optional. This config will be used to determine how the customer wants us
* to distribute cluster resources within the region.
* </pre>
*
* <code>
* .google.cloud.redis.cluster.v1.ZoneDistributionConfig zone_distribution_config = 23 [(.google.api.field_behavior) = OPTIONAL];
* </code>
*
* @return The zoneDistributionConfig.
*/
com.google.cloud.redis.cluster.v1.ZoneDistributionConfig getZoneDistributionConfig();
/**
*
*
* <pre>
* Optional. This config will be used to determine how the customer wants us
* to distribute cluster resources within the region.
* </pre>
*
* <code>
* .google.cloud.redis.cluster.v1.ZoneDistributionConfig zone_distribution_config = 23 [(.google.api.field_behavior) = OPTIONAL];
* </code>
*/
com.google.cloud.redis.cluster.v1.ZoneDistributionConfigOrBuilder
getZoneDistributionConfigOrBuilder();
/**
*
*
* <pre>
* Optional. Cross cluster replication config.
* </pre>
*
* <code>
* .google.cloud.redis.cluster.v1.CrossClusterReplicationConfig cross_cluster_replication_config = 24 [(.google.api.field_behavior) = OPTIONAL];
* </code>
*
* @return Whether the crossClusterReplicationConfig field is set.
*/
boolean hasCrossClusterReplicationConfig();
/**
*
*
* <pre>
* Optional. Cross cluster replication config.
* </pre>
*
* <code>
* .google.cloud.redis.cluster.v1.CrossClusterReplicationConfig cross_cluster_replication_config = 24 [(.google.api.field_behavior) = OPTIONAL];
* </code>
*
* @return The crossClusterReplicationConfig.
*/
com.google.cloud.redis.cluster.v1.CrossClusterReplicationConfig
getCrossClusterReplicationConfig();
/**
*
*
* <pre>
* Optional. Cross cluster replication config.
* </pre>
*
* <code>
* .google.cloud.redis.cluster.v1.CrossClusterReplicationConfig cross_cluster_replication_config = 24 [(.google.api.field_behavior) = OPTIONAL];
* </code>
*/
com.google.cloud.redis.cluster.v1.CrossClusterReplicationConfigOrBuilder
getCrossClusterReplicationConfigOrBuilder();
/**
*
*
* <pre>
* Optional. The delete operation will fail when the value is set to true.
* </pre>
*
* <code>optional bool deletion_protection_enabled = 25 [(.google.api.field_behavior) = OPTIONAL];
* </code>
*
* @return Whether the deletionProtectionEnabled field is set.
*/
boolean hasDeletionProtectionEnabled();
/**
*
*
* <pre>
* Optional. The delete operation will fail when the value is set to true.
* </pre>
*
* <code>optional bool deletion_protection_enabled = 25 [(.google.api.field_behavior) = OPTIONAL];
* </code>
*
* @return The deletionProtectionEnabled.
*/
boolean getDeletionProtectionEnabled();
/**
*
*
* <pre>
* Optional. ClusterMaintenancePolicy determines when to allow or deny
* updates.
* </pre>
*
* <code>
* optional .google.cloud.redis.cluster.v1.ClusterMaintenancePolicy maintenance_policy = 26 [(.google.api.field_behavior) = OPTIONAL];
* </code>
*
* @return Whether the maintenancePolicy field is set.
*/
boolean hasMaintenancePolicy();
/**
*
*
* <pre>
* Optional. ClusterMaintenancePolicy determines when to allow or deny
* updates.
* </pre>
*
* <code>
* optional .google.cloud.redis.cluster.v1.ClusterMaintenancePolicy maintenance_policy = 26 [(.google.api.field_behavior) = OPTIONAL];
* </code>
*
* @return The maintenancePolicy.
*/
com.google.cloud.redis.cluster.v1.ClusterMaintenancePolicy getMaintenancePolicy();
/**
*
*
* <pre>
* Optional. ClusterMaintenancePolicy determines when to allow or deny
* updates.
* </pre>
*
* <code>
* optional .google.cloud.redis.cluster.v1.ClusterMaintenancePolicy maintenance_policy = 26 [(.google.api.field_behavior) = OPTIONAL];
* </code>
*/
com.google.cloud.redis.cluster.v1.ClusterMaintenancePolicyOrBuilder
getMaintenancePolicyOrBuilder();
/**
*
*
* <pre>
* Output only. ClusterMaintenanceSchedule Output only Published maintenance
* schedule.
* </pre>
*
* <code>
* optional .google.cloud.redis.cluster.v1.ClusterMaintenanceSchedule maintenance_schedule = 27 [(.google.api.field_behavior) = OUTPUT_ONLY];
* </code>
*
* @return Whether the maintenanceSchedule field is set.
*/
boolean hasMaintenanceSchedule();
/**
*
*
* <pre>
* Output only. ClusterMaintenanceSchedule Output only Published maintenance
* schedule.
* </pre>
*
* <code>
* optional .google.cloud.redis.cluster.v1.ClusterMaintenanceSchedule maintenance_schedule = 27 [(.google.api.field_behavior) = OUTPUT_ONLY];
* </code>
*
* @return The maintenanceSchedule.
*/
com.google.cloud.redis.cluster.v1.ClusterMaintenanceSchedule getMaintenanceSchedule();
/**
*
*
* <pre>
* Output only. ClusterMaintenanceSchedule Output only Published maintenance
* schedule.
* </pre>
*
* <code>
* optional .google.cloud.redis.cluster.v1.ClusterMaintenanceSchedule maintenance_schedule = 27 [(.google.api.field_behavior) = OUTPUT_ONLY];
* </code>
*/
com.google.cloud.redis.cluster.v1.ClusterMaintenanceScheduleOrBuilder
getMaintenanceScheduleOrBuilder();
/**
*
*
* <pre>
* Output only. Service attachment details to configure Psc connections
* </pre>
*
* <code>
* repeated .google.cloud.redis.cluster.v1.PscServiceAttachment psc_service_attachments = 30 [(.google.api.field_behavior) = OUTPUT_ONLY];
* </code>
*/
java.util.List<com.google.cloud.redis.cluster.v1.PscServiceAttachment>
getPscServiceAttachmentsList();
/**
*
*
* <pre>
* Output only. Service attachment details to configure Psc connections
* </pre>
*
* <code>
* repeated .google.cloud.redis.cluster.v1.PscServiceAttachment psc_service_attachments = 30 [(.google.api.field_behavior) = OUTPUT_ONLY];
* </code>
*/
com.google.cloud.redis.cluster.v1.PscServiceAttachment getPscServiceAttachments(int index);
/**
*
*
* <pre>
* Output only. Service attachment details to configure Psc connections
* </pre>
*
* <code>
* repeated .google.cloud.redis.cluster.v1.PscServiceAttachment psc_service_attachments = 30 [(.google.api.field_behavior) = OUTPUT_ONLY];
* </code>
*/
int getPscServiceAttachmentsCount();
/**
*
*
* <pre>
* Output only. Service attachment details to configure Psc connections
* </pre>
*
* <code>
* repeated .google.cloud.redis.cluster.v1.PscServiceAttachment psc_service_attachments = 30 [(.google.api.field_behavior) = OUTPUT_ONLY];
* </code>
*/
java.util.List<? extends com.google.cloud.redis.cluster.v1.PscServiceAttachmentOrBuilder>
getPscServiceAttachmentsOrBuilderList();
/**
*
*
* <pre>
* Output only. Service attachment details to configure Psc connections
* </pre>
*
* <code>
* repeated .google.cloud.redis.cluster.v1.PscServiceAttachment psc_service_attachments = 30 [(.google.api.field_behavior) = OUTPUT_ONLY];
* </code>
*/
com.google.cloud.redis.cluster.v1.PscServiceAttachmentOrBuilder getPscServiceAttachmentsOrBuilder(
int index);
/**
*
*
* <pre>
   * Optional. A list of cluster endpoints.
* </pre>
*
* <code>
* repeated .google.cloud.redis.cluster.v1.ClusterEndpoint cluster_endpoints = 36 [(.google.api.field_behavior) = OPTIONAL];
* </code>
*/
java.util.List<com.google.cloud.redis.cluster.v1.ClusterEndpoint> getClusterEndpointsList();
/**
*
*
* <pre>
   * Optional. A list of cluster endpoints.
* </pre>
*
* <code>
* repeated .google.cloud.redis.cluster.v1.ClusterEndpoint cluster_endpoints = 36 [(.google.api.field_behavior) = OPTIONAL];
* </code>
*/
com.google.cloud.redis.cluster.v1.ClusterEndpoint getClusterEndpoints(int index);
/**
*
*
* <pre>
   * Optional. A list of cluster endpoints.
* </pre>
*
* <code>
* repeated .google.cloud.redis.cluster.v1.ClusterEndpoint cluster_endpoints = 36 [(.google.api.field_behavior) = OPTIONAL];
* </code>
*/
int getClusterEndpointsCount();
/**
*
*
* <pre>
   * Optional. A list of cluster endpoints.
* </pre>
*
* <code>
* repeated .google.cloud.redis.cluster.v1.ClusterEndpoint cluster_endpoints = 36 [(.google.api.field_behavior) = OPTIONAL];
* </code>
*/
java.util.List<? extends com.google.cloud.redis.cluster.v1.ClusterEndpointOrBuilder>
getClusterEndpointsOrBuilderList();
/**
*
*
* <pre>
   * Optional. A list of cluster endpoints.
* </pre>
*
* <code>
* repeated .google.cloud.redis.cluster.v1.ClusterEndpoint cluster_endpoints = 36 [(.google.api.field_behavior) = OPTIONAL];
* </code>
*/
com.google.cloud.redis.cluster.v1.ClusterEndpointOrBuilder getClusterEndpointsOrBuilder(
int index);
/**
*
*
* <pre>
* Optional. Output only. The backup collection full resource name. Example:
* projects/{project}/locations/{location}/backupCollections/{collection}
* </pre>
*
* <code>
* optional string backup_collection = 39 [(.google.api.field_behavior) = OPTIONAL, (.google.api.field_behavior) = OUTPUT_ONLY, (.google.api.resource_reference) = { ... }
* </code>
*
* @return Whether the backupCollection field is set.
*/
boolean hasBackupCollection();
/**
*
*
* <pre>
* Optional. Output only. The backup collection full resource name. Example:
* projects/{project}/locations/{location}/backupCollections/{collection}
* </pre>
*
* <code>
* optional string backup_collection = 39 [(.google.api.field_behavior) = OPTIONAL, (.google.api.field_behavior) = OUTPUT_ONLY, (.google.api.resource_reference) = { ... }
* </code>
*
* @return The backupCollection.
*/
java.lang.String getBackupCollection();
/**
*
*
* <pre>
* Optional. Output only. The backup collection full resource name. Example:
* projects/{project}/locations/{location}/backupCollections/{collection}
* </pre>
*
* <code>
* optional string backup_collection = 39 [(.google.api.field_behavior) = OPTIONAL, (.google.api.field_behavior) = OUTPUT_ONLY, (.google.api.resource_reference) = { ... }
* </code>
*
* @return The bytes for backupCollection.
*/
com.google.protobuf.ByteString getBackupCollectionBytes();
/**
*
*
* <pre>
* Optional. The KMS key used to encrypt the at-rest data of the cluster.
* </pre>
*
* <code>
* optional string kms_key = 40 [(.google.api.field_behavior) = OPTIONAL, (.google.api.resource_reference) = { ... }
* </code>
*
* @return Whether the kmsKey field is set.
*/
boolean hasKmsKey();
/**
*
*
* <pre>
* Optional. The KMS key used to encrypt the at-rest data of the cluster.
* </pre>
*
* <code>
* optional string kms_key = 40 [(.google.api.field_behavior) = OPTIONAL, (.google.api.resource_reference) = { ... }
* </code>
*
* @return The kmsKey.
*/
java.lang.String getKmsKey();
/**
*
*
* <pre>
* Optional. The KMS key used to encrypt the at-rest data of the cluster.
* </pre>
*
* <code>
* optional string kms_key = 40 [(.google.api.field_behavior) = OPTIONAL, (.google.api.resource_reference) = { ... }
* </code>
*
* @return The bytes for kmsKey.
*/
com.google.protobuf.ByteString getKmsKeyBytes();
/**
*
*
* <pre>
* Optional. The automated backup config for the cluster.
* </pre>
*
* <code>
* .google.cloud.redis.cluster.v1.AutomatedBackupConfig automated_backup_config = 42 [(.google.api.field_behavior) = OPTIONAL];
* </code>
*
* @return Whether the automatedBackupConfig field is set.
*/
boolean hasAutomatedBackupConfig();
/**
*
*
* <pre>
* Optional. The automated backup config for the cluster.
* </pre>
*
* <code>
* .google.cloud.redis.cluster.v1.AutomatedBackupConfig automated_backup_config = 42 [(.google.api.field_behavior) = OPTIONAL];
* </code>
*
* @return The automatedBackupConfig.
*/
com.google.cloud.redis.cluster.v1.AutomatedBackupConfig getAutomatedBackupConfig();
/**
*
*
* <pre>
* Optional. The automated backup config for the cluster.
* </pre>
*
* <code>
* .google.cloud.redis.cluster.v1.AutomatedBackupConfig automated_backup_config = 42 [(.google.api.field_behavior) = OPTIONAL];
* </code>
*/
com.google.cloud.redis.cluster.v1.AutomatedBackupConfigOrBuilder
getAutomatedBackupConfigOrBuilder();
/**
*
*
* <pre>
* Output only. Encryption information of the data at rest of the cluster.
* </pre>
*
* <code>
* .google.cloud.redis.cluster.v1.EncryptionInfo encryption_info = 43 [(.google.api.field_behavior) = OUTPUT_ONLY];
* </code>
*
* @return Whether the encryptionInfo field is set.
*/
boolean hasEncryptionInfo();
/**
*
*
* <pre>
* Output only. Encryption information of the data at rest of the cluster.
* </pre>
*
* <code>
* .google.cloud.redis.cluster.v1.EncryptionInfo encryption_info = 43 [(.google.api.field_behavior) = OUTPUT_ONLY];
* </code>
*
* @return The encryptionInfo.
*/
com.google.cloud.redis.cluster.v1.EncryptionInfo getEncryptionInfo();
/**
*
*
* <pre>
* Output only. Encryption information of the data at rest of the cluster.
* </pre>
*
* <code>
* .google.cloud.redis.cluster.v1.EncryptionInfo encryption_info = 43 [(.google.api.field_behavior) = OUTPUT_ONLY];
* </code>
*/
com.google.cloud.redis.cluster.v1.EncryptionInfoOrBuilder getEncryptionInfoOrBuilder();
com.google.cloud.redis.cluster.v1.Cluster.ImportSourcesCase getImportSourcesCase();
}
|
googleapis/google-cloud-java | 37,002 | java-datalabeling/proto-google-cloud-datalabeling-v1beta1/src/main/java/com/google/cloud/datalabeling/v1beta1/ImportDataRequest.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/datalabeling/v1beta1/data_labeling_service.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.datalabeling.v1beta1;
/**
*
*
* <pre>
* Request message for ImportData API.
* </pre>
*
* Protobuf type {@code google.cloud.datalabeling.v1beta1.ImportDataRequest}
*/
public final class ImportDataRequest extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.datalabeling.v1beta1.ImportDataRequest)
ImportDataRequestOrBuilder {
private static final long serialVersionUID = 0L;
// Use ImportDataRequest.newBuilder() to construct.
private ImportDataRequest(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private ImportDataRequest() {
name_ = "";
userEmailAddress_ = "";
}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
return new ImportDataRequest();
}
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.datalabeling.v1beta1.DataLabelingServiceOuterClass
.internal_static_google_cloud_datalabeling_v1beta1_ImportDataRequest_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.datalabeling.v1beta1.DataLabelingServiceOuterClass
.internal_static_google_cloud_datalabeling_v1beta1_ImportDataRequest_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.datalabeling.v1beta1.ImportDataRequest.class,
com.google.cloud.datalabeling.v1beta1.ImportDataRequest.Builder.class);
}
private int bitField0_;
public static final int NAME_FIELD_NUMBER = 1;
@SuppressWarnings("serial")
private volatile java.lang.Object name_ = "";
/**
*
*
* <pre>
* Required. Dataset resource name, format:
* projects/{project_id}/datasets/{dataset_id}
* </pre>
*
* <code>
* string name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return The name.
*/
@java.lang.Override
public java.lang.String getName() {
java.lang.Object ref = name_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
name_ = s;
return s;
}
}
/**
*
*
* <pre>
* Required. Dataset resource name, format:
* projects/{project_id}/datasets/{dataset_id}
* </pre>
*
* <code>
* string name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return The bytes for name.
*/
@java.lang.Override
public com.google.protobuf.ByteString getNameBytes() {
java.lang.Object ref = name_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
name_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
public static final int INPUT_CONFIG_FIELD_NUMBER = 2;
private com.google.cloud.datalabeling.v1beta1.InputConfig inputConfig_;
/**
*
*
* <pre>
* Required. Specify the input source of the data.
* </pre>
*
* <code>
* .google.cloud.datalabeling.v1beta1.InputConfig input_config = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return Whether the inputConfig field is set.
*/
@java.lang.Override
public boolean hasInputConfig() {
return ((bitField0_ & 0x00000001) != 0);
}
/**
*
*
* <pre>
* Required. Specify the input source of the data.
* </pre>
*
* <code>
* .google.cloud.datalabeling.v1beta1.InputConfig input_config = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return The inputConfig.
*/
@java.lang.Override
public com.google.cloud.datalabeling.v1beta1.InputConfig getInputConfig() {
return inputConfig_ == null
? com.google.cloud.datalabeling.v1beta1.InputConfig.getDefaultInstance()
: inputConfig_;
}
/**
*
*
* <pre>
* Required. Specify the input source of the data.
* </pre>
*
* <code>
* .google.cloud.datalabeling.v1beta1.InputConfig input_config = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
@java.lang.Override
public com.google.cloud.datalabeling.v1beta1.InputConfigOrBuilder getInputConfigOrBuilder() {
return inputConfig_ == null
? com.google.cloud.datalabeling.v1beta1.InputConfig.getDefaultInstance()
: inputConfig_;
}
public static final int USER_EMAIL_ADDRESS_FIELD_NUMBER = 3;
@SuppressWarnings("serial")
private volatile java.lang.Object userEmailAddress_ = "";
/**
*
*
* <pre>
* Email of the user who started the import task and should be notified by
* email. If empty no notification will be sent.
* </pre>
*
* <code>string user_email_address = 3;</code>
*
* @return The userEmailAddress.
*/
@java.lang.Override
public java.lang.String getUserEmailAddress() {
java.lang.Object ref = userEmailAddress_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
userEmailAddress_ = s;
return s;
}
}
/**
*
*
* <pre>
* Email of the user who started the import task and should be notified by
* email. If empty no notification will be sent.
* </pre>
*
* <code>string user_email_address = 3;</code>
*
* @return The bytes for userEmailAddress.
*/
@java.lang.Override
public com.google.protobuf.ByteString getUserEmailAddressBytes() {
java.lang.Object ref = userEmailAddress_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
userEmailAddress_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
private byte memoizedIsInitialized = -1;
  /**
   * Reports whether this message is initialized; always resolves to true for
   * this message. The answer is memoized in {@code memoizedIsInitialized}
   * (-1 = not yet computed, 0 = false, 1 = true).
   */
  @java.lang.Override
  public final boolean isInitialized() {
    byte isInitialized = memoizedIsInitialized;
    if (isInitialized == 1) return true;
    if (isInitialized == 0) return false;
    memoizedIsInitialized = 1;
    return true;
  }
  /**
   * Serializes this message to {@code output} in protobuf wire format.
   *
   * <p>String fields (1: name, 3: user_email_address) are written only when
   * non-empty (proto3 default-value elision); the input_config message
   * (field 2) is written only when its presence bit is set. Unknown fields
   * are written last.
   */
  @java.lang.Override
  public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(name_)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 1, name_);
    }
    if (((bitField0_ & 0x00000001) != 0)) {
      output.writeMessage(2, getInputConfig());
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(userEmailAddress_)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 3, userEmailAddress_);
    }
    getUnknownFields().writeTo(output);
  }
  /**
   * Returns the serialized byte size of this message, caching the result in
   * {@code memoizedSize} after the first computation (-1 = not yet computed).
   */
  @java.lang.Override
  public int getSerializedSize() {
    int size = memoizedSize;
    if (size != -1) return size;
    size = 0;
    // Size accounting mirrors writeTo(): strings only when non-empty,
    // the message field only when its presence bit is set.
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(name_)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, name_);
    }
    if (((bitField0_ & 0x00000001) != 0)) {
      size += com.google.protobuf.CodedOutputStream.computeMessageSize(2, getInputConfig());
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(userEmailAddress_)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(3, userEmailAddress_);
    }
    size += getUnknownFields().getSerializedSize();
    memoizedSize = size;
    return size;
  }
  /**
   * Value equality over all fields, including unknown fields.
   */
  @java.lang.Override
  public boolean equals(final java.lang.Object obj) {
    if (obj == this) {
      return true;
    }
    if (!(obj instanceof com.google.cloud.datalabeling.v1beta1.ImportDataRequest)) {
      return super.equals(obj);
    }
    com.google.cloud.datalabeling.v1beta1.ImportDataRequest other =
        (com.google.cloud.datalabeling.v1beta1.ImportDataRequest) obj;
    if (!getName().equals(other.getName())) return false;
    // Message field: presence must match, and contents are compared only
    // when both sides have the field set.
    if (hasInputConfig() != other.hasInputConfig()) return false;
    if (hasInputConfig()) {
      if (!getInputConfig().equals(other.getInputConfig())) return false;
    }
    if (!getUserEmailAddress().equals(other.getUserEmailAddress())) return false;
    if (!getUnknownFields().equals(other.getUnknownFields())) return false;
    return true;
  }
  /**
   * Computes and memoizes a hash code consistent with {@link #equals}.
   */
  @java.lang.Override
  public int hashCode() {
    if (memoizedHashCode != 0) {
      // Cached from a previous call. (A computed hash of 0 is never cached
      // and would be recomputed on every call.)
      return memoizedHashCode;
    }
    int hash = 41;
    hash = (19 * hash) + getDescriptor().hashCode();
    hash = (37 * hash) + NAME_FIELD_NUMBER;
    hash = (53 * hash) + getName().hashCode();
    // input_config contributes only when present, mirroring equals().
    if (hasInputConfig()) {
      hash = (37 * hash) + INPUT_CONFIG_FIELD_NUMBER;
      hash = (53 * hash) + getInputConfig().hashCode();
    }
    hash = (37 * hash) + USER_EMAIL_ADDRESS_FIELD_NUMBER;
    hash = (53 * hash) + getUserEmailAddress().hashCode();
    hash = (29 * hash) + getUnknownFields().hashCode();
    memoizedHashCode = hash;
    return hash;
  }
public static com.google.cloud.datalabeling.v1beta1.ImportDataRequest parseFrom(
java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.datalabeling.v1beta1.ImportDataRequest parseFrom(
java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.datalabeling.v1beta1.ImportDataRequest parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.datalabeling.v1beta1.ImportDataRequest parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.datalabeling.v1beta1.ImportDataRequest parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.datalabeling.v1beta1.ImportDataRequest parseFrom(
byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.datalabeling.v1beta1.ImportDataRequest parseFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.datalabeling.v1beta1.ImportDataRequest parseFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.datalabeling.v1beta1.ImportDataRequest parseDelimitedFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
}
public static com.google.cloud.datalabeling.v1beta1.ImportDataRequest parseDelimitedFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.datalabeling.v1beta1.ImportDataRequest parseFrom(
com.google.protobuf.CodedInputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.datalabeling.v1beta1.ImportDataRequest parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() {
return newBuilder();
}
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(
com.google.cloud.datalabeling.v1beta1.ImportDataRequest prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
*
*
* <pre>
* Request message for ImportData API.
* </pre>
*
* Protobuf type {@code google.cloud.datalabeling.v1beta1.ImportDataRequest}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.cloud.datalabeling.v1beta1.ImportDataRequest)
com.google.cloud.datalabeling.v1beta1.ImportDataRequestOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.datalabeling.v1beta1.DataLabelingServiceOuterClass
.internal_static_google_cloud_datalabeling_v1beta1_ImportDataRequest_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.datalabeling.v1beta1.DataLabelingServiceOuterClass
.internal_static_google_cloud_datalabeling_v1beta1_ImportDataRequest_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.datalabeling.v1beta1.ImportDataRequest.class,
com.google.cloud.datalabeling.v1beta1.ImportDataRequest.Builder.class);
}
// Construct using com.google.cloud.datalabeling.v1beta1.ImportDataRequest.newBuilder()
private Builder() {
maybeForceBuilderInitialization();
}
private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
private void maybeForceBuilderInitialization() {
if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) {
getInputConfigFieldBuilder();
}
}
@java.lang.Override
public Builder clear() {
super.clear();
bitField0_ = 0;
name_ = "";
inputConfig_ = null;
if (inputConfigBuilder_ != null) {
inputConfigBuilder_.dispose();
inputConfigBuilder_ = null;
}
userEmailAddress_ = "";
return this;
}
@java.lang.Override
public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
return com.google.cloud.datalabeling.v1beta1.DataLabelingServiceOuterClass
.internal_static_google_cloud_datalabeling_v1beta1_ImportDataRequest_descriptor;
}
@java.lang.Override
public com.google.cloud.datalabeling.v1beta1.ImportDataRequest getDefaultInstanceForType() {
return com.google.cloud.datalabeling.v1beta1.ImportDataRequest.getDefaultInstance();
}
@java.lang.Override
public com.google.cloud.datalabeling.v1beta1.ImportDataRequest build() {
com.google.cloud.datalabeling.v1beta1.ImportDataRequest result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public com.google.cloud.datalabeling.v1beta1.ImportDataRequest buildPartial() {
com.google.cloud.datalabeling.v1beta1.ImportDataRequest result =
new com.google.cloud.datalabeling.v1beta1.ImportDataRequest(this);
if (bitField0_ != 0) {
buildPartial0(result);
}
onBuilt();
return result;
}
private void buildPartial0(com.google.cloud.datalabeling.v1beta1.ImportDataRequest result) {
int from_bitField0_ = bitField0_;
if (((from_bitField0_ & 0x00000001) != 0)) {
result.name_ = name_;
}
int to_bitField0_ = 0;
if (((from_bitField0_ & 0x00000002) != 0)) {
result.inputConfig_ =
inputConfigBuilder_ == null ? inputConfig_ : inputConfigBuilder_.build();
to_bitField0_ |= 0x00000001;
}
if (((from_bitField0_ & 0x00000004) != 0)) {
result.userEmailAddress_ = userEmailAddress_;
}
result.bitField0_ |= to_bitField0_;
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof com.google.cloud.datalabeling.v1beta1.ImportDataRequest) {
return mergeFrom((com.google.cloud.datalabeling.v1beta1.ImportDataRequest) other);
} else {
super.mergeFrom(other);
return this;
}
}
    /**
     * Merges {@code other} into this builder. String fields are copied only
     * when non-empty in {@code other}; the input_config message is merged
     * recursively via {@link #mergeInputConfig}. Unknown fields are merged
     * last.
     */
    public Builder mergeFrom(com.google.cloud.datalabeling.v1beta1.ImportDataRequest other) {
      // Merging the default instance is a no-op.
      if (other == com.google.cloud.datalabeling.v1beta1.ImportDataRequest.getDefaultInstance())
        return this;
      if (!other.getName().isEmpty()) {
        name_ = other.name_;
        bitField0_ |= 0x00000001;
        onChanged();
      }
      if (other.hasInputConfig()) {
        mergeInputConfig(other.getInputConfig());
      }
      if (!other.getUserEmailAddress().isEmpty()) {
        userEmailAddress_ = other.userEmailAddress_;
        bitField0_ |= 0x00000004;
        onChanged();
      }
      this.mergeUnknownFields(other.getUnknownFields());
      onChanged();
      return this;
    }
@java.lang.Override
public final boolean isInitialized() {
return true;
}
    /**
     * Parses serialized bytes from {@code input} into this builder, merging
     * with any fields already set. Unrecognized fields are preserved in the
     * unknown-field set.
     */
    @java.lang.Override
    public Builder mergeFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      if (extensionRegistry == null) {
        throw new java.lang.NullPointerException();
      }
      try {
        boolean done = false;
        while (!done) {
          // Wire tag = (field_number << 3) | wire_type; all three fields of
          // this message are length-delimited (wire type 2).
          int tag = input.readTag();
          switch (tag) {
            case 0:
              // Tag 0 marks end of input.
              done = true;
              break;
            case 10:
              {
                // Field 1: name.
                name_ = input.readStringRequireUtf8();
                bitField0_ |= 0x00000001;
                break;
              } // case 10
            case 18:
              {
                // Field 2: input_config, parsed into the nested builder.
                input.readMessage(getInputConfigFieldBuilder().getBuilder(), extensionRegistry);
                bitField0_ |= 0x00000002;
                break;
              } // case 18
            case 26:
              {
                // Field 3: user_email_address.
                userEmailAddress_ = input.readStringRequireUtf8();
                bitField0_ |= 0x00000004;
                break;
              } // case 26
            default:
              {
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
          } // switch (tag)
        } // while (!done)
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.unwrapIOException();
      } finally {
        // Notify parents of the (possibly partial) mutation even on error.
        onChanged();
      } // finally
      return this;
    }
private int bitField0_;
private java.lang.Object name_ = "";
/**
*
*
* <pre>
* Required. Dataset resource name, format:
* projects/{project_id}/datasets/{dataset_id}
* </pre>
*
* <code>
* string name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return The name.
*/
public java.lang.String getName() {
java.lang.Object ref = name_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
name_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
*
*
* <pre>
* Required. Dataset resource name, format:
* projects/{project_id}/datasets/{dataset_id}
* </pre>
*
* <code>
* string name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return The bytes for name.
*/
public com.google.protobuf.ByteString getNameBytes() {
java.lang.Object ref = name_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
name_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
*
*
* <pre>
* Required. Dataset resource name, format:
* projects/{project_id}/datasets/{dataset_id}
* </pre>
*
* <code>
* string name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @param value The name to set.
* @return This builder for chaining.
*/
public Builder setName(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
name_ = value;
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. Dataset resource name, format:
* projects/{project_id}/datasets/{dataset_id}
* </pre>
*
* <code>
* string name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return This builder for chaining.
*/
public Builder clearName() {
name_ = getDefaultInstance().getName();
bitField0_ = (bitField0_ & ~0x00000001);
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. Dataset resource name, format:
* projects/{project_id}/datasets/{dataset_id}
* </pre>
*
* <code>
* string name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @param value The bytes for name to set.
* @return This builder for chaining.
*/
public Builder setNameBytes(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
checkByteStringIsUtf8(value);
name_ = value;
bitField0_ |= 0x00000001;
onChanged();
return this;
}
private com.google.cloud.datalabeling.v1beta1.InputConfig inputConfig_;
private com.google.protobuf.SingleFieldBuilderV3<
com.google.cloud.datalabeling.v1beta1.InputConfig,
com.google.cloud.datalabeling.v1beta1.InputConfig.Builder,
com.google.cloud.datalabeling.v1beta1.InputConfigOrBuilder>
inputConfigBuilder_;
/**
*
*
* <pre>
* Required. Specify the input source of the data.
* </pre>
*
* <code>
* .google.cloud.datalabeling.v1beta1.InputConfig input_config = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return Whether the inputConfig field is set.
*/
public boolean hasInputConfig() {
return ((bitField0_ & 0x00000002) != 0);
}
/**
*
*
* <pre>
* Required. Specify the input source of the data.
* </pre>
*
* <code>
* .google.cloud.datalabeling.v1beta1.InputConfig input_config = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return The inputConfig.
*/
public com.google.cloud.datalabeling.v1beta1.InputConfig getInputConfig() {
if (inputConfigBuilder_ == null) {
return inputConfig_ == null
? com.google.cloud.datalabeling.v1beta1.InputConfig.getDefaultInstance()
: inputConfig_;
} else {
return inputConfigBuilder_.getMessage();
}
}
/**
*
*
* <pre>
* Required. Specify the input source of the data.
* </pre>
*
* <code>
* .google.cloud.datalabeling.v1beta1.InputConfig input_config = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public Builder setInputConfig(com.google.cloud.datalabeling.v1beta1.InputConfig value) {
if (inputConfigBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
inputConfig_ = value;
} else {
inputConfigBuilder_.setMessage(value);
}
bitField0_ |= 0x00000002;
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. Specify the input source of the data.
* </pre>
*
* <code>
* .google.cloud.datalabeling.v1beta1.InputConfig input_config = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public Builder setInputConfig(
com.google.cloud.datalabeling.v1beta1.InputConfig.Builder builderForValue) {
if (inputConfigBuilder_ == null) {
inputConfig_ = builderForValue.build();
} else {
inputConfigBuilder_.setMessage(builderForValue.build());
}
bitField0_ |= 0x00000002;
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. Specify the input source of the data.
* </pre>
*
* <code>
* .google.cloud.datalabeling.v1beta1.InputConfig input_config = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
    public Builder mergeInputConfig(com.google.cloud.datalabeling.v1beta1.InputConfig value) {
      if (inputConfigBuilder_ == null) {
        // No nested builder: field-merge in place when a non-default value is
        // already set; otherwise just adopt the incoming message.
        if (((bitField0_ & 0x00000002) != 0)
            && inputConfig_ != null
            && inputConfig_
                != com.google.cloud.datalabeling.v1beta1.InputConfig.getDefaultInstance()) {
          getInputConfigBuilder().mergeFrom(value);
        } else {
          inputConfig_ = value;
        }
      } else {
        // A nested builder exists; delegate the merge to it.
        inputConfigBuilder_.mergeFrom(value);
      }
      if (inputConfig_ != null) {
        bitField0_ |= 0x00000002;
        onChanged();
      }
      return this;
    }
/**
*
*
* <pre>
* Required. Specify the input source of the data.
* </pre>
*
* <code>
* .google.cloud.datalabeling.v1beta1.InputConfig input_config = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public Builder clearInputConfig() {
bitField0_ = (bitField0_ & ~0x00000002);
inputConfig_ = null;
if (inputConfigBuilder_ != null) {
inputConfigBuilder_.dispose();
inputConfigBuilder_ = null;
}
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. Specify the input source of the data.
* </pre>
*
* <code>
* .google.cloud.datalabeling.v1beta1.InputConfig input_config = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public com.google.cloud.datalabeling.v1beta1.InputConfig.Builder getInputConfigBuilder() {
bitField0_ |= 0x00000002;
onChanged();
return getInputConfigFieldBuilder().getBuilder();
}
/**
*
*
* <pre>
* Required. Specify the input source of the data.
* </pre>
*
* <code>
* .google.cloud.datalabeling.v1beta1.InputConfig input_config = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public com.google.cloud.datalabeling.v1beta1.InputConfigOrBuilder getInputConfigOrBuilder() {
if (inputConfigBuilder_ != null) {
return inputConfigBuilder_.getMessageOrBuilder();
} else {
return inputConfig_ == null
? com.google.cloud.datalabeling.v1beta1.InputConfig.getDefaultInstance()
: inputConfig_;
}
}
  /**
   *
   *
   * <pre>
   * Required. Specify the input source of the data.
   * </pre>
   *
   * <code>
   * .google.cloud.datalabeling.v1beta1.InputConfig input_config = 2 [(.google.api.field_behavior) = REQUIRED];
   * </code>
   */
  private com.google.protobuf.SingleFieldBuilderV3<
          com.google.cloud.datalabeling.v1beta1.InputConfig,
          com.google.cloud.datalabeling.v1beta1.InputConfig.Builder,
          com.google.cloud.datalabeling.v1beta1.InputConfigOrBuilder>
      getInputConfigFieldBuilder() {
    // Lazily create the single-field builder; once created it takes ownership of
    // the field state, so the raw message reference is released.
    if (inputConfigBuilder_ == null) {
      inputConfigBuilder_ =
          new com.google.protobuf.SingleFieldBuilderV3<
              com.google.cloud.datalabeling.v1beta1.InputConfig,
              com.google.cloud.datalabeling.v1beta1.InputConfig.Builder,
              com.google.cloud.datalabeling.v1beta1.InputConfigOrBuilder>(
              getInputConfig(), getParentForChildren(), isClean());
      inputConfig_ = null;
    }
    return inputConfigBuilder_;
  }
  // Holds either a java.lang.String or a ByteString; decoded lazily on first read.
  private java.lang.Object userEmailAddress_ = "";

  /**
   *
   *
   * <pre>
   * Email of the user who started the import task and should be notified by
   * email. If empty no notification will be sent.
   * </pre>
   *
   * <code>string user_email_address = 3;</code>
   *
   * @return The userEmailAddress.
   */
  public java.lang.String getUserEmailAddress() {
    java.lang.Object ref = userEmailAddress_;
    if (!(ref instanceof java.lang.String)) {
      com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
      java.lang.String s = bs.toStringUtf8();
      // Cache the decoded String so subsequent reads skip the UTF-8 decode.
      userEmailAddress_ = s;
      return s;
    } else {
      return (java.lang.String) ref;
    }
  }
/**
*
*
* <pre>
* Email of the user who started the import task and should be notified by
* email. If empty no notification will be sent.
* </pre>
*
* <code>string user_email_address = 3;</code>
*
* @return The bytes for userEmailAddress.
*/
public com.google.protobuf.ByteString getUserEmailAddressBytes() {
java.lang.Object ref = userEmailAddress_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
userEmailAddress_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
  /**
   *
   *
   * <pre>
   * Email of the user who started the import task and should be notified by
   * email. If empty no notification will be sent.
   * </pre>
   *
   * <code>string user_email_address = 3;</code>
   *
   * @param value The userEmailAddress to set.
   * @return This builder for chaining.
   * @throws NullPointerException if {@code value} is null (proto3 strings are non-null).
   */
  public Builder setUserEmailAddress(java.lang.String value) {
    if (value == null) {
      throw new NullPointerException();
    }
    userEmailAddress_ = value;
    bitField0_ |= 0x00000004;
    onChanged();
    return this;
  }
  /**
   *
   *
   * <pre>
   * Email of the user who started the import task and should be notified by
   * email. If empty no notification will be sent.
   * </pre>
   *
   * <code>string user_email_address = 3;</code>
   *
   * @return This builder for chaining.
   */
  public Builder clearUserEmailAddress() {
    // Reset to the default instance's value (empty string) and clear the has-bit.
    userEmailAddress_ = getDefaultInstance().getUserEmailAddress();
    bitField0_ = (bitField0_ & ~0x00000004);
    onChanged();
    return this;
  }
  /**
   *
   *
   * <pre>
   * Email of the user who started the import task and should be notified by
   * email. If empty no notification will be sent.
   * </pre>
   *
   * <code>string user_email_address = 3;</code>
   *
   * @param value The bytes for userEmailAddress to set.
   * @return This builder for chaining.
   * @throws NullPointerException if {@code value} is null.
   */
  public Builder setUserEmailAddressBytes(com.google.protobuf.ByteString value) {
    if (value == null) {
      throw new NullPointerException();
    }
    // proto3 string fields must hold valid UTF-8; reject malformed bytes early.
    checkByteStringIsUtf8(value);
    userEmailAddress_ = value;
    bitField0_ |= 0x00000004;
    onChanged();
    return this;
  }
  // Delegates unknown-field replacement to the base generated builder.
  @java.lang.Override
  public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
    return super.setUnknownFields(unknownFields);
  }
  // Delegates unknown-field merging to the base generated builder.
  @java.lang.Override
  public final Builder mergeUnknownFields(
      final com.google.protobuf.UnknownFieldSet unknownFields) {
    return super.mergeUnknownFields(unknownFields);
  }
// @@protoc_insertion_point(builder_scope:google.cloud.datalabeling.v1beta1.ImportDataRequest)
}
// @@protoc_insertion_point(class_scope:google.cloud.datalabeling.v1beta1.ImportDataRequest)
  // Canonical immutable instance with every field at its default value.
  private static final com.google.cloud.datalabeling.v1beta1.ImportDataRequest DEFAULT_INSTANCE;

  static {
    DEFAULT_INSTANCE = new com.google.cloud.datalabeling.v1beta1.ImportDataRequest();
  }

  /** Returns the shared default (all-fields-unset) instance of this message. */
  public static com.google.cloud.datalabeling.v1beta1.ImportDataRequest getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }
  // Wire-format parser; on failure it attaches the partially parsed message to
  // the thrown InvalidProtocolBufferException so callers can inspect it.
  private static final com.google.protobuf.Parser<ImportDataRequest> PARSER =
      new com.google.protobuf.AbstractParser<ImportDataRequest>() {
        @java.lang.Override
        public ImportDataRequest parsePartialFrom(
            com.google.protobuf.CodedInputStream input,
            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
            throws com.google.protobuf.InvalidProtocolBufferException {
          Builder builder = newBuilder();
          try {
            builder.mergeFrom(input, extensionRegistry);
          } catch (com.google.protobuf.InvalidProtocolBufferException e) {
            throw e.setUnfinishedMessage(builder.buildPartial());
          } catch (com.google.protobuf.UninitializedMessageException e) {
            // Missing required fields: surface as an InvalidProtocolBufferException.
            throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
          } catch (java.io.IOException e) {
            throw new com.google.protobuf.InvalidProtocolBufferException(e)
                .setUnfinishedMessage(builder.buildPartial());
          }
          return builder.buildPartial();
        }
      };
  /** Returns the singleton parser for this message type. */
  public static com.google.protobuf.Parser<ImportDataRequest> parser() {
    return PARSER;
  }
  // Instance accessor required by the Message interface; same singleton parser.
  @java.lang.Override
  public com.google.protobuf.Parser<ImportDataRequest> getParserForType() {
    return PARSER;
  }
  // Instance accessor required by the Message interface; same shared default.
  @java.lang.Override
  public com.google.cloud.datalabeling.v1beta1.ImportDataRequest getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }
}
/*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.cloud.contentwarehouse.v1;
import static io.grpc.MethodDescriptor.generateFullMethodName;
/**
*
*
* <pre>
* Service to manage customer specific RuleSets.
* </pre>
*/
@javax.annotation.Generated(
value = "by gRPC proto compiler",
comments = "Source: google/cloud/contentwarehouse/v1/ruleset_service.proto")
@io.grpc.stub.annotations.GrpcGenerated
public final class RuleSetServiceGrpc {
  // Not instantiable: this class only hosts static descriptors and stub factories.
  private RuleSetServiceGrpc() {}

  /** Fully qualified proto service name used to build method descriptors. */
  public static final java.lang.String SERVICE_NAME =
      "google.cloud.contentwarehouse.v1.RuleSetService";
  // Static method descriptors that strictly reflect the proto.
  private static volatile io.grpc.MethodDescriptor<
          com.google.cloud.contentwarehouse.v1.CreateRuleSetRequest,
          com.google.cloud.contentwarehouse.v1.RuleSet>
      getCreateRuleSetMethod;

  @io.grpc.stub.annotations.RpcMethod(
      fullMethodName = SERVICE_NAME + '/' + "CreateRuleSet",
      requestType = com.google.cloud.contentwarehouse.v1.CreateRuleSetRequest.class,
      responseType = com.google.cloud.contentwarehouse.v1.RuleSet.class,
      methodType = io.grpc.MethodDescriptor.MethodType.UNARY)
  public static io.grpc.MethodDescriptor<
          com.google.cloud.contentwarehouse.v1.CreateRuleSetRequest,
          com.google.cloud.contentwarehouse.v1.RuleSet>
      getCreateRuleSetMethod() {
    io.grpc.MethodDescriptor<
            com.google.cloud.contentwarehouse.v1.CreateRuleSetRequest,
            com.google.cloud.contentwarehouse.v1.RuleSet>
        getCreateRuleSetMethod;
    // Double-checked locking on the volatile static field: build the immutable
    // descriptor once, then serve the cached instance.
    if ((getCreateRuleSetMethod = RuleSetServiceGrpc.getCreateRuleSetMethod) == null) {
      synchronized (RuleSetServiceGrpc.class) {
        if ((getCreateRuleSetMethod = RuleSetServiceGrpc.getCreateRuleSetMethod) == null) {
          RuleSetServiceGrpc.getCreateRuleSetMethod =
              getCreateRuleSetMethod =
                  io.grpc.MethodDescriptor
                      .<com.google.cloud.contentwarehouse.v1.CreateRuleSetRequest,
                          com.google.cloud.contentwarehouse.v1.RuleSet>
                          newBuilder()
                      .setType(io.grpc.MethodDescriptor.MethodType.UNARY)
                      .setFullMethodName(generateFullMethodName(SERVICE_NAME, "CreateRuleSet"))
                      .setSampledToLocalTracing(true)
                      .setRequestMarshaller(
                          io.grpc.protobuf.ProtoUtils.marshaller(
                              com.google.cloud.contentwarehouse.v1.CreateRuleSetRequest
                                  .getDefaultInstance()))
                      .setResponseMarshaller(
                          io.grpc.protobuf.ProtoUtils.marshaller(
                              com.google.cloud.contentwarehouse.v1.RuleSet.getDefaultInstance()))
                      .setSchemaDescriptor(
                          new RuleSetServiceMethodDescriptorSupplier("CreateRuleSet"))
                      .build();
        }
      }
    }
    return getCreateRuleSetMethod;
  }
  private static volatile io.grpc.MethodDescriptor<
          com.google.cloud.contentwarehouse.v1.GetRuleSetRequest,
          com.google.cloud.contentwarehouse.v1.RuleSet>
      getGetRuleSetMethod;

  @io.grpc.stub.annotations.RpcMethod(
      fullMethodName = SERVICE_NAME + '/' + "GetRuleSet",
      requestType = com.google.cloud.contentwarehouse.v1.GetRuleSetRequest.class,
      responseType = com.google.cloud.contentwarehouse.v1.RuleSet.class,
      methodType = io.grpc.MethodDescriptor.MethodType.UNARY)
  public static io.grpc.MethodDescriptor<
          com.google.cloud.contentwarehouse.v1.GetRuleSetRequest,
          com.google.cloud.contentwarehouse.v1.RuleSet>
      getGetRuleSetMethod() {
    io.grpc.MethodDescriptor<
            com.google.cloud.contentwarehouse.v1.GetRuleSetRequest,
            com.google.cloud.contentwarehouse.v1.RuleSet>
        getGetRuleSetMethod;
    // Lazily built and cached via double-checked locking (see getCreateRuleSetMethod).
    if ((getGetRuleSetMethod = RuleSetServiceGrpc.getGetRuleSetMethod) == null) {
      synchronized (RuleSetServiceGrpc.class) {
        if ((getGetRuleSetMethod = RuleSetServiceGrpc.getGetRuleSetMethod) == null) {
          RuleSetServiceGrpc.getGetRuleSetMethod =
              getGetRuleSetMethod =
                  io.grpc.MethodDescriptor
                      .<com.google.cloud.contentwarehouse.v1.GetRuleSetRequest,
                          com.google.cloud.contentwarehouse.v1.RuleSet>
                          newBuilder()
                      .setType(io.grpc.MethodDescriptor.MethodType.UNARY)
                      .setFullMethodName(generateFullMethodName(SERVICE_NAME, "GetRuleSet"))
                      .setSampledToLocalTracing(true)
                      .setRequestMarshaller(
                          io.grpc.protobuf.ProtoUtils.marshaller(
                              com.google.cloud.contentwarehouse.v1.GetRuleSetRequest
                                  .getDefaultInstance()))
                      .setResponseMarshaller(
                          io.grpc.protobuf.ProtoUtils.marshaller(
                              com.google.cloud.contentwarehouse.v1.RuleSet.getDefaultInstance()))
                      .setSchemaDescriptor(new RuleSetServiceMethodDescriptorSupplier("GetRuleSet"))
                      .build();
        }
      }
    }
    return getGetRuleSetMethod;
  }
  private static volatile io.grpc.MethodDescriptor<
          com.google.cloud.contentwarehouse.v1.UpdateRuleSetRequest,
          com.google.cloud.contentwarehouse.v1.RuleSet>
      getUpdateRuleSetMethod;

  @io.grpc.stub.annotations.RpcMethod(
      fullMethodName = SERVICE_NAME + '/' + "UpdateRuleSet",
      requestType = com.google.cloud.contentwarehouse.v1.UpdateRuleSetRequest.class,
      responseType = com.google.cloud.contentwarehouse.v1.RuleSet.class,
      methodType = io.grpc.MethodDescriptor.MethodType.UNARY)
  public static io.grpc.MethodDescriptor<
          com.google.cloud.contentwarehouse.v1.UpdateRuleSetRequest,
          com.google.cloud.contentwarehouse.v1.RuleSet>
      getUpdateRuleSetMethod() {
    io.grpc.MethodDescriptor<
            com.google.cloud.contentwarehouse.v1.UpdateRuleSetRequest,
            com.google.cloud.contentwarehouse.v1.RuleSet>
        getUpdateRuleSetMethod;
    // Lazily built and cached via double-checked locking (see getCreateRuleSetMethod).
    if ((getUpdateRuleSetMethod = RuleSetServiceGrpc.getUpdateRuleSetMethod) == null) {
      synchronized (RuleSetServiceGrpc.class) {
        if ((getUpdateRuleSetMethod = RuleSetServiceGrpc.getUpdateRuleSetMethod) == null) {
          RuleSetServiceGrpc.getUpdateRuleSetMethod =
              getUpdateRuleSetMethod =
                  io.grpc.MethodDescriptor
                      .<com.google.cloud.contentwarehouse.v1.UpdateRuleSetRequest,
                          com.google.cloud.contentwarehouse.v1.RuleSet>
                          newBuilder()
                      .setType(io.grpc.MethodDescriptor.MethodType.UNARY)
                      .setFullMethodName(generateFullMethodName(SERVICE_NAME, "UpdateRuleSet"))
                      .setSampledToLocalTracing(true)
                      .setRequestMarshaller(
                          io.grpc.protobuf.ProtoUtils.marshaller(
                              com.google.cloud.contentwarehouse.v1.UpdateRuleSetRequest
                                  .getDefaultInstance()))
                      .setResponseMarshaller(
                          io.grpc.protobuf.ProtoUtils.marshaller(
                              com.google.cloud.contentwarehouse.v1.RuleSet.getDefaultInstance()))
                      .setSchemaDescriptor(
                          new RuleSetServiceMethodDescriptorSupplier("UpdateRuleSet"))
                      .build();
        }
      }
    }
    return getUpdateRuleSetMethod;
  }
  private static volatile io.grpc.MethodDescriptor<
          com.google.cloud.contentwarehouse.v1.DeleteRuleSetRequest, com.google.protobuf.Empty>
      getDeleteRuleSetMethod;

  @io.grpc.stub.annotations.RpcMethod(
      fullMethodName = SERVICE_NAME + '/' + "DeleteRuleSet",
      requestType = com.google.cloud.contentwarehouse.v1.DeleteRuleSetRequest.class,
      responseType = com.google.protobuf.Empty.class,
      methodType = io.grpc.MethodDescriptor.MethodType.UNARY)
  public static io.grpc.MethodDescriptor<
          com.google.cloud.contentwarehouse.v1.DeleteRuleSetRequest, com.google.protobuf.Empty>
      getDeleteRuleSetMethod() {
    io.grpc.MethodDescriptor<
            com.google.cloud.contentwarehouse.v1.DeleteRuleSetRequest, com.google.protobuf.Empty>
        getDeleteRuleSetMethod;
    // Lazily built and cached via double-checked locking (see getCreateRuleSetMethod).
    if ((getDeleteRuleSetMethod = RuleSetServiceGrpc.getDeleteRuleSetMethod) == null) {
      synchronized (RuleSetServiceGrpc.class) {
        if ((getDeleteRuleSetMethod = RuleSetServiceGrpc.getDeleteRuleSetMethod) == null) {
          RuleSetServiceGrpc.getDeleteRuleSetMethod =
              getDeleteRuleSetMethod =
                  io.grpc.MethodDescriptor
                      .<com.google.cloud.contentwarehouse.v1.DeleteRuleSetRequest,
                          com.google.protobuf.Empty>
                          newBuilder()
                      .setType(io.grpc.MethodDescriptor.MethodType.UNARY)
                      .setFullMethodName(generateFullMethodName(SERVICE_NAME, "DeleteRuleSet"))
                      .setSampledToLocalTracing(true)
                      .setRequestMarshaller(
                          io.grpc.protobuf.ProtoUtils.marshaller(
                              com.google.cloud.contentwarehouse.v1.DeleteRuleSetRequest
                                  .getDefaultInstance()))
                      .setResponseMarshaller(
                          io.grpc.protobuf.ProtoUtils.marshaller(
                              com.google.protobuf.Empty.getDefaultInstance()))
                      .setSchemaDescriptor(
                          new RuleSetServiceMethodDescriptorSupplier("DeleteRuleSet"))
                      .build();
        }
      }
    }
    return getDeleteRuleSetMethod;
  }
  private static volatile io.grpc.MethodDescriptor<
          com.google.cloud.contentwarehouse.v1.ListRuleSetsRequest,
          com.google.cloud.contentwarehouse.v1.ListRuleSetsResponse>
      getListRuleSetsMethod;

  @io.grpc.stub.annotations.RpcMethod(
      fullMethodName = SERVICE_NAME + '/' + "ListRuleSets",
      requestType = com.google.cloud.contentwarehouse.v1.ListRuleSetsRequest.class,
      responseType = com.google.cloud.contentwarehouse.v1.ListRuleSetsResponse.class,
      methodType = io.grpc.MethodDescriptor.MethodType.UNARY)
  public static io.grpc.MethodDescriptor<
          com.google.cloud.contentwarehouse.v1.ListRuleSetsRequest,
          com.google.cloud.contentwarehouse.v1.ListRuleSetsResponse>
      getListRuleSetsMethod() {
    io.grpc.MethodDescriptor<
            com.google.cloud.contentwarehouse.v1.ListRuleSetsRequest,
            com.google.cloud.contentwarehouse.v1.ListRuleSetsResponse>
        getListRuleSetsMethod;
    // Lazily built and cached via double-checked locking (see getCreateRuleSetMethod).
    if ((getListRuleSetsMethod = RuleSetServiceGrpc.getListRuleSetsMethod) == null) {
      synchronized (RuleSetServiceGrpc.class) {
        if ((getListRuleSetsMethod = RuleSetServiceGrpc.getListRuleSetsMethod) == null) {
          RuleSetServiceGrpc.getListRuleSetsMethod =
              getListRuleSetsMethod =
                  io.grpc.MethodDescriptor
                      .<com.google.cloud.contentwarehouse.v1.ListRuleSetsRequest,
                          com.google.cloud.contentwarehouse.v1.ListRuleSetsResponse>
                          newBuilder()
                      .setType(io.grpc.MethodDescriptor.MethodType.UNARY)
                      .setFullMethodName(generateFullMethodName(SERVICE_NAME, "ListRuleSets"))
                      .setSampledToLocalTracing(true)
                      .setRequestMarshaller(
                          io.grpc.protobuf.ProtoUtils.marshaller(
                              com.google.cloud.contentwarehouse.v1.ListRuleSetsRequest
                                  .getDefaultInstance()))
                      .setResponseMarshaller(
                          io.grpc.protobuf.ProtoUtils.marshaller(
                              com.google.cloud.contentwarehouse.v1.ListRuleSetsResponse
                                  .getDefaultInstance()))
                      .setSchemaDescriptor(
                          new RuleSetServiceMethodDescriptorSupplier("ListRuleSets"))
                      .build();
        }
      }
    }
    return getListRuleSetsMethod;
  }
/** Creates a new async stub that supports all call types for the service */
public static RuleSetServiceStub newStub(io.grpc.Channel channel) {
io.grpc.stub.AbstractStub.StubFactory<RuleSetServiceStub> factory =
new io.grpc.stub.AbstractStub.StubFactory<RuleSetServiceStub>() {
@java.lang.Override
public RuleSetServiceStub newStub(
io.grpc.Channel channel, io.grpc.CallOptions callOptions) {
return new RuleSetServiceStub(channel, callOptions);
}
};
return RuleSetServiceStub.newStub(factory, channel);
}
/** Creates a new blocking-style stub that supports all types of calls on the service */
public static RuleSetServiceBlockingV2Stub newBlockingV2Stub(io.grpc.Channel channel) {
io.grpc.stub.AbstractStub.StubFactory<RuleSetServiceBlockingV2Stub> factory =
new io.grpc.stub.AbstractStub.StubFactory<RuleSetServiceBlockingV2Stub>() {
@java.lang.Override
public RuleSetServiceBlockingV2Stub newStub(
io.grpc.Channel channel, io.grpc.CallOptions callOptions) {
return new RuleSetServiceBlockingV2Stub(channel, callOptions);
}
};
return RuleSetServiceBlockingV2Stub.newStub(factory, channel);
}
/**
* Creates a new blocking-style stub that supports unary and streaming output calls on the service
*/
public static RuleSetServiceBlockingStub newBlockingStub(io.grpc.Channel channel) {
io.grpc.stub.AbstractStub.StubFactory<RuleSetServiceBlockingStub> factory =
new io.grpc.stub.AbstractStub.StubFactory<RuleSetServiceBlockingStub>() {
@java.lang.Override
public RuleSetServiceBlockingStub newStub(
io.grpc.Channel channel, io.grpc.CallOptions callOptions) {
return new RuleSetServiceBlockingStub(channel, callOptions);
}
};
return RuleSetServiceBlockingStub.newStub(factory, channel);
}
/** Creates a new ListenableFuture-style stub that supports unary calls on the service */
public static RuleSetServiceFutureStub newFutureStub(io.grpc.Channel channel) {
io.grpc.stub.AbstractStub.StubFactory<RuleSetServiceFutureStub> factory =
new io.grpc.stub.AbstractStub.StubFactory<RuleSetServiceFutureStub>() {
@java.lang.Override
public RuleSetServiceFutureStub newStub(
io.grpc.Channel channel, io.grpc.CallOptions callOptions) {
return new RuleSetServiceFutureStub(channel, callOptions);
}
};
return RuleSetServiceFutureStub.newStub(factory, channel);
}
  /**
   *
   *
   * <pre>
   * Service to manage customer specific RuleSets.
   * </pre>
   */
  public interface AsyncService {

    // Each default implementation reports UNIMPLEMENTED to the observer until a
    // server overrides the corresponding method.

    /**
     *
     *
     * <pre>
     * Creates a ruleset.
     * </pre>
     */
    default void createRuleSet(
        com.google.cloud.contentwarehouse.v1.CreateRuleSetRequest request,
        io.grpc.stub.StreamObserver<com.google.cloud.contentwarehouse.v1.RuleSet>
            responseObserver) {
      io.grpc.stub.ServerCalls.asyncUnimplementedUnaryCall(
          getCreateRuleSetMethod(), responseObserver);
    }

    /**
     *
     *
     * <pre>
     * Gets a ruleset. Returns NOT_FOUND if the ruleset does not exist.
     * </pre>
     */
    default void getRuleSet(
        com.google.cloud.contentwarehouse.v1.GetRuleSetRequest request,
        io.grpc.stub.StreamObserver<com.google.cloud.contentwarehouse.v1.RuleSet>
            responseObserver) {
      io.grpc.stub.ServerCalls.asyncUnimplementedUnaryCall(getGetRuleSetMethod(), responseObserver);
    }

    /**
     *
     *
     * <pre>
     * Updates a ruleset. Returns INVALID_ARGUMENT if the name of the ruleset
     * is non-empty and does not equal the existing name.
     * </pre>
     */
    default void updateRuleSet(
        com.google.cloud.contentwarehouse.v1.UpdateRuleSetRequest request,
        io.grpc.stub.StreamObserver<com.google.cloud.contentwarehouse.v1.RuleSet>
            responseObserver) {
      io.grpc.stub.ServerCalls.asyncUnimplementedUnaryCall(
          getUpdateRuleSetMethod(), responseObserver);
    }

    /**
     *
     *
     * <pre>
     * Deletes a ruleset. Returns NOT_FOUND if the document does not exist.
     * </pre>
     */
    default void deleteRuleSet(
        com.google.cloud.contentwarehouse.v1.DeleteRuleSetRequest request,
        io.grpc.stub.StreamObserver<com.google.protobuf.Empty> responseObserver) {
      io.grpc.stub.ServerCalls.asyncUnimplementedUnaryCall(
          getDeleteRuleSetMethod(), responseObserver);
    }

    /**
     *
     *
     * <pre>
     * Lists rulesets.
     * </pre>
     */
    default void listRuleSets(
        com.google.cloud.contentwarehouse.v1.ListRuleSetsRequest request,
        io.grpc.stub.StreamObserver<com.google.cloud.contentwarehouse.v1.ListRuleSetsResponse>
            responseObserver) {
      io.grpc.stub.ServerCalls.asyncUnimplementedUnaryCall(
          getListRuleSetsMethod(), responseObserver);
    }
  }
  /**
   * Base class for the server implementation of the service RuleSetService.
   *
   * <pre>
   * Service to manage customer specific RuleSets.
   * </pre>
   */
  public abstract static class RuleSetServiceImplBase
      implements io.grpc.BindableService, AsyncService {

    // Binds the AsyncService default/overridden handlers into a server service
    // definition; subclasses override AsyncService methods to implement RPCs.
    @java.lang.Override
    public final io.grpc.ServerServiceDefinition bindService() {
      return RuleSetServiceGrpc.bindService(this);
    }
  }
  /**
   * A stub to allow clients to do asynchronous rpc calls to service RuleSetService.
   *
   * <pre>
   * Service to manage customer specific RuleSets.
   * </pre>
   */
  public static final class RuleSetServiceStub
      extends io.grpc.stub.AbstractAsyncStub<RuleSetServiceStub> {
    private RuleSetServiceStub(io.grpc.Channel channel, io.grpc.CallOptions callOptions) {
      super(channel, callOptions);
    }

    // Required by AbstractStub: produces a copy bound to the given channel/options.
    @java.lang.Override
    protected RuleSetServiceStub build(io.grpc.Channel channel, io.grpc.CallOptions callOptions) {
      return new RuleSetServiceStub(channel, callOptions);
    }

    /**
     *
     *
     * <pre>
     * Creates a ruleset.
     * </pre>
     */
    public void createRuleSet(
        com.google.cloud.contentwarehouse.v1.CreateRuleSetRequest request,
        io.grpc.stub.StreamObserver<com.google.cloud.contentwarehouse.v1.RuleSet>
            responseObserver) {
      io.grpc.stub.ClientCalls.asyncUnaryCall(
          getChannel().newCall(getCreateRuleSetMethod(), getCallOptions()),
          request,
          responseObserver);
    }

    /**
     *
     *
     * <pre>
     * Gets a ruleset. Returns NOT_FOUND if the ruleset does not exist.
     * </pre>
     */
    public void getRuleSet(
        com.google.cloud.contentwarehouse.v1.GetRuleSetRequest request,
        io.grpc.stub.StreamObserver<com.google.cloud.contentwarehouse.v1.RuleSet>
            responseObserver) {
      io.grpc.stub.ClientCalls.asyncUnaryCall(
          getChannel().newCall(getGetRuleSetMethod(), getCallOptions()), request, responseObserver);
    }

    /**
     *
     *
     * <pre>
     * Updates a ruleset. Returns INVALID_ARGUMENT if the name of the ruleset
     * is non-empty and does not equal the existing name.
     * </pre>
     */
    public void updateRuleSet(
        com.google.cloud.contentwarehouse.v1.UpdateRuleSetRequest request,
        io.grpc.stub.StreamObserver<com.google.cloud.contentwarehouse.v1.RuleSet>
            responseObserver) {
      io.grpc.stub.ClientCalls.asyncUnaryCall(
          getChannel().newCall(getUpdateRuleSetMethod(), getCallOptions()),
          request,
          responseObserver);
    }

    /**
     *
     *
     * <pre>
     * Deletes a ruleset. Returns NOT_FOUND if the document does not exist.
     * </pre>
     */
    public void deleteRuleSet(
        com.google.cloud.contentwarehouse.v1.DeleteRuleSetRequest request,
        io.grpc.stub.StreamObserver<com.google.protobuf.Empty> responseObserver) {
      io.grpc.stub.ClientCalls.asyncUnaryCall(
          getChannel().newCall(getDeleteRuleSetMethod(), getCallOptions()),
          request,
          responseObserver);
    }

    /**
     *
     *
     * <pre>
     * Lists rulesets.
     * </pre>
     */
    public void listRuleSets(
        com.google.cloud.contentwarehouse.v1.ListRuleSetsRequest request,
        io.grpc.stub.StreamObserver<com.google.cloud.contentwarehouse.v1.ListRuleSetsResponse>
            responseObserver) {
      io.grpc.stub.ClientCalls.asyncUnaryCall(
          getChannel().newCall(getListRuleSetsMethod(), getCallOptions()),
          request,
          responseObserver);
    }
  }
  /**
   * A stub to allow clients to do synchronous rpc calls to service RuleSetService.
   *
   * <pre>
   * Service to manage customer specific RuleSets.
   * </pre>
   */
  public static final class RuleSetServiceBlockingV2Stub
      extends io.grpc.stub.AbstractBlockingStub<RuleSetServiceBlockingV2Stub> {
    private RuleSetServiceBlockingV2Stub(io.grpc.Channel channel, io.grpc.CallOptions callOptions) {
      super(channel, callOptions);
    }

    // Required by AbstractStub: produces a copy bound to the given channel/options.
    @java.lang.Override
    protected RuleSetServiceBlockingV2Stub build(
        io.grpc.Channel channel, io.grpc.CallOptions callOptions) {
      return new RuleSetServiceBlockingV2Stub(channel, callOptions);
    }

    /**
     *
     *
     * <pre>
     * Creates a ruleset.
     * </pre>
     */
    public com.google.cloud.contentwarehouse.v1.RuleSet createRuleSet(
        com.google.cloud.contentwarehouse.v1.CreateRuleSetRequest request) {
      return io.grpc.stub.ClientCalls.blockingUnaryCall(
          getChannel(), getCreateRuleSetMethod(), getCallOptions(), request);
    }

    /**
     *
     *
     * <pre>
     * Gets a ruleset. Returns NOT_FOUND if the ruleset does not exist.
     * </pre>
     */
    public com.google.cloud.contentwarehouse.v1.RuleSet getRuleSet(
        com.google.cloud.contentwarehouse.v1.GetRuleSetRequest request) {
      return io.grpc.stub.ClientCalls.blockingUnaryCall(
          getChannel(), getGetRuleSetMethod(), getCallOptions(), request);
    }

    /**
     *
     *
     * <pre>
     * Updates a ruleset. Returns INVALID_ARGUMENT if the name of the ruleset
     * is non-empty and does not equal the existing name.
     * </pre>
     */
    public com.google.cloud.contentwarehouse.v1.RuleSet updateRuleSet(
        com.google.cloud.contentwarehouse.v1.UpdateRuleSetRequest request) {
      return io.grpc.stub.ClientCalls.blockingUnaryCall(
          getChannel(), getUpdateRuleSetMethod(), getCallOptions(), request);
    }

    /**
     *
     *
     * <pre>
     * Deletes a ruleset. Returns NOT_FOUND if the document does not exist.
     * </pre>
     */
    public com.google.protobuf.Empty deleteRuleSet(
        com.google.cloud.contentwarehouse.v1.DeleteRuleSetRequest request) {
      return io.grpc.stub.ClientCalls.blockingUnaryCall(
          getChannel(), getDeleteRuleSetMethod(), getCallOptions(), request);
    }

    /**
     *
     *
     * <pre>
     * Lists rulesets.
     * </pre>
     */
    public com.google.cloud.contentwarehouse.v1.ListRuleSetsResponse listRuleSets(
        com.google.cloud.contentwarehouse.v1.ListRuleSetsRequest request) {
      return io.grpc.stub.ClientCalls.blockingUnaryCall(
          getChannel(), getListRuleSetsMethod(), getCallOptions(), request);
    }
  }
  /**
   * A stub to allow clients to do limited synchronous rpc calls to service RuleSetService.
   *
   * <pre>
   * Service to manage customer specific RuleSets.
   * </pre>
   */
  public static final class RuleSetServiceBlockingStub
      extends io.grpc.stub.AbstractBlockingStub<RuleSetServiceBlockingStub> {
    private RuleSetServiceBlockingStub(io.grpc.Channel channel, io.grpc.CallOptions callOptions) {
      super(channel, callOptions);
    }

    // Required by AbstractStub: produces a copy bound to the given channel/options.
    @java.lang.Override
    protected RuleSetServiceBlockingStub build(
        io.grpc.Channel channel, io.grpc.CallOptions callOptions) {
      return new RuleSetServiceBlockingStub(channel, callOptions);
    }

    /**
     *
     *
     * <pre>
     * Creates a ruleset.
     * </pre>
     */
    public com.google.cloud.contentwarehouse.v1.RuleSet createRuleSet(
        com.google.cloud.contentwarehouse.v1.CreateRuleSetRequest request) {
      return io.grpc.stub.ClientCalls.blockingUnaryCall(
          getChannel(), getCreateRuleSetMethod(), getCallOptions(), request);
    }

    /**
     *
     *
     * <pre>
     * Gets a ruleset. Returns NOT_FOUND if the ruleset does not exist.
     * </pre>
     */
    public com.google.cloud.contentwarehouse.v1.RuleSet getRuleSet(
        com.google.cloud.contentwarehouse.v1.GetRuleSetRequest request) {
      return io.grpc.stub.ClientCalls.blockingUnaryCall(
          getChannel(), getGetRuleSetMethod(), getCallOptions(), request);
    }

    /**
     *
     *
     * <pre>
     * Updates a ruleset. Returns INVALID_ARGUMENT if the name of the ruleset
     * is non-empty and does not equal the existing name.
     * </pre>
     */
    public com.google.cloud.contentwarehouse.v1.RuleSet updateRuleSet(
        com.google.cloud.contentwarehouse.v1.UpdateRuleSetRequest request) {
      return io.grpc.stub.ClientCalls.blockingUnaryCall(
          getChannel(), getUpdateRuleSetMethod(), getCallOptions(), request);
    }

    /**
     *
     *
     * <pre>
     * Deletes a ruleset. Returns NOT_FOUND if the document does not exist.
     * </pre>
     */
    public com.google.protobuf.Empty deleteRuleSet(
        com.google.cloud.contentwarehouse.v1.DeleteRuleSetRequest request) {
      return io.grpc.stub.ClientCalls.blockingUnaryCall(
          getChannel(), getDeleteRuleSetMethod(), getCallOptions(), request);
    }

    /**
     *
     *
     * <pre>
     * Lists rulesets.
     * </pre>
     */
    public com.google.cloud.contentwarehouse.v1.ListRuleSetsResponse listRuleSets(
        com.google.cloud.contentwarehouse.v1.ListRuleSetsRequest request) {
      return io.grpc.stub.ClientCalls.blockingUnaryCall(
          getChannel(), getListRuleSetsMethod(), getCallOptions(), request);
    }
  }
  /**
   * A stub to allow clients to do ListenableFuture-style rpc calls to service RuleSetService.
   *
   * <pre>
   * Service to manage customer specific RuleSets.
   * </pre>
   */
  public static final class RuleSetServiceFutureStub
      extends io.grpc.stub.AbstractFutureStub<RuleSetServiceFutureStub> {
    private RuleSetServiceFutureStub(io.grpc.Channel channel, io.grpc.CallOptions callOptions) {
      super(channel, callOptions);
    }

    // Required by AbstractStub: produces a copy bound to the given channel/options.
    @java.lang.Override
    protected RuleSetServiceFutureStub build(
        io.grpc.Channel channel, io.grpc.CallOptions callOptions) {
      return new RuleSetServiceFutureStub(channel, callOptions);
    }

    /**
     *
     *
     * <pre>
     * Creates a ruleset.
     * </pre>
     */
    public com.google.common.util.concurrent.ListenableFuture<
            com.google.cloud.contentwarehouse.v1.RuleSet>
        createRuleSet(com.google.cloud.contentwarehouse.v1.CreateRuleSetRequest request) {
      return io.grpc.stub.ClientCalls.futureUnaryCall(
          getChannel().newCall(getCreateRuleSetMethod(), getCallOptions()), request);
    }

    /**
     *
     *
     * <pre>
     * Gets a ruleset. Returns NOT_FOUND if the ruleset does not exist.
     * </pre>
     */
    public com.google.common.util.concurrent.ListenableFuture<
            com.google.cloud.contentwarehouse.v1.RuleSet>
        getRuleSet(com.google.cloud.contentwarehouse.v1.GetRuleSetRequest request) {
      return io.grpc.stub.ClientCalls.futureUnaryCall(
          getChannel().newCall(getGetRuleSetMethod(), getCallOptions()), request);
    }

    /**
     *
     *
     * <pre>
     * Updates a ruleset. Returns INVALID_ARGUMENT if the name of the ruleset
     * is non-empty and does not equal the existing name.
     * </pre>
     */
    public com.google.common.util.concurrent.ListenableFuture<
            com.google.cloud.contentwarehouse.v1.RuleSet>
        updateRuleSet(com.google.cloud.contentwarehouse.v1.UpdateRuleSetRequest request) {
      return io.grpc.stub.ClientCalls.futureUnaryCall(
          getChannel().newCall(getUpdateRuleSetMethod(), getCallOptions()), request);
    }

    /**
     *
     *
     * <pre>
     * Deletes a ruleset. Returns NOT_FOUND if the document does not exist.
     * </pre>
     */
    public com.google.common.util.concurrent.ListenableFuture<com.google.protobuf.Empty>
        deleteRuleSet(com.google.cloud.contentwarehouse.v1.DeleteRuleSetRequest request) {
      return io.grpc.stub.ClientCalls.futureUnaryCall(
          getChannel().newCall(getDeleteRuleSetMethod(), getCallOptions()), request);
    }

    /**
     *
     *
     * <pre>
     * Lists rulesets.
     * </pre>
     */
    public com.google.common.util.concurrent.ListenableFuture<
            com.google.cloud.contentwarehouse.v1.ListRuleSetsResponse>
        listRuleSets(com.google.cloud.contentwarehouse.v1.ListRuleSetsRequest request) {
      return io.grpc.stub.ClientCalls.futureUnaryCall(
          getChannel().newCall(getListRuleSetsMethod(), getCallOptions()), request);
    }
  }
  // Dispatch ids used by MethodHandlers.invoke to route requests to the service.
  private static final int METHODID_CREATE_RULE_SET = 0;
  private static final int METHODID_GET_RULE_SET = 1;
  private static final int METHODID_UPDATE_RULE_SET = 2;
  private static final int METHODID_DELETE_RULE_SET = 3;
  private static final int METHODID_LIST_RULE_SETS = 4;
  // Adapts every RPC of the service onto a single handler class; the integer
  // methodId selects which AsyncService method a given instance dispatches to.
  private static final class MethodHandlers<Req, Resp>
      implements io.grpc.stub.ServerCalls.UnaryMethod<Req, Resp>,
          io.grpc.stub.ServerCalls.ServerStreamingMethod<Req, Resp>,
          io.grpc.stub.ServerCalls.ClientStreamingMethod<Req, Resp>,
          io.grpc.stub.ServerCalls.BidiStreamingMethod<Req, Resp> {
    private final AsyncService serviceImpl;
    private final int methodId;

    MethodHandlers(AsyncService serviceImpl, int methodId) {
      this.serviceImpl = serviceImpl;
      this.methodId = methodId;
    }

    // Unary/server-streaming entry point: casts are safe because bindService
    // pairs each methodId with the matching request/response types.
    @java.lang.Override
    @java.lang.SuppressWarnings("unchecked")
    public void invoke(Req request, io.grpc.stub.StreamObserver<Resp> responseObserver) {
      switch (methodId) {
        case METHODID_CREATE_RULE_SET:
          serviceImpl.createRuleSet(
              (com.google.cloud.contentwarehouse.v1.CreateRuleSetRequest) request,
              (io.grpc.stub.StreamObserver<com.google.cloud.contentwarehouse.v1.RuleSet>)
                  responseObserver);
          break;
        case METHODID_GET_RULE_SET:
          serviceImpl.getRuleSet(
              (com.google.cloud.contentwarehouse.v1.GetRuleSetRequest) request,
              (io.grpc.stub.StreamObserver<com.google.cloud.contentwarehouse.v1.RuleSet>)
                  responseObserver);
          break;
        case METHODID_UPDATE_RULE_SET:
          serviceImpl.updateRuleSet(
              (com.google.cloud.contentwarehouse.v1.UpdateRuleSetRequest) request,
              (io.grpc.stub.StreamObserver<com.google.cloud.contentwarehouse.v1.RuleSet>)
                  responseObserver);
          break;
        case METHODID_DELETE_RULE_SET:
          serviceImpl.deleteRuleSet(
              (com.google.cloud.contentwarehouse.v1.DeleteRuleSetRequest) request,
              (io.grpc.stub.StreamObserver<com.google.protobuf.Empty>) responseObserver);
          break;
        case METHODID_LIST_RULE_SETS:
          serviceImpl.listRuleSets(
              (com.google.cloud.contentwarehouse.v1.ListRuleSetsRequest) request,
              (io.grpc.stub.StreamObserver<
                      com.google.cloud.contentwarehouse.v1.ListRuleSetsResponse>)
                  responseObserver);
          break;
        default:
          throw new AssertionError();
      }
    }

    // Streaming-input entry point: this service has no client/bidi streaming
    // methods, so every id is unreachable here.
    @java.lang.Override
    @java.lang.SuppressWarnings("unchecked")
    public io.grpc.stub.StreamObserver<Req> invoke(
        io.grpc.stub.StreamObserver<Resp> responseObserver) {
      switch (methodId) {
        default:
          throw new AssertionError();
      }
    }
  }
  /**
   * Builds the server-side service definition, wiring each RuleSetService RPC to a
   * {@link MethodHandlers} instance tagged with the matching METHODID constant.
   * All five methods are registered as unary calls.
   */
  public static final io.grpc.ServerServiceDefinition bindService(AsyncService service) {
    return io.grpc.ServerServiceDefinition.builder(getServiceDescriptor())
        .addMethod(
            getCreateRuleSetMethod(),
            io.grpc.stub.ServerCalls.asyncUnaryCall(
                new MethodHandlers<
                    com.google.cloud.contentwarehouse.v1.CreateRuleSetRequest,
                    com.google.cloud.contentwarehouse.v1.RuleSet>(
                    service, METHODID_CREATE_RULE_SET)))
        .addMethod(
            getGetRuleSetMethod(),
            io.grpc.stub.ServerCalls.asyncUnaryCall(
                new MethodHandlers<
                    com.google.cloud.contentwarehouse.v1.GetRuleSetRequest,
                    com.google.cloud.contentwarehouse.v1.RuleSet>(service, METHODID_GET_RULE_SET)))
        .addMethod(
            getUpdateRuleSetMethod(),
            io.grpc.stub.ServerCalls.asyncUnaryCall(
                new MethodHandlers<
                    com.google.cloud.contentwarehouse.v1.UpdateRuleSetRequest,
                    com.google.cloud.contentwarehouse.v1.RuleSet>(
                    service, METHODID_UPDATE_RULE_SET)))
        .addMethod(
            getDeleteRuleSetMethod(),
            io.grpc.stub.ServerCalls.asyncUnaryCall(
                new MethodHandlers<
                    com.google.cloud.contentwarehouse.v1.DeleteRuleSetRequest,
                    com.google.protobuf.Empty>(service, METHODID_DELETE_RULE_SET)))
        .addMethod(
            getListRuleSetsMethod(),
            io.grpc.stub.ServerCalls.asyncUnaryCall(
                new MethodHandlers<
                    com.google.cloud.contentwarehouse.v1.ListRuleSetsRequest,
                    com.google.cloud.contentwarehouse.v1.ListRuleSetsResponse>(
                    service, METHODID_LIST_RULE_SETS)))
        .build();
  }
  /**
   * Base supplier exposing the proto file descriptor and the "RuleSetService"
   * service descriptor for reflection/schema support.
   */
  private abstract static class RuleSetServiceBaseDescriptorSupplier
      implements io.grpc.protobuf.ProtoFileDescriptorSupplier,
          io.grpc.protobuf.ProtoServiceDescriptorSupplier {
    RuleSetServiceBaseDescriptorSupplier() {}

    @java.lang.Override
    public com.google.protobuf.Descriptors.FileDescriptor getFileDescriptor() {
      return com.google.cloud.contentwarehouse.v1.RuleSetServiceProto.getDescriptor();
    }

    @java.lang.Override
    public com.google.protobuf.Descriptors.ServiceDescriptor getServiceDescriptor() {
      // Service name must match the proto definition exactly.
      return getFileDescriptor().findServiceByName("RuleSetService");
    }
  }
  /** File-level schema descriptor supplier attached to the ServiceDescriptor. */
  private static final class RuleSetServiceFileDescriptorSupplier
      extends RuleSetServiceBaseDescriptorSupplier {
    RuleSetServiceFileDescriptorSupplier() {}
  }
  /**
   * Per-method schema descriptor supplier; resolves a single RPC's proto
   * MethodDescriptor by its simple name.
   */
  private static final class RuleSetServiceMethodDescriptorSupplier
      extends RuleSetServiceBaseDescriptorSupplier
      implements io.grpc.protobuf.ProtoMethodDescriptorSupplier {
    private final java.lang.String methodName;

    RuleSetServiceMethodDescriptorSupplier(java.lang.String methodName) {
      this.methodName = methodName;
    }

    @java.lang.Override
    public com.google.protobuf.Descriptors.MethodDescriptor getMethodDescriptor() {
      return getServiceDescriptor().findMethodByName(methodName);
    }
  }
  // Lazily-built, cached gRPC ServiceDescriptor; volatile for safe publication.
  private static volatile io.grpc.ServiceDescriptor serviceDescriptor;

  /**
   * Returns the {@link io.grpc.ServiceDescriptor} for RuleSetService, building it
   * on first use. Uses double-checked locking: the volatile read fast-path avoids
   * synchronization once initialized, and the second null check inside the
   * synchronized block prevents duplicate construction under contention.
   */
  public static io.grpc.ServiceDescriptor getServiceDescriptor() {
    io.grpc.ServiceDescriptor result = serviceDescriptor;
    if (result == null) {
      synchronized (RuleSetServiceGrpc.class) {
        result = serviceDescriptor;
        if (result == null) {
          serviceDescriptor =
              result =
                  io.grpc.ServiceDescriptor.newBuilder(SERVICE_NAME)
                      .setSchemaDescriptor(new RuleSetServiceFileDescriptorSupplier())
                      .addMethod(getCreateRuleSetMethod())
                      .addMethod(getGetRuleSetMethod())
                      .addMethod(getUpdateRuleSetMethod())
                      .addMethod(getDeleteRuleSetMethod())
                      .addMethod(getListRuleSetsMethod())
                      .build();
        }
      }
    }
    return result;
  }
}
|
googleapis/google-cloud-java | 37,089 | java-discoveryengine/proto-google-cloud-discoveryengine-v1alpha/src/main/java/com/google/cloud/discoveryengine/v1alpha/ListEvaluationResultsRequest.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/discoveryengine/v1alpha/evaluation_service.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.discoveryengine.v1alpha;
/**
*
*
* <pre>
* Request message for
* [EvaluationService.ListEvaluationResults][google.cloud.discoveryengine.v1alpha.EvaluationService.ListEvaluationResults]
* method.
* </pre>
*
* Protobuf type {@code google.cloud.discoveryengine.v1alpha.ListEvaluationResultsRequest}
*/
public final class ListEvaluationResultsRequest extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.discoveryengine.v1alpha.ListEvaluationResultsRequest)
ListEvaluationResultsRequestOrBuilder {
  private static final long serialVersionUID = 0L;

  // Use ListEvaluationResultsRequest.newBuilder() to construct.
  private ListEvaluationResultsRequest(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
    super(builder);
  }

  // No-arg constructor used only for the default instance; string fields start empty.
  private ListEvaluationResultsRequest() {
    evaluation_ = "";
    pageToken_ = "";
  }

  // Runtime hook used by the protobuf library to create fresh instances via mutable copies.
  @java.lang.Override
  @SuppressWarnings({"unused"})
  protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
    return new ListEvaluationResultsRequest();
  }
  /** Returns the proto Descriptor for this message type. */
  public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
    return com.google.cloud.discoveryengine.v1alpha.EvaluationServiceProto
        .internal_static_google_cloud_discoveryengine_v1alpha_ListEvaluationResultsRequest_descriptor;
  }

  // Maps descriptor fields to the generated Java accessors for reflection-based access.
  @java.lang.Override
  protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internalGetFieldAccessorTable() {
    return com.google.cloud.discoveryengine.v1alpha.EvaluationServiceProto
        .internal_static_google_cloud_discoveryengine_v1alpha_ListEvaluationResultsRequest_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            com.google.cloud.discoveryengine.v1alpha.ListEvaluationResultsRequest.class,
            com.google.cloud.discoveryengine.v1alpha.ListEvaluationResultsRequest.Builder.class);
  }
  public static final int EVALUATION_FIELD_NUMBER = 1;

  // Holds either a String or a ByteString; lazily decoded/encoded and cached by the
  // getters below (standard protobuf lazy string representation).
  @SuppressWarnings("serial")
  private volatile java.lang.Object evaluation_ = "";

  /**
   *
   *
   * <pre>
   * Required. The evaluation resource name, such as
   * `projects/{project}/locations/{location}/evaluations/{evaluation}`.
   *
   * If the caller does not have permission to list [EvaluationResult][]
   * under this evaluation, regardless of whether or not this evaluation
   * set exists, a `PERMISSION_DENIED` error is returned.
   * </pre>
   *
   * <code>
   * string evaluation = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
   * </code>
   *
   * @return The evaluation.
   */
  @java.lang.Override
  public java.lang.String getEvaluation() {
    java.lang.Object ref = evaluation_;
    if (ref instanceof java.lang.String) {
      return (java.lang.String) ref;
    } else {
      // Decode once from UTF-8 bytes and cache the String for later calls.
      com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
      java.lang.String s = bs.toStringUtf8();
      evaluation_ = s;
      return s;
    }
  }

  /**
   *
   *
   * <pre>
   * Required. The evaluation resource name, such as
   * `projects/{project}/locations/{location}/evaluations/{evaluation}`.
   *
   * If the caller does not have permission to list [EvaluationResult][]
   * under this evaluation, regardless of whether or not this evaluation
   * set exists, a `PERMISSION_DENIED` error is returned.
   * </pre>
   *
   * <code>
   * string evaluation = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
   * </code>
   *
   * @return The bytes for evaluation.
   */
  @java.lang.Override
  public com.google.protobuf.ByteString getEvaluationBytes() {
    java.lang.Object ref = evaluation_;
    if (ref instanceof java.lang.String) {
      // Encode once to a ByteString and cache it for later calls.
      com.google.protobuf.ByteString b =
          com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
      evaluation_ = b;
      return b;
    } else {
      return (com.google.protobuf.ByteString) ref;
    }
  }

  public static final int PAGE_SIZE_FIELD_NUMBER = 2;
  private int pageSize_ = 0;

  /**
   *
   *
   * <pre>
   * Maximum number of [EvaluationResult][] to return. If unspecified,
   * defaults to 100. The maximum allowed value is 1000. Values above 1000 will
   * be coerced to 1000.
   *
   * If this field is negative, an `INVALID_ARGUMENT` error is returned.
   * </pre>
   *
   * <code>int32 page_size = 2;</code>
   *
   * @return The pageSize.
   */
  @java.lang.Override
  public int getPageSize() {
    return pageSize_;
  }

  public static final int PAGE_TOKEN_FIELD_NUMBER = 3;

  // Same lazy String/ByteString representation as evaluation_.
  @SuppressWarnings("serial")
  private volatile java.lang.Object pageToken_ = "";

  /**
   *
   *
   * <pre>
   * A page token
   * [ListEvaluationResultsResponse.next_page_token][google.cloud.discoveryengine.v1alpha.ListEvaluationResultsResponse.next_page_token],
   * received from a previous
   * [EvaluationService.ListEvaluationResults][google.cloud.discoveryengine.v1alpha.EvaluationService.ListEvaluationResults]
   * call. Provide this to retrieve the subsequent page.
   *
   * When paginating, all other parameters provided to
   * [EvaluationService.ListEvaluationResults][google.cloud.discoveryengine.v1alpha.EvaluationService.ListEvaluationResults]
   * must match the call that provided the page token. Otherwise, an
   * `INVALID_ARGUMENT` error is returned.
   * </pre>
   *
   * <code>string page_token = 3;</code>
   *
   * @return The pageToken.
   */
  @java.lang.Override
  public java.lang.String getPageToken() {
    java.lang.Object ref = pageToken_;
    if (ref instanceof java.lang.String) {
      return (java.lang.String) ref;
    } else {
      com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
      java.lang.String s = bs.toStringUtf8();
      pageToken_ = s;
      return s;
    }
  }

  /**
   *
   *
   * <pre>
   * A page token
   * [ListEvaluationResultsResponse.next_page_token][google.cloud.discoveryengine.v1alpha.ListEvaluationResultsResponse.next_page_token],
   * received from a previous
   * [EvaluationService.ListEvaluationResults][google.cloud.discoveryengine.v1alpha.EvaluationService.ListEvaluationResults]
   * call. Provide this to retrieve the subsequent page.
   *
   * When paginating, all other parameters provided to
   * [EvaluationService.ListEvaluationResults][google.cloud.discoveryengine.v1alpha.EvaluationService.ListEvaluationResults]
   * must match the call that provided the page token. Otherwise, an
   * `INVALID_ARGUMENT` error is returned.
   * </pre>
   *
   * <code>string page_token = 3;</code>
   *
   * @return The bytes for pageToken.
   */
  @java.lang.Override
  public com.google.protobuf.ByteString getPageTokenBytes() {
    java.lang.Object ref = pageToken_;
    if (ref instanceof java.lang.String) {
      com.google.protobuf.ByteString b =
          com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
      pageToken_ = b;
      return b;
    } else {
      return (com.google.protobuf.ByteString) ref;
    }
  }
  // -1 = not computed, 0 = not initialized, 1 = initialized (memoized).
  private byte memoizedIsInitialized = -1;

  /** Always initialized: this message has no required proto2-style fields. */
  @java.lang.Override
  public final boolean isInitialized() {
    byte isInitialized = memoizedIsInitialized;
    if (isInitialized == 1) return true;
    if (isInitialized == 0) return false;

    memoizedIsInitialized = 1;
    return true;
  }

  // Serializes only non-default fields, per proto3 wire-format rules.
  @java.lang.Override
  public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(evaluation_)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 1, evaluation_);
    }
    if (pageSize_ != 0) {
      output.writeInt32(2, pageSize_);
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(pageToken_)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 3, pageToken_);
    }
    getUnknownFields().writeTo(output);
  }

  // Computes and memoizes the serialized byte size; mirrors writeTo's field checks.
  @java.lang.Override
  public int getSerializedSize() {
    int size = memoizedSize;
    if (size != -1) return size;

    size = 0;
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(evaluation_)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, evaluation_);
    }
    if (pageSize_ != 0) {
      size += com.google.protobuf.CodedOutputStream.computeInt32Size(2, pageSize_);
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(pageToken_)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(3, pageToken_);
    }
    size += getUnknownFields().getSerializedSize();
    memoizedSize = size;
    return size;
  }

  // Field-by-field value equality, including unknown fields.
  @java.lang.Override
  public boolean equals(final java.lang.Object obj) {
    if (obj == this) {
      return true;
    }
    if (!(obj instanceof com.google.cloud.discoveryengine.v1alpha.ListEvaluationResultsRequest)) {
      return super.equals(obj);
    }
    com.google.cloud.discoveryengine.v1alpha.ListEvaluationResultsRequest other =
        (com.google.cloud.discoveryengine.v1alpha.ListEvaluationResultsRequest) obj;

    if (!getEvaluation().equals(other.getEvaluation())) return false;
    if (getPageSize() != other.getPageSize()) return false;
    if (!getPageToken().equals(other.getPageToken())) return false;
    if (!getUnknownFields().equals(other.getUnknownFields())) return false;
    return true;
  }

  // Memoized hash consistent with equals (mixes descriptor, each field, unknown fields).
  @java.lang.Override
  public int hashCode() {
    if (memoizedHashCode != 0) {
      return memoizedHashCode;
    }
    int hash = 41;
    hash = (19 * hash) + getDescriptor().hashCode();
    hash = (37 * hash) + EVALUATION_FIELD_NUMBER;
    hash = (53 * hash) + getEvaluation().hashCode();
    hash = (37 * hash) + PAGE_SIZE_FIELD_NUMBER;
    hash = (53 * hash) + getPageSize();
    hash = (37 * hash) + PAGE_TOKEN_FIELD_NUMBER;
    hash = (53 * hash) + getPageToken().hashCode();
    hash = (29 * hash) + getUnknownFields().hashCode();
    memoizedHashCode = hash;
    return hash;
  }
  // Standard generated parse entry points: one overload per input source
  // (ByteBuffer, ByteString, byte[], InputStream, CodedInputStream), each with and
  // without an ExtensionRegistry. All delegate to PARSER / GeneratedMessageV3 helpers.
  public static com.google.cloud.discoveryengine.v1alpha.ListEvaluationResultsRequest parseFrom(
      java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.cloud.discoveryengine.v1alpha.ListEvaluationResultsRequest parseFrom(
      java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.cloud.discoveryengine.v1alpha.ListEvaluationResultsRequest parseFrom(
      com.google.protobuf.ByteString data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.cloud.discoveryengine.v1alpha.ListEvaluationResultsRequest parseFrom(
      com.google.protobuf.ByteString data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.cloud.discoveryengine.v1alpha.ListEvaluationResultsRequest parseFrom(
      byte[] data) throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.cloud.discoveryengine.v1alpha.ListEvaluationResultsRequest parseFrom(
      byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.cloud.discoveryengine.v1alpha.ListEvaluationResultsRequest parseFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }

  public static com.google.cloud.discoveryengine.v1alpha.ListEvaluationResultsRequest parseFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }

  // Delimited variants read a varint length prefix before the message payload.
  public static com.google.cloud.discoveryengine.v1alpha.ListEvaluationResultsRequest
      parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
  }

  public static com.google.cloud.discoveryengine.v1alpha.ListEvaluationResultsRequest
      parseDelimitedFrom(
          java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
        PARSER, input, extensionRegistry);
  }

  public static com.google.cloud.discoveryengine.v1alpha.ListEvaluationResultsRequest parseFrom(
      com.google.protobuf.CodedInputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }

  public static com.google.cloud.discoveryengine.v1alpha.ListEvaluationResultsRequest parseFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }

  // Builder factory methods.
  @java.lang.Override
  public Builder newBuilderForType() {
    return newBuilder();
  }

  public static Builder newBuilder() {
    return DEFAULT_INSTANCE.toBuilder();
  }

  public static Builder newBuilder(
      com.google.cloud.discoveryengine.v1alpha.ListEvaluationResultsRequest prototype) {
    return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
  }

  // The default instance yields a fresh empty Builder; any other instance seeds the
  // Builder with its current field values.
  @java.lang.Override
  public Builder toBuilder() {
    return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
  }

  @java.lang.Override
  protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
    Builder builder = new Builder(parent);
    return builder;
  }
/**
*
*
* <pre>
* Request message for
* [EvaluationService.ListEvaluationResults][google.cloud.discoveryengine.v1alpha.EvaluationService.ListEvaluationResults]
* method.
* </pre>
*
* Protobuf type {@code google.cloud.discoveryengine.v1alpha.ListEvaluationResultsRequest}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.cloud.discoveryengine.v1alpha.ListEvaluationResultsRequest)
com.google.cloud.discoveryengine.v1alpha.ListEvaluationResultsRequestOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.discoveryengine.v1alpha.EvaluationServiceProto
.internal_static_google_cloud_discoveryengine_v1alpha_ListEvaluationResultsRequest_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.discoveryengine.v1alpha.EvaluationServiceProto
.internal_static_google_cloud_discoveryengine_v1alpha_ListEvaluationResultsRequest_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.discoveryengine.v1alpha.ListEvaluationResultsRequest.class,
com.google.cloud.discoveryengine.v1alpha.ListEvaluationResultsRequest.Builder.class);
}
// Construct using
// com.google.cloud.discoveryengine.v1alpha.ListEvaluationResultsRequest.newBuilder()
private Builder() {}
private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
}
@java.lang.Override
public Builder clear() {
super.clear();
bitField0_ = 0;
evaluation_ = "";
pageSize_ = 0;
pageToken_ = "";
return this;
}
@java.lang.Override
public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
return com.google.cloud.discoveryengine.v1alpha.EvaluationServiceProto
.internal_static_google_cloud_discoveryengine_v1alpha_ListEvaluationResultsRequest_descriptor;
}
@java.lang.Override
public com.google.cloud.discoveryengine.v1alpha.ListEvaluationResultsRequest
getDefaultInstanceForType() {
return com.google.cloud.discoveryengine.v1alpha.ListEvaluationResultsRequest
.getDefaultInstance();
}
@java.lang.Override
public com.google.cloud.discoveryengine.v1alpha.ListEvaluationResultsRequest build() {
com.google.cloud.discoveryengine.v1alpha.ListEvaluationResultsRequest result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public com.google.cloud.discoveryengine.v1alpha.ListEvaluationResultsRequest buildPartial() {
com.google.cloud.discoveryengine.v1alpha.ListEvaluationResultsRequest result =
new com.google.cloud.discoveryengine.v1alpha.ListEvaluationResultsRequest(this);
if (bitField0_ != 0) {
buildPartial0(result);
}
onBuilt();
return result;
}
private void buildPartial0(
com.google.cloud.discoveryengine.v1alpha.ListEvaluationResultsRequest result) {
int from_bitField0_ = bitField0_;
if (((from_bitField0_ & 0x00000001) != 0)) {
result.evaluation_ = evaluation_;
}
if (((from_bitField0_ & 0x00000002) != 0)) {
result.pageSize_ = pageSize_;
}
if (((from_bitField0_ & 0x00000004) != 0)) {
result.pageToken_ = pageToken_;
}
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof com.google.cloud.discoveryengine.v1alpha.ListEvaluationResultsRequest) {
return mergeFrom(
(com.google.cloud.discoveryengine.v1alpha.ListEvaluationResultsRequest) other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(
com.google.cloud.discoveryengine.v1alpha.ListEvaluationResultsRequest other) {
if (other
== com.google.cloud.discoveryengine.v1alpha.ListEvaluationResultsRequest
.getDefaultInstance()) return this;
if (!other.getEvaluation().isEmpty()) {
evaluation_ = other.evaluation_;
bitField0_ |= 0x00000001;
onChanged();
}
if (other.getPageSize() != 0) {
setPageSize(other.getPageSize());
}
if (!other.getPageToken().isEmpty()) {
pageToken_ = other.pageToken_;
bitField0_ |= 0x00000004;
onChanged();
}
this.mergeUnknownFields(other.getUnknownFields());
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
return true;
}
@java.lang.Override
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
case 10:
{
evaluation_ = input.readStringRequireUtf8();
bitField0_ |= 0x00000001;
break;
} // case 10
case 16:
{
pageSize_ = input.readInt32();
bitField0_ |= 0x00000002;
break;
} // case 16
case 26:
{
pageToken_ = input.readStringRequireUtf8();
bitField0_ |= 0x00000004;
break;
} // case 26
default:
{
if (!super.parseUnknownField(input, extensionRegistry, tag)) {
done = true; // was an endgroup tag
}
break;
} // default:
} // switch (tag)
} // while (!done)
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.unwrapIOException();
} finally {
onChanged();
} // finally
return this;
}
private int bitField0_;
private java.lang.Object evaluation_ = "";
/**
*
*
* <pre>
* Required. The evaluation resource name, such as
* `projects/{project}/locations/{location}/evaluations/{evaluation}`.
*
* If the caller does not have permission to list [EvaluationResult][]
* under this evaluation, regardless of whether or not this evaluation
* set exists, a `PERMISSION_DENIED` error is returned.
* </pre>
*
* <code>
* string evaluation = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return The evaluation.
*/
public java.lang.String getEvaluation() {
java.lang.Object ref = evaluation_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
evaluation_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
*
*
* <pre>
* Required. The evaluation resource name, such as
* `projects/{project}/locations/{location}/evaluations/{evaluation}`.
*
* If the caller does not have permission to list [EvaluationResult][]
* under this evaluation, regardless of whether or not this evaluation
* set exists, a `PERMISSION_DENIED` error is returned.
* </pre>
*
* <code>
* string evaluation = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return The bytes for evaluation.
*/
public com.google.protobuf.ByteString getEvaluationBytes() {
java.lang.Object ref = evaluation_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
evaluation_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
*
*
* <pre>
* Required. The evaluation resource name, such as
* `projects/{project}/locations/{location}/evaluations/{evaluation}`.
*
* If the caller does not have permission to list [EvaluationResult][]
* under this evaluation, regardless of whether or not this evaluation
* set exists, a `PERMISSION_DENIED` error is returned.
* </pre>
*
* <code>
* string evaluation = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @param value The evaluation to set.
* @return This builder for chaining.
*/
public Builder setEvaluation(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
evaluation_ = value;
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. The evaluation resource name, such as
* `projects/{project}/locations/{location}/evaluations/{evaluation}`.
*
* If the caller does not have permission to list [EvaluationResult][]
* under this evaluation, regardless of whether or not this evaluation
* set exists, a `PERMISSION_DENIED` error is returned.
* </pre>
*
* <code>
* string evaluation = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return This builder for chaining.
*/
public Builder clearEvaluation() {
evaluation_ = getDefaultInstance().getEvaluation();
bitField0_ = (bitField0_ & ~0x00000001);
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. The evaluation resource name, such as
* `projects/{project}/locations/{location}/evaluations/{evaluation}`.
*
* If the caller does not have permission to list [EvaluationResult][]
* under this evaluation, regardless of whether or not this evaluation
* set exists, a `PERMISSION_DENIED` error is returned.
* </pre>
*
* <code>
* string evaluation = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @param value The bytes for evaluation to set.
* @return This builder for chaining.
*/
public Builder setEvaluationBytes(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
checkByteStringIsUtf8(value);
evaluation_ = value;
bitField0_ |= 0x00000001;
onChanged();
return this;
}
private int pageSize_;
/**
*
*
* <pre>
* Maximum number of [EvaluationResult][] to return. If unspecified,
* defaults to 100. The maximum allowed value is 1000. Values above 1000 will
* be coerced to 1000.
*
* If this field is negative, an `INVALID_ARGUMENT` error is returned.
* </pre>
*
* <code>int32 page_size = 2;</code>
*
* @return The pageSize.
*/
@java.lang.Override
public int getPageSize() {
return pageSize_;
}
/**
*
*
* <pre>
* Maximum number of [EvaluationResult][] to return. If unspecified,
* defaults to 100. The maximum allowed value is 1000. Values above 1000 will
* be coerced to 1000.
*
* If this field is negative, an `INVALID_ARGUMENT` error is returned.
* </pre>
*
* <code>int32 page_size = 2;</code>
*
* @param value The pageSize to set.
* @return This builder for chaining.
*/
public Builder setPageSize(int value) {
pageSize_ = value;
bitField0_ |= 0x00000002;
onChanged();
return this;
}
/**
*
*
* <pre>
* Maximum number of [EvaluationResult][] to return. If unspecified,
* defaults to 100. The maximum allowed value is 1000. Values above 1000 will
* be coerced to 1000.
*
* If this field is negative, an `INVALID_ARGUMENT` error is returned.
* </pre>
*
* <code>int32 page_size = 2;</code>
*
* @return This builder for chaining.
*/
public Builder clearPageSize() {
bitField0_ = (bitField0_ & ~0x00000002);
pageSize_ = 0;
onChanged();
return this;
}
private java.lang.Object pageToken_ = "";
/**
*
*
* <pre>
* A page token
* [ListEvaluationResultsResponse.next_page_token][google.cloud.discoveryengine.v1alpha.ListEvaluationResultsResponse.next_page_token],
* received from a previous
* [EvaluationService.ListEvaluationResults][google.cloud.discoveryengine.v1alpha.EvaluationService.ListEvaluationResults]
* call. Provide this to retrieve the subsequent page.
*
* When paginating, all other parameters provided to
* [EvaluationService.ListEvaluationResults][google.cloud.discoveryengine.v1alpha.EvaluationService.ListEvaluationResults]
* must match the call that provided the page token. Otherwise, an
* `INVALID_ARGUMENT` error is returned.
* </pre>
*
* <code>string page_token = 3;</code>
*
* @return The pageToken.
*/
public java.lang.String getPageToken() {
java.lang.Object ref = pageToken_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
pageToken_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
*
*
* <pre>
* A page token
* [ListEvaluationResultsResponse.next_page_token][google.cloud.discoveryengine.v1alpha.ListEvaluationResultsResponse.next_page_token],
* received from a previous
* [EvaluationService.ListEvaluationResults][google.cloud.discoveryengine.v1alpha.EvaluationService.ListEvaluationResults]
* call. Provide this to retrieve the subsequent page.
*
* When paginating, all other parameters provided to
* [EvaluationService.ListEvaluationResults][google.cloud.discoveryengine.v1alpha.EvaluationService.ListEvaluationResults]
* must match the call that provided the page token. Otherwise, an
* `INVALID_ARGUMENT` error is returned.
* </pre>
*
* <code>string page_token = 3;</code>
*
* @return The bytes for pageToken.
*/
public com.google.protobuf.ByteString getPageTokenBytes() {
java.lang.Object ref = pageToken_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
pageToken_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
*
*
* <pre>
* A page token
* [ListEvaluationResultsResponse.next_page_token][google.cloud.discoveryengine.v1alpha.ListEvaluationResultsResponse.next_page_token],
* received from a previous
* [EvaluationService.ListEvaluationResults][google.cloud.discoveryengine.v1alpha.EvaluationService.ListEvaluationResults]
* call. Provide this to retrieve the subsequent page.
*
* When paginating, all other parameters provided to
* [EvaluationService.ListEvaluationResults][google.cloud.discoveryengine.v1alpha.EvaluationService.ListEvaluationResults]
* must match the call that provided the page token. Otherwise, an
* `INVALID_ARGUMENT` error is returned.
* </pre>
*
* <code>string page_token = 3;</code>
*
* @param value The pageToken to set.
* @return This builder for chaining.
*/
public Builder setPageToken(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
pageToken_ = value;
bitField0_ |= 0x00000004;
onChanged();
return this;
}
/**
*
*
* <pre>
* A page token
* [ListEvaluationResultsResponse.next_page_token][google.cloud.discoveryengine.v1alpha.ListEvaluationResultsResponse.next_page_token],
* received from a previous
* [EvaluationService.ListEvaluationResults][google.cloud.discoveryengine.v1alpha.EvaluationService.ListEvaluationResults]
* call. Provide this to retrieve the subsequent page.
*
* When paginating, all other parameters provided to
* [EvaluationService.ListEvaluationResults][google.cloud.discoveryengine.v1alpha.EvaluationService.ListEvaluationResults]
* must match the call that provided the page token. Otherwise, an
* `INVALID_ARGUMENT` error is returned.
* </pre>
*
* <code>string page_token = 3;</code>
*
* @return This builder for chaining.
*/
public Builder clearPageToken() {
pageToken_ = getDefaultInstance().getPageToken();
bitField0_ = (bitField0_ & ~0x00000004);
onChanged();
return this;
}
/**
*
*
* <pre>
* A page token
* [ListEvaluationResultsResponse.next_page_token][google.cloud.discoveryengine.v1alpha.ListEvaluationResultsResponse.next_page_token],
* received from a previous
* [EvaluationService.ListEvaluationResults][google.cloud.discoveryengine.v1alpha.EvaluationService.ListEvaluationResults]
* call. Provide this to retrieve the subsequent page.
*
* When paginating, all other parameters provided to
* [EvaluationService.ListEvaluationResults][google.cloud.discoveryengine.v1alpha.EvaluationService.ListEvaluationResults]
* must match the call that provided the page token. Otherwise, an
* `INVALID_ARGUMENT` error is returned.
* </pre>
*
* <code>string page_token = 3;</code>
*
* @param value The bytes for pageToken to set.
* @return This builder for chaining.
*/
public Builder setPageTokenBytes(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
checkByteStringIsUtf8(value);
pageToken_ = value;
bitField0_ |= 0x00000004;
onChanged();
return this;
}
    @java.lang.Override
    public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
      // Replaces this builder's unknown fields wholesale; delegates to the superclass.
      return super.setUnknownFields(unknownFields);
    }
    @java.lang.Override
    public final Builder mergeUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      // Merges (rather than replaces) unknown fields; delegates to the superclass.
      return super.mergeUnknownFields(unknownFields);
    }
// @@protoc_insertion_point(builder_scope:google.cloud.discoveryengine.v1alpha.ListEvaluationResultsRequest)
}
// @@protoc_insertion_point(class_scope:google.cloud.discoveryengine.v1alpha.ListEvaluationResultsRequest)
  // Singleton default (empty) instance, created eagerly in the static initializer
  // and shared by getDefaultInstance()/getDefaultInstanceForType().
  private static final com.google.cloud.discoveryengine.v1alpha.ListEvaluationResultsRequest
      DEFAULT_INSTANCE;
  static {
    DEFAULT_INSTANCE = new com.google.cloud.discoveryengine.v1alpha.ListEvaluationResultsRequest();
  }
  /** Returns the singleton default (empty) instance of this message type. */
  public static com.google.cloud.discoveryengine.v1alpha.ListEvaluationResultsRequest
      getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }
  // Shared parser instance returned by parser() and getParserForType(). All parse
  // failures are surfaced as InvalidProtocolBufferException with the partially
  // built message attached for diagnostics.
  private static final com.google.protobuf.Parser<ListEvaluationResultsRequest> PARSER =
      new com.google.protobuf.AbstractParser<ListEvaluationResultsRequest>() {
        @java.lang.Override
        public ListEvaluationResultsRequest parsePartialFrom(
            com.google.protobuf.CodedInputStream input,
            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
            throws com.google.protobuf.InvalidProtocolBufferException {
          Builder builder = newBuilder();
          try {
            builder.mergeFrom(input, extensionRegistry);
          } catch (com.google.protobuf.InvalidProtocolBufferException e) {
            // Attach whatever was successfully read before the failure.
            throw e.setUnfinishedMessage(builder.buildPartial());
          } catch (com.google.protobuf.UninitializedMessageException e) {
            // Translate missing-required-field failures into the checked protobuf exception.
            throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
          } catch (java.io.IOException e) {
            // Wrap low-level stream errors in the protobuf exception type.
            throw new com.google.protobuf.InvalidProtocolBufferException(e)
                .setUnfinishedMessage(builder.buildPartial());
          }
          return builder.buildPartial();
        }
      };
  /** Returns the shared parser for {@code ListEvaluationResultsRequest} messages. */
  public static com.google.protobuf.Parser<ListEvaluationResultsRequest> parser() {
    return PARSER;
  }
  @java.lang.Override
  public com.google.protobuf.Parser<ListEvaluationResultsRequest> getParserForType() {
    // Instance-level view of the same shared static PARSER.
    return PARSER;
  }
  @java.lang.Override
  public com.google.cloud.discoveryengine.v1alpha.ListEvaluationResultsRequest
      getDefaultInstanceForType() {
    // Instance-level view of the shared singleton default instance.
    return DEFAULT_INSTANCE;
  }
}
|
openjdk/jdk8 | 37,488 | jdk/src/share/classes/java/nio/channels/AsynchronousFileChannel.java | /*
* Copyright (c) 2007, 2013, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
* under the terms of the GNU General Public License version 2 only, as
* published by the Free Software Foundation. Oracle designates this
* particular file as subject to the "Classpath" exception as provided
* by Oracle in the LICENSE file that accompanied this code.
*
* This code is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
* version 2 for more details (a copy is included in the LICENSE file that
* accompanied this code).
*
* You should have received a copy of the GNU General Public License version
* 2 along with this work; if not, write to the Free Software Foundation,
* Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
*
* Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
* or visit www.oracle.com if you need additional information or have any
* questions.
*/
package java.nio.channels;
import java.nio.file.*;
import java.nio.file.attribute.FileAttribute;
import java.nio.file.spi.*;
import java.nio.ByteBuffer;
import java.io.IOException;
import java.util.concurrent.Future;
import java.util.concurrent.ExecutorService;
import java.util.Set;
import java.util.HashSet;
import java.util.Collections;
/**
* An asynchronous channel for reading, writing, and manipulating a file.
*
* <p> An asynchronous file channel is created when a file is opened by invoking
* one of the {@link #open open} methods defined by this class. The file contains
* a variable-length sequence of bytes that can be read and written and whose
* current size can be {@link #size() queried}. The size of the file increases
* when bytes are written beyond its current size; the size of the file decreases
* when it is {@link #truncate truncated}.
*
* <p> An asynchronous file channel does not have a <i>current position</i>
* within the file. Instead, the file position is specified to each read and
* write method that initiates asynchronous operations. A {@link CompletionHandler}
* is specified as a parameter and is invoked to consume the result of the I/O
* operation. This class also defines read and write methods that initiate
* asynchronous operations, returning a {@link Future} to represent the pending
* result of the operation. The {@code Future} may be used to check if the
* operation has completed, wait for its completion, and retrieve the result.
*
* <p> In addition to read and write operations, this class defines the
* following operations: </p>
*
* <ul>
*
* <li><p> Updates made to a file may be {@link #force <i>forced
* out</i>} to the underlying storage device, ensuring that data are not
* lost in the event of a system crash. </p></li>
*
* <li><p> A region of a file may be {@link #lock <i>locked</i>} against
* access by other programs. </p></li>
*
* </ul>
*
* <p> An {@code AsynchronousFileChannel} is associated with a thread pool to
* which tasks are submitted to handle I/O events and dispatch to completion
* handlers that consume the results of I/O operations on the channel. The
* completion handler for an I/O operation initiated on a channel is guaranteed
* to be invoked by one of the threads in the thread pool (This ensures that the
* completion handler is run by a thread with the expected <em>identity</em>).
* Where an I/O operation completes immediately, and the initiating thread is
* itself a thread in the thread pool, then the completion handler may be invoked
* directly by the initiating thread. When an {@code AsynchronousFileChannel} is
* created without specifying a thread pool then the channel is associated with
* a system-dependent default thread pool that may be shared with other
* channels. The default thread pool is configured by the system properties
* defined by the {@link AsynchronousChannelGroup} class.
*
* <p> Channels of this type are safe for use by multiple concurrent threads. The
* {@link Channel#close close} method may be invoked at any time, as specified
* by the {@link Channel} interface. This causes all outstanding asynchronous
* operations on the channel to complete with the exception {@link
* AsynchronousCloseException}. Multiple read and write operations may be
* outstanding at the same time. When multiple read and write operations are
* outstanding then the ordering of the I/O operations, and the order that the
* completion handlers are invoked, is not specified; they are not, in particular,
* guaranteed to execute in the order that the operations were initiated. The
* {@link java.nio.ByteBuffer ByteBuffers} used when reading or writing are not
* safe for use by multiple concurrent I/O operations. Furthermore, after an I/O
* operation is initiated then care should be taken to ensure that the buffer is
* not accessed until after the operation has completed.
*
* <p> As with {@link FileChannel}, the view of a file provided by an instance of
* this class is guaranteed to be consistent with other views of the same file
* provided by other instances in the same program. The view provided by an
* instance of this class may or may not, however, be consistent with the views
* seen by other concurrently-running programs due to caching performed by the
* underlying operating system and delays induced by network-filesystem protocols.
* This is true regardless of the language in which these other programs are
* written, and whether they are running on the same machine or on some other
* machine. The exact nature of any such inconsistencies are system-dependent
* and are therefore unspecified.
*
* @since 1.7
*/
public abstract class AsynchronousFileChannel
implements AsynchronousChannel
{
    /**
     * Initializes a new instance of this class.
     */
    protected AsynchronousFileChannel() {
        // Protected: instances are created by FileSystemProvider implementations
        // (see the static open methods), never directly by application code.
    }
/**
* Opens or creates a file for reading and/or writing, returning an
* asynchronous file channel to access the file.
*
* <p> The {@code options} parameter determines how the file is opened.
* The {@link StandardOpenOption#READ READ} and {@link StandardOpenOption#WRITE
* WRITE} options determines if the file should be opened for reading and/or
* writing. If neither option is contained in the array then an existing file
* is opened for reading.
*
* <p> In addition to {@code READ} and {@code WRITE}, the following options
* may be present:
*
* <table border=1 cellpadding=5 summary="">
* <tr> <th>Option</th> <th>Description</th> </tr>
* <tr>
* <td> {@link StandardOpenOption#TRUNCATE_EXISTING TRUNCATE_EXISTING} </td>
* <td> When opening an existing file, the file is first truncated to a
* size of 0 bytes. This option is ignored when the file is opened only
* for reading.</td>
* </tr>
* <tr>
* <td> {@link StandardOpenOption#CREATE_NEW CREATE_NEW} </td>
* <td> If this option is present then a new file is created, failing if
* the file already exists. When creating a file the check for the
* existence of the file and the creation of the file if it does not exist
* is atomic with respect to other file system operations. This option is
* ignored when the file is opened only for reading. </td>
* </tr>
* <tr>
* <td > {@link StandardOpenOption#CREATE CREATE} </td>
* <td> If this option is present then an existing file is opened if it
* exists, otherwise a new file is created. When creating a file the check
* for the existence of the file and the creation of the file if it does
* not exist is atomic with respect to other file system operations. This
* option is ignored if the {@code CREATE_NEW} option is also present or
* the file is opened only for reading. </td>
* </tr>
* <tr>
* <td > {@link StandardOpenOption#DELETE_ON_CLOSE DELETE_ON_CLOSE} </td>
* <td> When this option is present then the implementation makes a
* <em>best effort</em> attempt to delete the file when closed by the
* the {@link #close close} method. If the {@code close} method is not
* invoked then a <em>best effort</em> attempt is made to delete the file
* when the Java virtual machine terminates. </td>
* </tr>
* <tr>
* <td>{@link StandardOpenOption#SPARSE SPARSE} </td>
* <td> When creating a new file this option is a <em>hint</em> that the
* new file will be sparse. This option is ignored when not creating
* a new file. </td>
* </tr>
* <tr>
* <td> {@link StandardOpenOption#SYNC SYNC} </td>
* <td> Requires that every update to the file's content or metadata be
* written synchronously to the underlying storage device. (see <a
* href="../file/package-summary.html#integrity"> Synchronized I/O file
* integrity</a>). </td>
* </tr>
* <tr>
* <td> {@link StandardOpenOption#DSYNC DSYNC} </td>
* <td> Requires that every update to the file's content be written
* synchronously to the underlying storage device. (see <a
* href="../file/package-summary.html#integrity"> Synchronized I/O file
* integrity</a>). </td>
* </tr>
* </table>
*
* <p> An implementation may also support additional options.
*
* <p> The {@code executor} parameter is the {@link ExecutorService} to
* which tasks are submitted to handle I/O events and dispatch completion
* results for operations initiated on resulting channel.
* The nature of these tasks is highly implementation specific and so care
* should be taken when configuring the {@code Executor}. Minimally it
* should support an unbounded work queue and should not run tasks on the
* caller thread of the {@link ExecutorService#execute execute} method.
* Shutting down the executor service while the channel is open results in
* unspecified behavior.
*
* <p> The {@code attrs} parameter is an optional array of file {@link
* FileAttribute file-attributes} to set atomically when creating the file.
*
* <p> The new channel is created by invoking the {@link
* FileSystemProvider#newFileChannel newFileChannel} method on the
* provider that created the {@code Path}.
*
* @param file
* The path of the file to open or create
* @param options
* Options specifying how the file is opened
* @param executor
* The thread pool or {@code null} to associate the channel with
* the default thread pool
* @param attrs
* An optional list of file attributes to set atomically when
* creating the file
*
* @return A new asynchronous file channel
*
* @throws IllegalArgumentException
* If the set contains an invalid combination of options
* @throws UnsupportedOperationException
* If the {@code file} is associated with a provider that does not
* support creating asynchronous file channels, or an unsupported
* open option is specified, or the array contains an attribute that
* cannot be set atomically when creating the file
* @throws IOException
* If an I/O error occurs
* @throws SecurityException
* If a security manager is installed and it denies an
* unspecified permission required by the implementation.
* In the case of the default provider, the {@link
* SecurityManager#checkRead(String)} method is invoked to check
* read access if the file is opened for reading. The {@link
* SecurityManager#checkWrite(String)} method is invoked to check
* write access if the file is opened for writing
*/
public static AsynchronousFileChannel open(Path file,
Set<? extends OpenOption> options,
ExecutorService executor,
FileAttribute<?>... attrs)
throws IOException
{
FileSystemProvider provider = file.getFileSystem().provider();
return provider.newAsynchronousFileChannel(file, options, executor, attrs);
}
    // Shared empty attribute array used by the varargs open(Path, OpenOption...)
    // overload, avoiding a fresh allocation on every call.
    @SuppressWarnings({"unchecked", "rawtypes"}) // generic array construction
    private static final FileAttribute<?>[] NO_ATTRIBUTES = new FileAttribute[0];
/**
* Opens or creates a file for reading and/or writing, returning an
* asynchronous file channel to access the file.
*
* <p> An invocation of this method behaves in exactly the same way as the
* invocation
* <pre>
* ch.{@link #open(Path,Set,ExecutorService,FileAttribute[])
* open}(file, opts, null, new FileAttribute<?>[0]);
* </pre>
* where {@code opts} is a {@code Set} containing the options specified to
* this method.
*
* <p> The resulting channel is associated with default thread pool to which
* tasks are submitted to handle I/O events and dispatch to completion
* handlers that consume the result of asynchronous operations performed on
* the resulting channel.
*
* @param file
* The path of the file to open or create
* @param options
* Options specifying how the file is opened
*
* @return A new asynchronous file channel
*
* @throws IllegalArgumentException
* If the set contains an invalid combination of options
* @throws UnsupportedOperationException
* If the {@code file} is associated with a provider that does not
* support creating file channels, or an unsupported open option is
* specified
* @throws IOException
* If an I/O error occurs
* @throws SecurityException
* If a security manager is installed and it denies an
* unspecified permission required by the implementation.
* In the case of the default provider, the {@link
* SecurityManager#checkRead(String)} method is invoked to check
* read access if the file is opened for reading. The {@link
* SecurityManager#checkWrite(String)} method is invoked to check
* write access if the file is opened for writing
*/
public static AsynchronousFileChannel open(Path file, OpenOption... options)
throws IOException
{
Set<OpenOption> set = new HashSet<OpenOption>(options.length);
Collections.addAll(set, options);
return open(file, set, null, NO_ATTRIBUTES);
}
    /**
     * Returns the current size of this channel's file.
     *
     * @return  The current size of this channel's file, measured in bytes
     *
     * @throws  ClosedChannelException
     *          If this channel is closed
     * @throws  IOException
     *          If some other I/O error occurs
     *
     * @see #truncate(long)
     */
    public abstract long size() throws IOException;
    /**
     * Truncates this channel's file to the given size.
     *
     * <p> If the given size is less than the file's current size then the file
     * is truncated, discarding any bytes beyond the new end of the file.  If
     * the given size is greater than or equal to the file's current size then
     * the file is not modified. </p>
     *
     * @param  size
     *         The new size, a non-negative byte count
     *
     * @return  This file channel
     *
     * @throws  NonWritableChannelException
     *          If this channel was not opened for writing
     *
     * @throws  ClosedChannelException
     *          If this channel is closed
     *
     * @throws  IllegalArgumentException
     *          If the new size is negative
     *
     * @throws  IOException
     *          If some other I/O error occurs
     *
     * @see #size()
     */
    public abstract AsynchronousFileChannel truncate(long size) throws IOException;
    /**
     * Forces any updates to this channel's file to be written to the storage
     * device that contains it.
     *
     * <p> If this channel's file resides on a local storage device then when
     * this method returns it is guaranteed that all changes made to the file
     * since this channel was created, or since this method was last invoked,
     * will have been written to that device.  This is useful for ensuring that
     * critical information is not lost in the event of a system crash.
     *
     * <p> If the file does not reside on a local device then no such guarantee
     * is made.
     *
     * <p> The {@code metaData} parameter can be used to limit the number of
     * I/O operations that this method is required to perform.  Passing
     * {@code false} for this parameter indicates that only updates to the
     * file's content need be written to storage; passing {@code true}
     * indicates that updates to both the file's content and metadata must be
     * written, which generally requires at least one more I/O operation.
     * Whether this parameter actually has any effect is dependent upon the
     * underlying operating system and is therefore unspecified.
     *
     * <p> Invoking this method may cause an I/O operation to occur even if the
     * channel was only opened for reading.  Some operating systems, for
     * example, maintain a last-access time as part of a file's metadata, and
     * this time is updated whenever the file is read.  Whether or not this is
     * actually done is system-dependent and is therefore unspecified.
     *
     * <p> This method is only guaranteed to force changes that were made to
     * this channel's file via the methods defined in this class.
     *
     * @param   metaData
     *          If {@code true} then this method is required to force changes
     *          to both the file's content and metadata to be written to
     *          storage; otherwise, it need only force content changes to be
     *          written
     *
     * @throws  ClosedChannelException
     *          If this channel is closed
     *
     * @throws  IOException
     *          If some other I/O error occurs
     *
     * @see FileChannel#force(boolean)
     */
    public abstract void force(boolean metaData) throws IOException;
    /**
     * Acquires a lock on the given region of this channel's file.
     *
     * <p> This method initiates an operation to acquire a lock on the given
     * region of this channel's file. The {@code handler} parameter is a
     * completion handler that is invoked when the lock is acquired (or the
     * operation fails). The result passed to the completion handler is the
     * resulting {@code FileLock}.
     *
     * <p> The region specified by the {@code position} and {@code size}
     * parameters need not be contained within, or even overlap, the actual
     * underlying file.  Lock regions are fixed in size; if a locked region
     * initially contains the end of the file and the file grows beyond the
     * region then the new portion of the file will not be covered by the lock.
     * If a file is expected to grow in size and a lock on the entire file is
     * required then a region starting at zero, and no smaller than the
     * expected maximum size of the file, should be locked.  The two-argument
     * {@link #lock(Object,CompletionHandler)} method simply locks a region
     * of size {@link Long#MAX_VALUE}. If a lock that overlaps the requested
     * region is already held by this Java virtual machine, or this method has
     * been invoked to lock an overlapping region and that operation has not
     * completed, then this method throws {@link OverlappingFileLockException}.
     *
     * <p> Some operating systems do not support a mechanism to acquire a file
     * lock in an asynchronous manner. Consequently an implementation may
     * acquire the file lock in a background thread or from a task executed by
     * a thread in the associated thread pool. If there are many lock operations
     * outstanding then it may consume threads in the Java virtual machine for
     * indefinite periods.
     *
     * <p> Some operating systems do not support shared locks, in which case a
     * request for a shared lock is automatically converted into a request for
     * an exclusive lock.  Whether the newly-acquired lock is shared or
     * exclusive may be tested by invoking the resulting lock object's {@link
     * FileLock#isShared() isShared} method.
     *
     * <p> File locks are held on behalf of the entire Java virtual machine.
     * They are not suitable for controlling access to a file by multiple
     * threads within the same virtual machine.
     *
     * @param   <A>
     *          The type of the attachment
     * @param   position
     *          The position at which the locked region is to start; must be
     *          non-negative
     * @param   size
     *          The size of the locked region; must be non-negative, and the sum
     *          {@code position}&nbsp;+&nbsp;{@code size} must be non-negative
     * @param   shared
     *          {@code true} to request a shared lock, in which case this
     *          channel must be open for reading (and possibly writing);
     *          {@code false} to request an exclusive lock, in which case this
     *          channel must be open for writing (and possibly reading)
     * @param   attachment
     *          The object to attach to the I/O operation; can be {@code null}
     * @param   handler
     *          The handler for consuming the result
     *
     * @throws  OverlappingFileLockException
     *          If a lock that overlaps the requested region is already held by
     *          this Java virtual machine, or there is already a pending attempt
     *          to lock an overlapping region
     * @throws  IllegalArgumentException
     *          If the preconditions on the parameters do not hold
     * @throws  NonReadableChannelException
     *          If {@code shared} is true but this channel was not opened for reading
     * @throws  NonWritableChannelException
     *          If {@code shared} is false but this channel was not opened for writing
     *
     * @see #tryLock(long,long,boolean)
     */
    public abstract <A> void lock(long position,
                                  long size,
                                  boolean shared,
                                  A attachment,
                                  CompletionHandler<FileLock,? super A> handler);
    /**
     * Acquires an exclusive lock on this channel's file.
     *
     * <p> This method initiates an operation to acquire a lock on the given
     * region of this channel's file. The {@code handler} parameter is a
     * completion handler that is invoked when the lock is acquired (or the
     * operation fails). The result passed to the completion handler is the
     * resulting {@code FileLock}.
     *
     * <p> An invocation of this method of the form {@code ch.lock(att,handler)}
     * behaves in exactly the same way as the invocation
     * <pre>
     *     ch.{@link #lock(long,long,boolean,Object,CompletionHandler) lock}(0L, Long.MAX_VALUE, false, att, handler)
     * </pre>
     *
     * @param   <A>
     *          The type of the attachment
     * @param   attachment
     *          The object to attach to the I/O operation; can be {@code null}
     * @param   handler
     *          The handler for consuming the result
     *
     * @throws  OverlappingFileLockException
     *          If a lock is already held by this Java virtual machine, or there
     *          is already a pending attempt to lock a region
     * @throws  NonWritableChannelException
     *          If this channel was not opened for writing
     */
    public final <A> void lock(A attachment,
                               CompletionHandler<FileLock,? super A> handler)
    {
        // Exclusive lock over a region covering the whole file (and any growth).
        lock(0L, Long.MAX_VALUE, false, attachment, handler);
    }
    /**
     * Acquires a lock on the given region of this channel's file.
     *
     * <p> This method initiates an operation to acquire a lock on the given
     * region of this channel's file.  The method behaves in exactly the same
     * manner as the {@link #lock(long, long, boolean, Object, CompletionHandler)}
     * method except that instead of specifying a completion handler, this
     * method returns a {@code Future} representing the pending result. The
     * {@code Future}'s {@link Future#get() get} method returns the {@link
     * FileLock} on successful completion.
     *
     * @param   position
     *          The position at which the locked region is to start; must be
     *          non-negative
     * @param   size
     *          The size of the locked region; must be non-negative, and the sum
     *          {@code position}&nbsp;+&nbsp;{@code size} must be non-negative
     * @param   shared
     *          {@code true} to request a shared lock, in which case this
     *          channel must be open for reading (and possibly writing);
     *          {@code false} to request an exclusive lock, in which case this
     *          channel must be open for writing (and possibly reading)
     *
     * @return  a {@code Future} object representing the pending result
     *
     * @throws  OverlappingFileLockException
     *          If a lock is already held by this Java virtual machine, or there
     *          is already a pending attempt to lock a region
     * @throws  IllegalArgumentException
     *          If the preconditions on the parameters do not hold
     * @throws  NonReadableChannelException
     *          If {@code shared} is true but this channel was not opened for reading
     * @throws  NonWritableChannelException
     *          If {@code shared} is false but this channel was not opened for writing
     *
     * @see #lock(long,long,boolean,Object,CompletionHandler)
     */
    public abstract Future<FileLock> lock(long position, long size, boolean shared);
    /**
     * Acquires an exclusive lock on this channel's file.
     *
     * <p> This method initiates an operation to acquire an exclusive lock on this
     * channel's file. The method returns a {@code Future} representing the
     * pending result of the operation. The {@code Future}'s {@link Future#get()
     * get} method returns the {@link FileLock} on successful completion.
     *
     * <p> An invocation of this method behaves in exactly the same way as the
     * invocation
     * <pre>
     *     ch.{@link #lock(long,long,boolean) lock}(0L, Long.MAX_VALUE, false)
     * </pre>
     *
     * @return  a {@code Future} object representing the pending result
     *
     * @throws  OverlappingFileLockException
     *          If a lock is already held by this Java virtual machine, or there
     *          is already a pending attempt to lock a region
     * @throws  NonWritableChannelException
     *          If this channel was not opened for writing
     */
    public final Future<FileLock> lock() {
        // Exclusive lock covering the entire file (region 0..Long.MAX_VALUE).
        return lock(0L, Long.MAX_VALUE, false);
    }
    /**
     * Attempts to acquire a lock on the given region of this channel's file.
     *
     * <p> This method does not block. An invocation always returns immediately,
     * either having acquired a lock on the requested region or having failed to
     * do so.  If it fails to acquire a lock because an overlapping lock is held
     * by another program then it returns {@code null}.  If it fails to acquire
     * a lock for any other reason then an appropriate exception is thrown.
     *
     * @param  position
     *         The position at which the locked region is to start; must be
     *         non-negative
     *
     * @param  size
     *         The size of the locked region; must be non-negative, and the sum
     *         {@code position}&nbsp;+&nbsp;{@code size} must be non-negative
     *
     * @param  shared
     *         {@code true} to request a shared lock,
     *         {@code false} to request an exclusive lock
     *
     * @return  A lock object representing the newly-acquired lock,
     *          or {@code null} if the lock could not be acquired
     *          because another program holds an overlapping lock
     *
     * @throws  IllegalArgumentException
     *          If the preconditions on the parameters do not hold
     * @throws  ClosedChannelException
     *          If this channel is closed
     * @throws  OverlappingFileLockException
     *          If a lock that overlaps the requested region is already held by
     *          this Java virtual machine, or if another thread is already
     *          blocked in this method and is attempting to lock an overlapping
     *          region of the same file
     * @throws  NonReadableChannelException
     *          If {@code shared} is true but this channel was not opened for reading
     * @throws  NonWritableChannelException
     *          If {@code shared} is false but this channel was not opened for writing
     *
     * @throws  IOException
     *          If some other I/O error occurs
     *
     * @see     #lock(Object,CompletionHandler)
     * @see     #lock(long,long,boolean,Object,CompletionHandler)
     * @see     #lock(long,long,boolean)
     * @see     #tryLock()
     */
    public abstract FileLock tryLock(long position, long size, boolean shared)
        throws IOException;
    /**
     * Attempts to acquire an exclusive lock on this channel's file.
     *
     * <p> An invocation of this method of the form {@code ch.tryLock()}
     * behaves in exactly the same way as the invocation
     *
     * <pre>
     *     ch.{@link #tryLock(long,long,boolean) tryLock}(0L, Long.MAX_VALUE, false) </pre>
     *
     * @return  A lock object representing the newly-acquired lock,
     *          or {@code null} if the lock could not be acquired
     *          because another program holds an overlapping lock
     *
     * @throws  ClosedChannelException
     *          If this channel is closed
     * @throws  OverlappingFileLockException
     *          If a lock that overlaps the requested region is already held by
     *          this Java virtual machine, or if another thread is already
     *          blocked in this method and is attempting to lock an overlapping
     *          region
     * @throws  NonWritableChannelException
     *          If {@code shared} is false but this channel was not opened for writing
     *
     * @throws  IOException
     *          If some other I/O error occurs
     *
     * @see     #lock(Object,CompletionHandler)
     * @see     #lock(long,long,boolean,Object,CompletionHandler)
     * @see     #tryLock(long,long,boolean)
     */
    public final FileLock tryLock() throws IOException {
        // Non-blocking attempt at an exclusive lock over the entire file.
        return tryLock(0L, Long.MAX_VALUE, false);
    }
    /**
     * Reads a sequence of bytes from this channel into the given buffer,
     * starting at the given file position.
     *
     * <p> This method initiates the reading of a sequence of bytes from this
     * channel into the given buffer, starting at the given file position. The
     * result of the read is the number of bytes read or {@code -1} if the given
     * position is greater than or equal to the file's size at the time that the
     * read is attempted.
     *
     * <p> This method works in the same manner as the {@link
     * AsynchronousByteChannel#read(ByteBuffer,Object,CompletionHandler)}
     * method, except that bytes are read starting at the given file position.
     * If the given file position is greater than the file's size at the time
     * that the read is attempted then no bytes are read.
     *
     * @param   <A>
     *          The type of the attachment
     * @param   dst
     *          The buffer into which bytes are to be transferred
     * @param   position
     *          The file position at which the transfer is to begin;
     *          must be non-negative
     * @param   attachment
     *          The object to attach to the I/O operation; can be {@code null}
     * @param   handler
     *          The handler for consuming the result
     *
     * @throws  IllegalArgumentException
     *          If the position is negative or the buffer is read-only
     * @throws  NonReadableChannelException
     *          If this channel was not opened for reading
     *
     * @see #read(ByteBuffer,long)
     */
    public abstract <A> void read(ByteBuffer dst,
                                  long position,
                                  A attachment,
                                  CompletionHandler<Integer,? super A> handler);
    /**
     * Reads a sequence of bytes from this channel into the given buffer,
     * starting at the given file position.
     *
     * <p> This method initiates the reading of a sequence of bytes from this
     * channel into the given buffer, starting at the given file position. This
     * method returns a {@code Future} representing the pending result of the
     * operation. The {@code Future}'s {@link Future#get() get} method returns
     * the number of bytes read or {@code -1} if the given position is greater
     * than or equal to the file's size at the time that the read is attempted.
     *
     * <p> This method works in the same manner as the {@link
     * AsynchronousByteChannel#read(ByteBuffer)} method, except that bytes are
     * read starting at the given file position. If the given file position is
     * greater than the file's size at the time that the read is attempted then
     * no bytes are read.
     *
     * @param dst
     *          The buffer into which bytes are to be transferred
     * @param position
     *          The file position at which the transfer is to begin;
     *          must be non-negative
     *
     * @return A {@code Future} object representing the pending result
     *
     * @throws IllegalArgumentException
     *          If the position is negative or the buffer is read-only
     * @throws NonReadableChannelException
     *          If this channel was not opened for reading
     *
     * @see #read(ByteBuffer,long,Object,CompletionHandler)
     */
    public abstract Future<Integer> read(ByteBuffer dst, long position);
    /**
     * Writes a sequence of bytes to this channel from the given buffer, starting
     * at the given file position.
     *
     * <p> This method works in the same manner as the {@link
     * AsynchronousByteChannel#write(ByteBuffer,Object,CompletionHandler)}
     * method, except that bytes are written starting at the given file position.
     * If the given position is greater than the file's size, at the time that
     * the write is attempted, then the file will be grown to accommodate the new
     * bytes; the values of any bytes between the previous end-of-file and the
     * newly-written bytes are unspecified.
     *
     * @param <A>
     *          The type of the attachment
     * @param src
     *          The buffer from which bytes are to be transferred
     * @param position
     *          The file position at which the transfer is to begin;
     *          must be non-negative
     * @param attachment
     *          The object to attach to the I/O operation; can be {@code null}
     * @param handler
     *          The handler for consuming the result
     *
     * @throws IllegalArgumentException
     *          If the position is negative
     * @throws NonWritableChannelException
     *          If this channel was not opened for writing
     *
     * @see #write(ByteBuffer,long)
     */
    public abstract <A> void write(ByteBuffer src,
                                   long position,
                                   A attachment,
                                   CompletionHandler<Integer,? super A> handler);
    /**
     * Writes a sequence of bytes to this channel from the given buffer, starting
     * at the given file position.
     *
     * <p> This method initiates the writing of a sequence of bytes to this
     * channel from the given buffer, starting at the given file position. The
     * method returns a {@code Future} representing the pending result of the
     * write operation. The {@code Future}'s {@link Future#get() get} method
     * returns the number of bytes written.
     *
     * <p> This method works in the same manner as the {@link
     * AsynchronousByteChannel#write(ByteBuffer)} method, except that bytes are
     * written starting at the given file position. If the given position is
     * greater than the file's size, at the time that the write is attempted,
     * then the file will be grown to accommodate the new bytes; the values of
     * any bytes between the previous end-of-file and the newly-written bytes
     * are unspecified.
     *
     * @param src
     *          The buffer from which bytes are to be transferred
     * @param position
     *          The file position at which the transfer is to begin;
     *          must be non-negative
     *
     * @return A {@code Future} object representing the pending result
     *
     * @throws IllegalArgumentException
     *          If the position is negative
     * @throws NonWritableChannelException
     *          If this channel was not opened for writing
     *
     * @see #write(ByteBuffer,long,Object,CompletionHandler)
     */
    public abstract Future<Integer> write(ByteBuffer src, long position);
}
|
apache/lucene | 37,257 | lucene/core/src/java/org/apache/lucene/codecs/lucene104/Lucene104ScalarQuantizedVectorsWriter.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.lucene.codecs.lucene104;
import static org.apache.lucene.codecs.lucene104.Lucene104ScalarQuantizedVectorsFormat.DIRECT_MONOTONIC_BLOCK_SHIFT;
import static org.apache.lucene.codecs.lucene104.Lucene104ScalarQuantizedVectorsFormat.QUANTIZED_VECTOR_COMPONENT;
import static org.apache.lucene.index.VectorSimilarityFunction.COSINE;
import static org.apache.lucene.search.DocIdSetIterator.NO_MORE_DOCS;
import static org.apache.lucene.util.RamUsageEstimator.shallowSizeOfInstance;
import static org.apache.lucene.util.quantization.OptimizedScalarQuantizer.packAsBinary;
import static org.apache.lucene.util.quantization.OptimizedScalarQuantizer.transposeHalfByte;
import java.io.Closeable;
import java.io.IOException;
import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import org.apache.lucene.codecs.CodecUtil;
import org.apache.lucene.codecs.KnnVectorsReader;
import org.apache.lucene.codecs.hnsw.FlatFieldVectorsWriter;
import org.apache.lucene.codecs.hnsw.FlatVectorsWriter;
import org.apache.lucene.codecs.lucene104.Lucene104ScalarQuantizedVectorsFormat.ScalarEncoding;
import org.apache.lucene.codecs.lucene95.OrdToDocDISIReaderConfiguration;
import org.apache.lucene.index.DocsWithFieldSet;
import org.apache.lucene.index.FieldInfo;
import org.apache.lucene.index.FloatVectorValues;
import org.apache.lucene.index.IndexFileNames;
import org.apache.lucene.index.KnnVectorValues;
import org.apache.lucene.index.MergeState;
import org.apache.lucene.index.SegmentWriteState;
import org.apache.lucene.index.Sorter;
import org.apache.lucene.index.VectorEncoding;
import org.apache.lucene.index.VectorSimilarityFunction;
import org.apache.lucene.internal.hppc.FloatArrayList;
import org.apache.lucene.search.DocIdSetIterator;
import org.apache.lucene.search.VectorScorer;
import org.apache.lucene.store.IndexInput;
import org.apache.lucene.store.IndexOutput;
import org.apache.lucene.util.IOUtils;
import org.apache.lucene.util.VectorUtil;
import org.apache.lucene.util.hnsw.CloseableRandomVectorScorerSupplier;
import org.apache.lucene.util.hnsw.RandomVectorScorerSupplier;
import org.apache.lucene.util.hnsw.UpdateableRandomVectorScorer;
import org.apache.lucene.util.quantization.OptimizedScalarQuantizer;
/** Copied from Lucene, replace with Lucene's implementation sometime after Lucene 10 */
public class Lucene104ScalarQuantizedVectorsWriter extends FlatVectorsWriter {
  // Shallow heap footprint of this writer instance, used by ramBytesUsed().
  private static final long SHALLOW_RAM_BYTES_USED =
      shallowSizeOfInstance(Lucene104ScalarQuantizedVectorsWriter.class);
  // Segment being written; also provides the info stream for logging.
  private final SegmentWriteState segmentWriteState;
  // Per-field buffering writers created by addField() for FLOAT32 fields.
  private final List<FieldWriter> fields = new ArrayList<>();
  // Output for the per-field metadata and for the quantized vector data.
  private final IndexOutput meta, vectorData;
  // Scalar encoding used for the quantized vectors (byte/nibble/bit variants).
  private final ScalarEncoding encoding;
  // Delegate that persists the raw (un-quantized) float vectors.
  private final FlatVectorsWriter rawVectorDelegate;
  private final Lucene104ScalarQuantizedVectorScorer vectorsScorer;
  // Set by finish(); guards against writing the footer twice.
  private boolean finished;
  /**
   * Sole constructor. Creates the metadata and vector-data outputs for the segment and writes
   * their index headers; on any failure the partially opened outputs are closed before the
   * original error is rethrown.
   *
   * @param state the segment write state supplying directory, name, suffix and context
   * @param encoding the scalar encoding used for the quantized vectors
   * @param rawVectorDelegate writer that persists the raw (un-quantized) vectors
   * @param vectorsScorer the scorer to use for scoring vectors
   * @throws IOException if the meta or vector data outputs cannot be created or written
   */
  protected Lucene104ScalarQuantizedVectorsWriter(
      SegmentWriteState state,
      ScalarEncoding encoding,
      FlatVectorsWriter rawVectorDelegate,
      Lucene104ScalarQuantizedVectorScorer vectorsScorer)
      throws IOException {
    super(vectorsScorer);
    this.encoding = encoding;
    this.vectorsScorer = vectorsScorer;
    this.segmentWriteState = state;
    String metaFileName =
        IndexFileNames.segmentFileName(
            state.segmentInfo.name,
            state.segmentSuffix,
            Lucene104ScalarQuantizedVectorsFormat.META_EXTENSION);
    String vectorDataFileName =
        IndexFileNames.segmentFileName(
            state.segmentInfo.name,
            state.segmentSuffix,
            Lucene104ScalarQuantizedVectorsFormat.VECTOR_DATA_EXTENSION);
    this.rawVectorDelegate = rawVectorDelegate;
    try {
      meta = state.directory.createOutput(metaFileName, state.context);
      vectorData = state.directory.createOutput(vectorDataFileName, state.context);
      CodecUtil.writeIndexHeader(
          meta,
          Lucene104ScalarQuantizedVectorsFormat.META_CODEC_NAME,
          Lucene104ScalarQuantizedVectorsFormat.VERSION_CURRENT,
          state.segmentInfo.getId(),
          state.segmentSuffix);
      CodecUtil.writeIndexHeader(
          vectorData,
          Lucene104ScalarQuantizedVectorsFormat.VECTOR_DATA_CODEC_NAME,
          Lucene104ScalarQuantizedVectorsFormat.VERSION_CURRENT,
          state.segmentInfo.getId(),
          state.segmentSuffix);
    } catch (Throwable t) {
      // Close whatever was opened, suppressing secondary exceptions into t,
      // then rethrow the original failure.
      IOUtils.closeWhileSuppressingExceptions(t, this);
      throw t;
    }
  }
@Override
public FlatFieldVectorsWriter<?> addField(FieldInfo fieldInfo) throws IOException {
FlatFieldVectorsWriter<?> rawVectorDelegate = this.rawVectorDelegate.addField(fieldInfo);
if (fieldInfo.getVectorEncoding().equals(VectorEncoding.FLOAT32)) {
@SuppressWarnings("unchecked")
FieldWriter fieldWriter =
new FieldWriter(fieldInfo, (FlatFieldVectorsWriter<float[]>) rawVectorDelegate);
fields.add(fieldWriter);
return fieldWriter;
}
return rawVectorDelegate;
}
@Override
public void flush(int maxDoc, Sorter.DocMap sortMap) throws IOException {
rawVectorDelegate.flush(maxDoc, sortMap);
for (FieldWriter field : fields) {
// after raw vectors are written, normalize vectors for clustering and quantization
if (VectorSimilarityFunction.COSINE == field.fieldInfo.getVectorSimilarityFunction()) {
field.normalizeVectors();
}
final float[] clusterCenter;
int vectorCount = field.flatFieldVectorsWriter.getVectors().size();
clusterCenter = new float[field.dimensionSums.length];
if (vectorCount > 0) {
for (int i = 0; i < field.dimensionSums.length; i++) {
clusterCenter[i] = field.dimensionSums[i] / vectorCount;
}
if (VectorSimilarityFunction.COSINE == field.fieldInfo.getVectorSimilarityFunction()) {
VectorUtil.l2normalize(clusterCenter);
}
}
if (segmentWriteState.infoStream.isEnabled(QUANTIZED_VECTOR_COMPONENT)) {
segmentWriteState.infoStream.message(
QUANTIZED_VECTOR_COMPONENT, "Vectors' count:" + vectorCount);
}
OptimizedScalarQuantizer quantizer =
new OptimizedScalarQuantizer(field.fieldInfo.getVectorSimilarityFunction());
if (sortMap == null) {
writeField(field, clusterCenter, maxDoc, quantizer);
} else {
writeSortingField(field, clusterCenter, maxDoc, sortMap, quantizer);
}
field.finish();
}
}
private void writeField(
FieldWriter fieldData, float[] clusterCenter, int maxDoc, OptimizedScalarQuantizer quantizer)
throws IOException {
// write vector values
long vectorDataOffset = vectorData.alignFilePointer(Float.BYTES);
writeVectors(fieldData, clusterCenter, quantizer);
long vectorDataLength = vectorData.getFilePointer() - vectorDataOffset;
float centroidDp =
!fieldData.getVectors().isEmpty() ? VectorUtil.dotProduct(clusterCenter, clusterCenter) : 0;
writeMeta(
fieldData.fieldInfo,
maxDoc,
vectorDataOffset,
vectorDataLength,
clusterCenter,
centroidDp,
fieldData.getDocsWithFieldSet());
}
private void writeVectors(
FieldWriter fieldData, float[] clusterCenter, OptimizedScalarQuantizer scalarQuantizer)
throws IOException {
byte[] scratch =
new byte[encoding.getDiscreteDimensions(fieldData.fieldInfo.getVectorDimension())];
byte[] vector =
switch (encoding) {
case UNSIGNED_BYTE, SEVEN_BIT -> scratch;
case PACKED_NIBBLE, SINGLE_BIT_QUERY_NIBBLE ->
new byte[encoding.getDocPackedLength(scratch.length)];
};
for (int i = 0; i < fieldData.getVectors().size(); i++) {
float[] v = fieldData.getVectors().get(i);
OptimizedScalarQuantizer.QuantizationResult corrections =
scalarQuantizer.scalarQuantize(v, scratch, encoding.getBits(), clusterCenter);
switch (encoding) {
case PACKED_NIBBLE -> OffHeapScalarQuantizedVectorValues.packNibbles(scratch, vector);
case SINGLE_BIT_QUERY_NIBBLE -> OptimizedScalarQuantizer.packAsBinary(scratch, vector);
case UNSIGNED_BYTE, SEVEN_BIT -> {}
}
vectorData.writeBytes(vector, vector.length);
vectorData.writeInt(Float.floatToIntBits(corrections.lowerInterval()));
vectorData.writeInt(Float.floatToIntBits(corrections.upperInterval()));
vectorData.writeInt(Float.floatToIntBits(corrections.additionalCorrection()));
vectorData.writeInt(corrections.quantizedComponentSum());
}
}
private void writeSortingField(
FieldWriter fieldData,
float[] clusterCenter,
int maxDoc,
Sorter.DocMap sortMap,
OptimizedScalarQuantizer scalarQuantizer)
throws IOException {
final int[] ordMap =
new int[fieldData.getDocsWithFieldSet().cardinality()]; // new ord to old ord
DocsWithFieldSet newDocsWithField = new DocsWithFieldSet();
mapOldOrdToNewOrd(fieldData.getDocsWithFieldSet(), sortMap, null, ordMap, newDocsWithField);
// write vector values
long vectorDataOffset = vectorData.alignFilePointer(Float.BYTES);
writeSortedVectors(fieldData, clusterCenter, ordMap, scalarQuantizer);
long quantizedVectorLength = vectorData.getFilePointer() - vectorDataOffset;
float centroidDp = VectorUtil.dotProduct(clusterCenter, clusterCenter);
writeMeta(
fieldData.fieldInfo,
maxDoc,
vectorDataOffset,
quantizedVectorLength,
clusterCenter,
centroidDp,
newDocsWithField);
}
private void writeSortedVectors(
FieldWriter fieldData,
float[] clusterCenter,
int[] ordMap,
OptimizedScalarQuantizer scalarQuantizer)
throws IOException {
byte[] scratch =
new byte[encoding.getDiscreteDimensions(fieldData.fieldInfo.getVectorDimension())];
byte[] vector =
switch (encoding) {
case UNSIGNED_BYTE, SEVEN_BIT -> scratch;
case PACKED_NIBBLE, SINGLE_BIT_QUERY_NIBBLE ->
new byte[encoding.getDocPackedLength(scratch.length)];
};
for (int ordinal : ordMap) {
float[] v = fieldData.getVectors().get(ordinal);
OptimizedScalarQuantizer.QuantizationResult corrections =
scalarQuantizer.scalarQuantize(v, scratch, encoding.getBits(), clusterCenter);
switch (encoding) {
case PACKED_NIBBLE -> OffHeapScalarQuantizedVectorValues.packNibbles(scratch, vector);
case SINGLE_BIT_QUERY_NIBBLE -> OptimizedScalarQuantizer.packAsBinary(scratch, vector);
case UNSIGNED_BYTE, SEVEN_BIT -> {}
}
vectorData.writeBytes(vector, vector.length);
vectorData.writeInt(Float.floatToIntBits(corrections.lowerInterval()));
vectorData.writeInt(Float.floatToIntBits(corrections.upperInterval()));
vectorData.writeInt(Float.floatToIntBits(corrections.additionalCorrection()));
vectorData.writeInt(corrections.quantizedComponentSum());
}
}
  /**
   * Writes one field's metadata entry: field number, vector encoding and similarity ordinals,
   * dimension, data slice (offset/length), vector count, then — for non-empty fields — the wire
   * encoding number, the centroid as little-endian floats, and the centroid's self dot-product,
   * followed by the ord-to-doc configuration.
   */
  private void writeMeta(
      FieldInfo field,
      int maxDoc,
      long vectorDataOffset,
      long vectorDataLength,
      float[] clusterCenter,
      float centroidDp,
      DocsWithFieldSet docsWithField)
      throws IOException {
    meta.writeInt(field.number);
    meta.writeInt(field.getVectorEncoding().ordinal());
    meta.writeInt(field.getVectorSimilarityFunction().ordinal());
    meta.writeVInt(field.getVectorDimension());
    meta.writeVLong(vectorDataOffset);
    meta.writeVLong(vectorDataLength);
    int count = docsWithField.cardinality();
    meta.writeVInt(count);
    if (count > 0) {
      // Encoding and centroid are only stored when the field has vectors.
      meta.writeVInt(encoding.getWireNumber());
      final ByteBuffer buffer =
          ByteBuffer.allocate(field.getVectorDimension() * Float.BYTES)
              .order(ByteOrder.LITTLE_ENDIAN);
      buffer.asFloatBuffer().put(clusterCenter);
      meta.writeBytes(buffer.array(), buffer.array().length);
      meta.writeInt(Float.floatToIntBits(centroidDp));
    }
    OrdToDocDISIReaderConfiguration.writeStoredMeta(
        DIRECT_MONOTONIC_BLOCK_SHIFT, meta, vectorData, count, maxDoc, docsWithField);
  }
  /** Finishes the writer, delegating to the raw writer and sealing both outputs with footers. */
  @Override
  public void finish() throws IOException {
    // finish() must only run once per writer.
    if (finished) {
      throw new IllegalStateException("already finished");
    }
    finished = true;
    rawVectorDelegate.finish();
    if (meta != null) {
      // write end of fields marker
      meta.writeInt(-1);
      CodecUtil.writeFooter(meta);
    }
    if (vectorData != null) {
      CodecUtil.writeFooter(vectorData);
    }
  }
  /**
   * Merges one field's vectors: recomputes the centroid across the incoming segments, lets the
   * raw delegate merge the float vectors, then streams the merged vectors through the quantizer
   * into the vector data file and records the field's metadata.
   */
  @Override
  public void mergeOneField(FieldInfo fieldInfo, MergeState mergeState) throws IOException {
    // Only FLOAT32 fields are quantized; anything else is delegated as-is.
    if (!fieldInfo.getVectorEncoding().equals(VectorEncoding.FLOAT32)) {
      rawVectorDelegate.mergeOneField(fieldInfo, mergeState);
      return;
    }
    final float[] centroid;
    final float[] mergedCentroid = new float[fieldInfo.getVectorDimension()];
    int vectorCount = mergeAndRecalculateCentroids(mergeState, fieldInfo, mergedCentroid);
    // Don't need access to the random vectors, we can just use the merged
    rawVectorDelegate.mergeOneField(fieldInfo, mergeState);
    centroid = mergedCentroid;
    if (segmentWriteState.infoStream.isEnabled(QUANTIZED_VECTOR_COMPONENT)) {
      segmentWriteState.infoStream.message(
          QUANTIZED_VECTOR_COMPONENT, "Vectors' count:" + vectorCount);
    }
    FloatVectorValues floatVectorValues =
        MergedVectorValues.mergeFloatVectorValues(fieldInfo, mergeState);
    // Cosine fields are normalized on the fly before quantization.
    if (fieldInfo.getVectorSimilarityFunction() == COSINE) {
      floatVectorValues = new NormalizedFloatVectorValues(floatVectorValues);
    }
    QuantizedFloatVectorValues quantizedVectorValues =
        new QuantizedFloatVectorValues(
            floatVectorValues,
            new OptimizedScalarQuantizer(fieldInfo.getVectorSimilarityFunction()),
            encoding,
            centroid);
    long vectorDataOffset = vectorData.alignFilePointer(Float.BYTES);
    DocsWithFieldSet docsWithField = writeVectorData(vectorData, quantizedVectorValues);
    long vectorDataLength = vectorData.getFilePointer() - vectorDataOffset;
    float centroidDp =
        docsWithField.cardinality() > 0 ? VectorUtil.dotProduct(centroid, centroid) : 0;
    writeMeta(
        fieldInfo,
        segmentWriteState.segmentInfo.maxDoc(),
        vectorDataOffset,
        vectorDataLength,
        centroid,
        centroidDp,
        docsWithField);
  }
static DocsWithFieldSet writeVectorData(
IndexOutput output, QuantizedByteVectorValues quantizedByteVectorValues) throws IOException {
DocsWithFieldSet docsWithField = new DocsWithFieldSet();
KnnVectorValues.DocIndexIterator iterator = quantizedByteVectorValues.iterator();
for (int docV = iterator.nextDoc(); docV != NO_MORE_DOCS; docV = iterator.nextDoc()) {
// write vector
byte[] binaryValue = quantizedByteVectorValues.vectorValue(iterator.index());
output.writeBytes(binaryValue, binaryValue.length);
OptimizedScalarQuantizer.QuantizationResult corrections =
quantizedByteVectorValues.getCorrectiveTerms(iterator.index());
output.writeInt(Float.floatToIntBits(corrections.lowerInterval()));
output.writeInt(Float.floatToIntBits(corrections.upperInterval()));
output.writeInt(Float.floatToIntBits(corrections.additionalCorrection()));
output.writeInt(corrections.quantizedComponentSum());
docsWithField.add(docV);
}
return docsWithField;
}
  /**
   * For asymmetric encodings, quantizes each vector twice in one pass: a bit-packed document
   * form (written to {@code binarizedVectorData}) and a 4-bit query form (written to
   * {@code binarizedQueryData}), each followed by its corrective terms.
   *
   * @return the set of docs that had a vector
   * @throws IllegalArgumentException if {@code encoding} is not asymmetric
   */
  static DocsWithFieldSet writeBinarizedVectorAndQueryData(
      IndexOutput binarizedVectorData,
      ScalarEncoding encoding,
      IndexOutput binarizedQueryData,
      FloatVectorValues floatVectorValues,
      float[] centroid,
      OptimizedScalarQuantizer binaryQuantizer)
      throws IOException {
    if (encoding.isAsymmetric() == false) {
      throw new IllegalArgumentException("encoding and queryEncoding must be different");
    }
    DocsWithFieldSet docsWithField = new DocsWithFieldSet();
    int discretizedDims = encoding.getDiscreteDimensions(floatVectorValues.dimension());
    // Two scratch buffers: [0] holds the doc-side quantization, [1] the query-side.
    byte[][] quantizationScratch = new byte[2][];
    quantizationScratch[0] = new byte[discretizedDims];
    quantizationScratch[1] = new byte[discretizedDims];
    byte[] toIndex = new byte[encoding.getDocPackedLength(discretizedDims)];
    byte[] toQuery = new byte[encoding.getQueryPackedLength(discretizedDims)];
    KnnVectorValues.DocIndexIterator iterator = floatVectorValues.iterator();
    for (int docV = iterator.nextDoc(); docV != NO_MORE_DOCS; docV = iterator.nextDoc()) {
      // write index vector
      OptimizedScalarQuantizer.QuantizationResult[] r =
          binaryQuantizer.multiScalarQuantize(
              floatVectorValues.vectorValue(iterator.index()),
              quantizationScratch,
              new byte[] {encoding.getBits(), encoding.getQueryBits()},
              centroid);
      // pack and store document bit vector
      packAsBinary(quantizationScratch[0], toIndex);
      binarizedVectorData.writeBytes(toIndex, toIndex.length);
      binarizedVectorData.writeInt(Float.floatToIntBits(r[0].lowerInterval()));
      binarizedVectorData.writeInt(Float.floatToIntBits(r[0].upperInterval()));
      binarizedVectorData.writeInt(Float.floatToIntBits(r[0].additionalCorrection()));
      binarizedVectorData.writeInt(r[0].quantizedComponentSum());
      docsWithField.add(docV);
      // pack and store the 4bit query vector
      transposeHalfByte(quantizationScratch[1], toQuery);
      binarizedQueryData.writeBytes(toQuery, toQuery.length);
      binarizedQueryData.writeInt(Float.floatToIntBits(r[1].lowerInterval()));
      binarizedQueryData.writeInt(Float.floatToIntBits(r[1].upperInterval()));
      binarizedQueryData.writeInt(Float.floatToIntBits(r[1].additionalCorrection()));
      binarizedQueryData.writeInt(r[1].quantizedComponentSum());
    }
    return docsWithField;
  }
@Override
public CloseableRandomVectorScorerSupplier mergeOneFieldToIndex(
FieldInfo fieldInfo, MergeState mergeState) throws IOException {
if (!fieldInfo.getVectorEncoding().equals(VectorEncoding.FLOAT32)) {
return rawVectorDelegate.mergeOneFieldToIndex(fieldInfo, mergeState);
}
final float[] centroid;
final float cDotC;
final float[] mergedCentroid = new float[fieldInfo.getVectorDimension()];
int vectorCount = mergeAndRecalculateCentroids(mergeState, fieldInfo, mergedCentroid);
// Don't need access to the random vectors, we can just use the merged
rawVectorDelegate.mergeOneField(fieldInfo, mergeState);
centroid = mergedCentroid;
cDotC = vectorCount > 0 ? VectorUtil.dotProduct(centroid, centroid) : 0;
if (segmentWriteState.infoStream.isEnabled(QUANTIZED_VECTOR_COMPONENT)) {
segmentWriteState.infoStream.message(
QUANTIZED_VECTOR_COMPONENT, "Vectors' count:" + vectorCount);
}
return mergeOneFieldToIndex(segmentWriteState, fieldInfo, mergeState, centroid, cDotC);
}
  /**
   * Quantizes the merged vectors into temp file(s), copies the result into the segment's vector
   * data file, writes the field metadata, and returns a scorer supplier backed by the temp data.
   * The returned supplier owns the opened inputs and deletes the temp files when closed; on
   * failure all temp outputs/inputs are closed and deleted before rethrowing.
   */
  private CloseableRandomVectorScorerSupplier mergeOneFieldToIndex(
      SegmentWriteState segmentWriteState,
      FieldInfo fieldInfo,
      MergeState mergeState,
      float[] centroid,
      float cDotC)
      throws IOException {
    long vectorDataOffset = vectorData.alignFilePointer(Float.BYTES);
    IndexOutput tempQuantizedVectorData = null;
    IndexOutput tempScoreQuantizedVectorData = null;
    IndexInput quantizedDataInput = null;
    IndexInput quantizedScoreDataInput = null;
    OptimizedScalarQuantizer quantizer =
        new OptimizedScalarQuantizer(fieldInfo.getVectorSimilarityFunction());
    try {
      tempQuantizedVectorData =
          segmentWriteState.directory.createTempOutput(
              vectorData.getName(), "temp", segmentWriteState.context);
      final String tempQuantizedVectorName = tempQuantizedVectorData.getName();
      final String tempScoreQuantizedVectorName;
      // Asymmetric encodings additionally produce a query-side representation
      // in a second temp file.
      if (encoding.isAsymmetric()) {
        tempScoreQuantizedVectorData =
            segmentWriteState.directory.createTempOutput(
                vectorData.getName() + "_score", "temp", segmentWriteState.context);
        tempScoreQuantizedVectorName = tempScoreQuantizedVectorData.getName();
      } else {
        tempScoreQuantizedVectorName = null;
      }
      FloatVectorValues floatVectorValues =
          MergedVectorValues.mergeFloatVectorValues(fieldInfo, mergeState);
      if (fieldInfo.getVectorSimilarityFunction() == COSINE) {
        floatVectorValues = new NormalizedFloatVectorValues(floatVectorValues);
      }
      DocsWithFieldSet docsWithField =
          encoding.isAsymmetric()
              ? writeBinarizedVectorAndQueryData(
                  tempQuantizedVectorData,
                  encoding,
                  tempScoreQuantizedVectorData,
                  floatVectorValues,
                  centroid,
                  quantizer)
              : writeVectorData(
                  tempQuantizedVectorData,
                  new QuantizedFloatVectorValues(floatVectorValues, quantizer, encoding, centroid));
      CodecUtil.writeFooter(tempQuantizedVectorData);
      IOUtils.close(tempQuantizedVectorData);
      // Re-open the temp data and copy it (minus the footer) into the real
      // vector data file, verifying the checksum afterwards.
      quantizedDataInput =
          segmentWriteState.directory.openInput(tempQuantizedVectorName, segmentWriteState.context);
      vectorData.copyBytes(
          quantizedDataInput, quantizedDataInput.length() - CodecUtil.footerLength());
      long vectorDataLength = vectorData.getFilePointer() - vectorDataOffset;
      CodecUtil.retrieveChecksum(quantizedDataInput);
      if (tempScoreQuantizedVectorData != null) {
        CodecUtil.writeFooter(tempScoreQuantizedVectorData);
        IOUtils.close(tempScoreQuantizedVectorData);
        quantizedScoreDataInput =
            segmentWriteState.directory.openInput(
                tempScoreQuantizedVectorName, segmentWriteState.context);
      }
      writeMeta(
          fieldInfo,
          segmentWriteState.segmentInfo.maxDoc(),
          vectorDataOffset,
          vectorDataLength,
          centroid,
          cDotC,
          docsWithField);
      // Ownership of the inputs transfers to the returned supplier: null out
      // the locals so the catch block below does not double-close them.
      final IndexInput finalQuantizedDataInput = quantizedDataInput;
      final IndexInput finalQuantizedScoreDataInput = quantizedScoreDataInput;
      tempQuantizedVectorData = null;
      tempScoreQuantizedVectorData = null;
      quantizedDataInput = null;
      quantizedScoreDataInput = null;
      OffHeapScalarQuantizedVectorValues vectorValues =
          new OffHeapScalarQuantizedVectorValues.DenseOffHeapVectorValues(
              fieldInfo.getVectorDimension(),
              docsWithField.cardinality(),
              centroid,
              cDotC,
              quantizer,
              encoding,
              fieldInfo.getVectorSimilarityFunction(),
              vectorsScorer,
              finalQuantizedDataInput);
      OffHeapScalarQuantizedVectorValues scoreVectorValues = null;
      if (finalQuantizedScoreDataInput != null) {
        scoreVectorValues =
            new OffHeapScalarQuantizedVectorValues.DenseOffHeapVectorValues(
                true,
                fieldInfo.getVectorDimension(),
                docsWithField.cardinality(),
                centroid,
                cDotC,
                quantizer,
                encoding,
                fieldInfo.getVectorSimilarityFunction(),
                vectorsScorer,
                finalQuantizedScoreDataInput);
      }
      RandomVectorScorerSupplier scorerSupplier =
          scoreVectorValues == null
              ? vectorsScorer.getRandomVectorScorerSupplier(
                  fieldInfo.getVectorSimilarityFunction(), vectorValues)
              : vectorsScorer.getRandomVectorScorerSupplier(
                  fieldInfo.getVectorSimilarityFunction(), scoreVectorValues, vectorValues);
      return new QuantizedCloseableRandomVectorScorerSupplier(
          scorerSupplier,
          vectorValues,
          () -> {
            // Close the inputs and delete the temp files when the supplier is done.
            IOUtils.close(finalQuantizedDataInput, finalQuantizedScoreDataInput);
            if (tempScoreQuantizedVectorName != null) {
              IOUtils.deleteFilesIgnoringExceptions(
                  segmentWriteState.directory, tempScoreQuantizedVectorName);
            }
            IOUtils.deleteFilesIgnoringExceptions(
                segmentWriteState.directory, tempQuantizedVectorName);
          });
    } catch (Throwable t) {
      // Best-effort cleanup: close anything still open and delete temp files,
      // suppressing secondary exceptions into the original throwable.
      IOUtils.closeWhileSuppressingExceptions(
          t,
          tempQuantizedVectorData,
          tempScoreQuantizedVectorData,
          quantizedDataInput,
          quantizedScoreDataInput);
      if (tempQuantizedVectorData != null) {
        IOUtils.deleteFilesSuppressingExceptions(
            t, segmentWriteState.directory, tempQuantizedVectorData.getName());
      }
      if (tempScoreQuantizedVectorData != null) {
        IOUtils.deleteFilesSuppressingExceptions(
            t, segmentWriteState.directory, tempScoreQuantizedVectorData.getName());
      }
      throw t;
    }
  }
  /** Closes the metadata and vector data outputs along with the raw vector delegate. */
  @Override
  public void close() throws IOException {
    IOUtils.close(meta, vectorData, rawVectorDelegate);
  }
static float[] getCentroid(KnnVectorsReader vectorsReader, String fieldName) {
vectorsReader = vectorsReader.unwrapReaderForField(fieldName);
if (vectorsReader instanceof Lucene104ScalarQuantizedVectorsReader reader) {
return reader.getCentroid(fieldName);
}
return null;
}
static int mergeAndRecalculateCentroids(
MergeState mergeState, FieldInfo fieldInfo, float[] mergedCentroid) throws IOException {
boolean recalculate = false;
int totalVectorCount = 0;
for (int i = 0; i < mergeState.knnVectorsReaders.length; i++) {
KnnVectorsReader knnVectorsReader = mergeState.knnVectorsReaders[i];
if (knnVectorsReader == null
|| knnVectorsReader.getFloatVectorValues(fieldInfo.name) == null) {
continue;
}
float[] centroid = getCentroid(knnVectorsReader, fieldInfo.name);
int vectorCount = knnVectorsReader.getFloatVectorValues(fieldInfo.name).size();
if (vectorCount == 0) {
continue;
}
totalVectorCount += vectorCount;
// If there aren't centroids, or previously clustered with more than one cluster
// or if there are deleted docs, we must recalculate the centroid
if (centroid == null || mergeState.liveDocs[i] != null) {
recalculate = true;
break;
}
for (int j = 0; j < centroid.length; j++) {
mergedCentroid[j] += centroid[j] * vectorCount;
}
}
if (totalVectorCount == 0) {
return 0;
} else if (recalculate) {
return calculateCentroid(mergeState, fieldInfo, mergedCentroid);
} else {
for (int j = 0; j < mergedCentroid.length; j++) {
mergedCentroid[j] = mergedCentroid[j] / totalVectorCount;
}
if (fieldInfo.getVectorSimilarityFunction() == COSINE) {
VectorUtil.l2normalize(mergedCentroid);
}
return totalVectorCount;
}
}
static int calculateCentroid(MergeState mergeState, FieldInfo fieldInfo, float[] centroid)
throws IOException {
assert fieldInfo.getVectorEncoding().equals(VectorEncoding.FLOAT32);
// clear out the centroid
Arrays.fill(centroid, 0);
int count = 0;
for (int i = 0; i < mergeState.knnVectorsReaders.length; i++) {
KnnVectorsReader knnVectorsReader = mergeState.knnVectorsReaders[i];
if (knnVectorsReader == null) continue;
FloatVectorValues vectorValues =
mergeState.knnVectorsReaders[i].getFloatVectorValues(fieldInfo.name);
if (vectorValues == null) {
continue;
}
KnnVectorValues.DocIndexIterator iterator = vectorValues.iterator();
for (int doc = iterator.nextDoc();
doc != DocIdSetIterator.NO_MORE_DOCS;
doc = iterator.nextDoc()) {
++count;
float[] vector = vectorValues.vectorValue(iterator.index());
for (int j = 0; j < vector.length; j++) {
centroid[j] += vector[j];
}
}
}
if (count == 0) {
return count;
}
for (int i = 0; i < centroid.length; i++) {
centroid[i] /= count;
}
if (fieldInfo.getVectorSimilarityFunction() == COSINE) {
VectorUtil.l2normalize(centroid);
}
return count;
}
@Override
public long ramBytesUsed() {
long total = SHALLOW_RAM_BYTES_USED;
for (FieldWriter field : fields) {
// the field tracks the delegate field usage
total += field.ramBytesUsed();
}
return total;
}
static class FieldWriter extends FlatFieldVectorsWriter<float[]> {
private static final long SHALLOW_SIZE = shallowSizeOfInstance(FieldWriter.class);
private final FieldInfo fieldInfo;
private boolean finished;
private final FlatFieldVectorsWriter<float[]> flatFieldVectorsWriter;
private final float[] dimensionSums;
private final FloatArrayList magnitudes = new FloatArrayList();
FieldWriter(FieldInfo fieldInfo, FlatFieldVectorsWriter<float[]> flatFieldVectorsWriter) {
this.fieldInfo = fieldInfo;
this.flatFieldVectorsWriter = flatFieldVectorsWriter;
this.dimensionSums = new float[fieldInfo.getVectorDimension()];
}
@Override
public List<float[]> getVectors() {
return flatFieldVectorsWriter.getVectors();
}
public void normalizeVectors() {
for (int i = 0; i < flatFieldVectorsWriter.getVectors().size(); i++) {
float[] vector = flatFieldVectorsWriter.getVectors().get(i);
float magnitude = magnitudes.get(i);
for (int j = 0; j < vector.length; j++) {
vector[j] /= magnitude;
}
}
}
@Override
public DocsWithFieldSet getDocsWithFieldSet() {
return flatFieldVectorsWriter.getDocsWithFieldSet();
}
@Override
public void finish() throws IOException {
if (finished) {
return;
}
assert flatFieldVectorsWriter.isFinished();
finished = true;
}
@Override
public boolean isFinished() {
return finished && flatFieldVectorsWriter.isFinished();
}
@Override
public void addValue(int docID, float[] vectorValue) throws IOException {
flatFieldVectorsWriter.addValue(docID, vectorValue);
if (fieldInfo.getVectorSimilarityFunction() == COSINE) {
float dp = VectorUtil.dotProduct(vectorValue, vectorValue);
float divisor = (float) Math.sqrt(dp);
magnitudes.add(divisor);
for (int i = 0; i < vectorValue.length; i++) {
dimensionSums[i] += (vectorValue[i] / divisor);
}
} else {
for (int i = 0; i < vectorValue.length; i++) {
dimensionSums[i] += vectorValue[i];
}
}
}
@Override
public float[] copyValue(float[] vectorValue) {
throw new UnsupportedOperationException();
}
@Override
public long ramBytesUsed() {
long size = SHALLOW_SIZE;
size += flatFieldVectorsWriter.ramBytesUsed();
size += magnitudes.ramBytesUsed();
return size;
}
}
static class QuantizedFloatVectorValues extends QuantizedByteVectorValues {
    // Corrective terms for the most recently quantized ordinal (lastOrd).
    private OptimizedScalarQuantizer.QuantizationResult corrections;
    // One quantized value per (discretized) dimension.
    private final byte[] quantized;
    // On-disk form; aliases `quantized` for the non-packed encodings.
    private final byte[] packed;
    private final float[] centroid;
    // centroid . centroid, precomputed once in the constructor.
    private final float centroidDP;
    private final FloatVectorValues values;
    private final OptimizedScalarQuantizer quantizer;
    private final ScalarEncoding encoding;
    // Ordinal of the cached quantization; -1 until the first vectorValue() call.
    private int lastOrd = -1;
QuantizedFloatVectorValues(
FloatVectorValues delegate,
OptimizedScalarQuantizer quantizer,
ScalarEncoding encoding,
float[] centroid) {
this.values = delegate;
this.quantizer = quantizer;
this.encoding = encoding;
this.quantized = new byte[encoding.getDiscreteDimensions(delegate.dimension())];
this.packed =
switch (encoding) {
case UNSIGNED_BYTE, SEVEN_BIT -> this.quantized;
case PACKED_NIBBLE, SINGLE_BIT_QUERY_NIBBLE ->
new byte[encoding.getDocPackedLength(quantized.length)];
};
this.centroid = centroid;
this.centroidDP = VectorUtil.dotProduct(centroid, centroid);
}
@Override
public OptimizedScalarQuantizer.QuantizationResult getCorrectiveTerms(int ord) {
if (ord != lastOrd) {
throw new IllegalStateException(
"attempt to retrieve corrective terms for different ord "
+ ord
+ " than the quantization was done for: "
+ lastOrd);
}
return corrections;
}
@Override
public byte[] vectorValue(int ord) throws IOException {
if (ord != lastOrd) {
quantize(ord);
lastOrd = ord;
}
return packed;
}
@Override
public int dimension() {
return values.dimension();
}
@Override
public OptimizedScalarQuantizer getQuantizer() {
throw new UnsupportedOperationException();
}
@Override
public ScalarEncoding getScalarEncoding() {
return encoding;
}
@Override
public float[] getCentroid() throws IOException {
return centroid;
}
@Override
public float getCentroidDP() {
return centroidDP;
}
@Override
public int size() {
return values.size();
}
@Override
public VectorScorer scorer(float[] target) throws IOException {
throw new UnsupportedOperationException();
}
@Override
public QuantizedByteVectorValues copy() throws IOException {
return new QuantizedFloatVectorValues(values.copy(), quantizer, encoding, centroid);
}
private void quantize(int ord) throws IOException {
corrections =
quantizer.scalarQuantize(
values.vectorValue(ord), quantized, encoding.getBits(), centroid);
switch (encoding) {
case PACKED_NIBBLE -> OffHeapScalarQuantizedVectorValues.packNibbles(quantized, packed);
case SINGLE_BIT_QUERY_NIBBLE -> OptimizedScalarQuantizer.packAsBinary(quantized, packed);
case UNSIGNED_BYTE, SEVEN_BIT -> {}
}
}
@Override
public DocIndexIterator iterator() {
return values.iterator();
}
@Override
public int ordToDoc(int ord) {
return values.ordToDoc(ord);
}
}
static class QuantizedCloseableRandomVectorScorerSupplier
implements CloseableRandomVectorScorerSupplier {
private final RandomVectorScorerSupplier supplier;
private final KnnVectorValues vectorValues;
private final Closeable onClose;
QuantizedCloseableRandomVectorScorerSupplier(
RandomVectorScorerSupplier supplier, KnnVectorValues vectorValues, Closeable onClose) {
this.supplier = supplier;
this.onClose = onClose;
this.vectorValues = vectorValues;
}
@Override
public UpdateableRandomVectorScorer scorer() throws IOException {
return supplier.scorer();
}
@Override
public RandomVectorScorerSupplier copy() throws IOException {
return supplier.copy();
}
@Override
public void close() throws IOException {
onClose.close();
}
@Override
public int totalVectorCount() {
return vectorValues.size();
}
}
static final class NormalizedFloatVectorValues extends FloatVectorValues {
private final FloatVectorValues values;
private final float[] normalizedVector;
NormalizedFloatVectorValues(FloatVectorValues values) {
this.values = values;
this.normalizedVector = new float[values.dimension()];
}
@Override
public int dimension() {
return values.dimension();
}
@Override
public int size() {
return values.size();
}
@Override
public int ordToDoc(int ord) {
return values.ordToDoc(ord);
}
@Override
public float[] vectorValue(int ord) throws IOException {
System.arraycopy(values.vectorValue(ord), 0, normalizedVector, 0, normalizedVector.length);
VectorUtil.l2normalize(normalizedVector);
return normalizedVector;
}
@Override
public DocIndexIterator iterator() {
return values.iterator();
}
@Override
public NormalizedFloatVectorValues copy() throws IOException {
return new NormalizedFloatVectorValues(values.copy());
}
}
}
|
googleapis/google-cloud-java | 36,993 | java-retail/proto-google-cloud-retail-v2beta/src/main/java/com/google/cloud/retail/v2beta/ListControlsRequest.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/retail/v2beta/control_service.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.retail.v2beta;
/**
*
*
* <pre>
* Request for ListControls method.
* </pre>
*
* Protobuf type {@code google.cloud.retail.v2beta.ListControlsRequest}
*/
public final class ListControlsRequest extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.retail.v2beta.ListControlsRequest)
ListControlsRequestOrBuilder {
private static final long serialVersionUID = 0L;
// Use ListControlsRequest.newBuilder() to construct.
private ListControlsRequest(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private ListControlsRequest() {
parent_ = "";
pageToken_ = "";
filter_ = "";
}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
return new ListControlsRequest();
}
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.retail.v2beta.ControlServiceProto
.internal_static_google_cloud_retail_v2beta_ListControlsRequest_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.retail.v2beta.ControlServiceProto
.internal_static_google_cloud_retail_v2beta_ListControlsRequest_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.retail.v2beta.ListControlsRequest.class,
com.google.cloud.retail.v2beta.ListControlsRequest.Builder.class);
}
public static final int PARENT_FIELD_NUMBER = 1;
@SuppressWarnings("serial")
private volatile java.lang.Object parent_ = "";
/**
*
*
* <pre>
* Required. The catalog resource name. Format:
* `projects/{project_number}/locations/{location_id}/catalogs/{catalog_id}`
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return The parent.
*/
@java.lang.Override
public java.lang.String getParent() {
java.lang.Object ref = parent_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
parent_ = s;
return s;
}
}
/**
*
*
* <pre>
* Required. The catalog resource name. Format:
* `projects/{project_number}/locations/{location_id}/catalogs/{catalog_id}`
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return The bytes for parent.
*/
@java.lang.Override
public com.google.protobuf.ByteString getParentBytes() {
java.lang.Object ref = parent_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
parent_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
public static final int PAGE_SIZE_FIELD_NUMBER = 2;
private int pageSize_ = 0;
/**
*
*
* <pre>
* Optional. Maximum number of results to return. If unspecified, defaults
* to 50. Max allowed value is 1000.
* </pre>
*
* <code>int32 page_size = 2 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @return The pageSize.
*/
@java.lang.Override
public int getPageSize() {
return pageSize_;
}
public static final int PAGE_TOKEN_FIELD_NUMBER = 3;
@SuppressWarnings("serial")
private volatile java.lang.Object pageToken_ = "";
/**
*
*
* <pre>
* Optional. A page token, received from a previous `ListControls` call.
* Provide this to retrieve the subsequent page.
* </pre>
*
* <code>string page_token = 3 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @return The pageToken.
*/
@java.lang.Override
public java.lang.String getPageToken() {
java.lang.Object ref = pageToken_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
pageToken_ = s;
return s;
}
}
/**
*
*
* <pre>
* Optional. A page token, received from a previous `ListControls` call.
* Provide this to retrieve the subsequent page.
* </pre>
*
* <code>string page_token = 3 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @return The bytes for pageToken.
*/
@java.lang.Override
public com.google.protobuf.ByteString getPageTokenBytes() {
java.lang.Object ref = pageToken_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
pageToken_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
public static final int FILTER_FIELD_NUMBER = 4;
@SuppressWarnings("serial")
private volatile java.lang.Object filter_ = "";
/**
*
*
* <pre>
* Optional. A filter to apply on the list results. Supported features:
*
* * List all the products under the parent branch if
* [filter][google.cloud.retail.v2beta.ListControlsRequest.filter] is unset.
* * List controls that are used in a single ServingConfig:
* 'serving_config = "boosted_home_page_cvr"'
* </pre>
*
* <code>string filter = 4 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @return The filter.
*/
@java.lang.Override
public java.lang.String getFilter() {
java.lang.Object ref = filter_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
filter_ = s;
return s;
}
}
/**
*
*
* <pre>
* Optional. A filter to apply on the list results. Supported features:
*
* * List all the products under the parent branch if
* [filter][google.cloud.retail.v2beta.ListControlsRequest.filter] is unset.
* * List controls that are used in a single ServingConfig:
* 'serving_config = "boosted_home_page_cvr"'
* </pre>
*
* <code>string filter = 4 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @return The bytes for filter.
*/
@java.lang.Override
public com.google.protobuf.ByteString getFilterBytes() {
java.lang.Object ref = filter_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
filter_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
  // Memoized result of isInitialized(): -1 = not yet computed, 1 = true, 0 = false.
  private byte memoizedIsInitialized = -1;

  @java.lang.Override
  public final boolean isInitialized() {
    byte isInitialized = memoizedIsInitialized;
    if (isInitialized == 1) return true;
    if (isInitialized == 0) return false;
    // Proto3 message with no required fields: always initialized.
    memoizedIsInitialized = 1;
    return true;
  }
  @java.lang.Override
  public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
    // Proto3 wire format: fields equal to their default value are omitted.
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(parent_)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 1, parent_);
    }
    if (pageSize_ != 0) {
      output.writeInt32(2, pageSize_);
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(pageToken_)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 3, pageToken_);
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(filter_)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 4, filter_);
    }
    // Preserve any unknown fields carried through from parsing.
    getUnknownFields().writeTo(output);
  }
  @java.lang.Override
  public int getSerializedSize() {
    // Memoized: -1 means "not yet computed".
    int size = memoizedSize;
    if (size != -1) return size;
    // Mirrors writeTo(): default-valued fields contribute no bytes.
    size = 0;
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(parent_)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, parent_);
    }
    if (pageSize_ != 0) {
      size += com.google.protobuf.CodedOutputStream.computeInt32Size(2, pageSize_);
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(pageToken_)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(3, pageToken_);
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(filter_)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(4, filter_);
    }
    size += getUnknownFields().getSerializedSize();
    memoizedSize = size;
    return size;
  }
  @java.lang.Override
  public boolean equals(final java.lang.Object obj) {
    if (obj == this) {
      return true;
    }
    if (!(obj instanceof com.google.cloud.retail.v2beta.ListControlsRequest)) {
      return super.equals(obj);
    }
    com.google.cloud.retail.v2beta.ListControlsRequest other =
        (com.google.cloud.retail.v2beta.ListControlsRequest) obj;
    // Field-by-field comparison, including unknown fields.
    if (!getParent().equals(other.getParent())) return false;
    if (getPageSize() != other.getPageSize()) return false;
    if (!getPageToken().equals(other.getPageToken())) return false;
    if (!getFilter().equals(other.getFilter())) return false;
    if (!getUnknownFields().equals(other.getUnknownFields())) return false;
    return true;
  }
  @java.lang.Override
  public int hashCode() {
    // Memoized; 0 doubles as the "not yet computed" sentinel.
    if (memoizedHashCode != 0) {
      return memoizedHashCode;
    }
    // Standard protoc hash: mix each field number with its value's hash.
    int hash = 41;
    hash = (19 * hash) + getDescriptor().hashCode();
    hash = (37 * hash) + PARENT_FIELD_NUMBER;
    hash = (53 * hash) + getParent().hashCode();
    hash = (37 * hash) + PAGE_SIZE_FIELD_NUMBER;
    hash = (53 * hash) + getPageSize();
    hash = (37 * hash) + PAGE_TOKEN_FIELD_NUMBER;
    hash = (53 * hash) + getPageToken().hashCode();
    hash = (37 * hash) + FILTER_FIELD_NUMBER;
    hash = (53 * hash) + getFilter().hashCode();
    hash = (29 * hash) + getUnknownFields().hashCode();
    memoizedHashCode = hash;
    return hash;
  }
public static com.google.cloud.retail.v2beta.ListControlsRequest parseFrom(
java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.retail.v2beta.ListControlsRequest parseFrom(
java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.retail.v2beta.ListControlsRequest parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.retail.v2beta.ListControlsRequest parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.retail.v2beta.ListControlsRequest parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.retail.v2beta.ListControlsRequest parseFrom(
byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.retail.v2beta.ListControlsRequest parseFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.retail.v2beta.ListControlsRequest parseFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.retail.v2beta.ListControlsRequest parseDelimitedFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
}
public static com.google.cloud.retail.v2beta.ListControlsRequest parseDelimitedFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.retail.v2beta.ListControlsRequest parseFrom(
com.google.protobuf.CodedInputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.retail.v2beta.ListControlsRequest parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() {
return newBuilder();
}
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(com.google.cloud.retail.v2beta.ListControlsRequest prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
*
*
* <pre>
* Request for ListControls method.
* </pre>
*
* Protobuf type {@code google.cloud.retail.v2beta.ListControlsRequest}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.cloud.retail.v2beta.ListControlsRequest)
com.google.cloud.retail.v2beta.ListControlsRequestOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.retail.v2beta.ControlServiceProto
.internal_static_google_cloud_retail_v2beta_ListControlsRequest_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.retail.v2beta.ControlServiceProto
.internal_static_google_cloud_retail_v2beta_ListControlsRequest_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.retail.v2beta.ListControlsRequest.class,
com.google.cloud.retail.v2beta.ListControlsRequest.Builder.class);
}
// Construct using com.google.cloud.retail.v2beta.ListControlsRequest.newBuilder()
private Builder() {}
private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
}
@java.lang.Override
public Builder clear() {
super.clear();
bitField0_ = 0;
parent_ = "";
pageSize_ = 0;
pageToken_ = "";
filter_ = "";
return this;
}
@java.lang.Override
public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
return com.google.cloud.retail.v2beta.ControlServiceProto
.internal_static_google_cloud_retail_v2beta_ListControlsRequest_descriptor;
}
@java.lang.Override
public com.google.cloud.retail.v2beta.ListControlsRequest getDefaultInstanceForType() {
return com.google.cloud.retail.v2beta.ListControlsRequest.getDefaultInstance();
}
@java.lang.Override
public com.google.cloud.retail.v2beta.ListControlsRequest build() {
com.google.cloud.retail.v2beta.ListControlsRequest result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public com.google.cloud.retail.v2beta.ListControlsRequest buildPartial() {
com.google.cloud.retail.v2beta.ListControlsRequest result =
new com.google.cloud.retail.v2beta.ListControlsRequest(this);
if (bitField0_ != 0) {
buildPartial0(result);
}
onBuilt();
return result;
}
    // Copies into `result` only the fields whose presence bits are set in
    // the builder's bitField0_ (bit 0: parent, bit 1: pageSize,
    // bit 2: pageToken, bit 3: filter).
    private void buildPartial0(com.google.cloud.retail.v2beta.ListControlsRequest result) {
      int from_bitField0_ = bitField0_;
      if (((from_bitField0_ & 0x00000001) != 0)) {
        result.parent_ = parent_;
      }
      if (((from_bitField0_ & 0x00000002) != 0)) {
        result.pageSize_ = pageSize_;
      }
      if (((from_bitField0_ & 0x00000004) != 0)) {
        result.pageToken_ = pageToken_;
      }
      if (((from_bitField0_ & 0x00000008) != 0)) {
        result.filter_ = filter_;
      }
    }
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof com.google.cloud.retail.v2beta.ListControlsRequest) {
return mergeFrom((com.google.cloud.retail.v2beta.ListControlsRequest) other);
} else {
super.mergeFrom(other);
return this;
}
}
    // Merges `other` into this builder: for each scalar field, the other
    // message's value wins only when it is non-default (proto3 merge rules).
    public Builder mergeFrom(com.google.cloud.retail.v2beta.ListControlsRequest other) {
      if (other == com.google.cloud.retail.v2beta.ListControlsRequest.getDefaultInstance())
        return this;
      if (!other.getParent().isEmpty()) {
        parent_ = other.parent_;
        bitField0_ |= 0x00000001;
        onChanged();
      }
      if (other.getPageSize() != 0) {
        setPageSize(other.getPageSize());
      }
      if (!other.getPageToken().isEmpty()) {
        pageToken_ = other.pageToken_;
        bitField0_ |= 0x00000004;
        onChanged();
      }
      if (!other.getFilter().isEmpty()) {
        filter_ = other.filter_;
        bitField0_ |= 0x00000008;
        onChanged();
      }
      this.mergeUnknownFields(other.getUnknownFields());
      onChanged();
      return this;
    }
@java.lang.Override
public final boolean isInitialized() {
return true;
}
    @java.lang.Override
    public Builder mergeFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      if (extensionRegistry == null) {
        throw new java.lang.NullPointerException();
      }
      try {
        boolean done = false;
        // Tag-dispatch parse loop; tag = (field_number << 3) | wire_type.
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              done = true;
              break;
            case 10: // field 1 (parent), length-delimited
              {
                parent_ = input.readStringRequireUtf8();
                bitField0_ |= 0x00000001;
                break;
              } // case 10
            case 16: // field 2 (page_size), varint
              {
                pageSize_ = input.readInt32();
                bitField0_ |= 0x00000002;
                break;
              } // case 16
            case 26: // field 3 (page_token), length-delimited
              {
                pageToken_ = input.readStringRequireUtf8();
                bitField0_ |= 0x00000004;
                break;
              } // case 26
            case 34: // field 4 (filter), length-delimited
              {
                filter_ = input.readStringRequireUtf8();
                bitField0_ |= 0x00000008;
                break;
              } // case 34
            default:
              {
                // Unknown field: preserve it, or stop on an end-group tag.
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
          } // switch (tag)
        } // while (!done)
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.unwrapIOException();
      } finally {
        onChanged();
      } // finally
      return this;
    }
private int bitField0_;
private java.lang.Object parent_ = "";
/**
*
*
* <pre>
* Required. The catalog resource name. Format:
* `projects/{project_number}/locations/{location_id}/catalogs/{catalog_id}`
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return The parent.
*/
public java.lang.String getParent() {
java.lang.Object ref = parent_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
parent_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
*
*
* <pre>
* Required. The catalog resource name. Format:
* `projects/{project_number}/locations/{location_id}/catalogs/{catalog_id}`
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return The bytes for parent.
*/
public com.google.protobuf.ByteString getParentBytes() {
java.lang.Object ref = parent_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
parent_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
*
*
* <pre>
* Required. The catalog resource name. Format:
* `projects/{project_number}/locations/{location_id}/catalogs/{catalog_id}`
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @param value The parent to set.
* @return This builder for chaining.
*/
public Builder setParent(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
parent_ = value;
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. The catalog resource name. Format:
* `projects/{project_number}/locations/{location_id}/catalogs/{catalog_id}`
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return This builder for chaining.
*/
public Builder clearParent() {
parent_ = getDefaultInstance().getParent();
bitField0_ = (bitField0_ & ~0x00000001);
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. The catalog resource name. Format:
* `projects/{project_number}/locations/{location_id}/catalogs/{catalog_id}`
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @param value The bytes for parent to set.
* @return This builder for chaining.
*/
public Builder setParentBytes(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
checkByteStringIsUtf8(value);
parent_ = value;
bitField0_ |= 0x00000001;
onChanged();
return this;
}
private int pageSize_;
/**
*
*
* <pre>
* Optional. Maximum number of results to return. If unspecified, defaults
* to 50. Max allowed value is 1000.
* </pre>
*
* <code>int32 page_size = 2 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @return The pageSize.
*/
@java.lang.Override
public int getPageSize() {
return pageSize_;
}
/**
*
*
* <pre>
* Optional. Maximum number of results to return. If unspecified, defaults
* to 50. Max allowed value is 1000.
* </pre>
*
* <code>int32 page_size = 2 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @param value The pageSize to set.
* @return This builder for chaining.
*/
public Builder setPageSize(int value) {
pageSize_ = value;
bitField0_ |= 0x00000002;
onChanged();
return this;
}
/**
*
*
* <pre>
* Optional. Maximum number of results to return. If unspecified, defaults
* to 50. Max allowed value is 1000.
* </pre>
*
* <code>int32 page_size = 2 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @return This builder for chaining.
*/
public Builder clearPageSize() {
bitField0_ = (bitField0_ & ~0x00000002);
pageSize_ = 0;
onChanged();
return this;
}
private java.lang.Object pageToken_ = "";
/**
*
*
* <pre>
* Optional. A page token, received from a previous `ListControls` call.
* Provide this to retrieve the subsequent page.
* </pre>
*
* <code>string page_token = 3 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @return The pageToken.
*/
public java.lang.String getPageToken() {
java.lang.Object ref = pageToken_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
pageToken_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
*
*
* <pre>
* Optional. A page token, received from a previous `ListControls` call.
* Provide this to retrieve the subsequent page.
* </pre>
*
* <code>string page_token = 3 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @return The bytes for pageToken.
*/
public com.google.protobuf.ByteString getPageTokenBytes() {
java.lang.Object ref = pageToken_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
pageToken_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
*
*
* <pre>
* Optional. A page token, received from a previous `ListControls` call.
* Provide this to retrieve the subsequent page.
* </pre>
*
* <code>string page_token = 3 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @param value The pageToken to set.
* @return This builder for chaining.
*/
public Builder setPageToken(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
pageToken_ = value;
bitField0_ |= 0x00000004;
onChanged();
return this;
}
/**
*
*
* <pre>
* Optional. A page token, received from a previous `ListControls` call.
* Provide this to retrieve the subsequent page.
* </pre>
*
* <code>string page_token = 3 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @return This builder for chaining.
*/
public Builder clearPageToken() {
pageToken_ = getDefaultInstance().getPageToken();
bitField0_ = (bitField0_ & ~0x00000004);
onChanged();
return this;
}
/**
*
*
* <pre>
* Optional. A page token, received from a previous `ListControls` call.
* Provide this to retrieve the subsequent page.
* </pre>
*
* <code>string page_token = 3 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @param value The bytes for pageToken to set.
* @return This builder for chaining.
*/
public Builder setPageTokenBytes(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
checkByteStringIsUtf8(value);
pageToken_ = value;
bitField0_ |= 0x00000004;
onChanged();
return this;
}
    // Backing store for the "filter" field: holds either a java.lang.String or a
    // com.google.protobuf.ByteString; the getters below lazily convert and cache
    // whichever representation is requested.
    private java.lang.Object filter_ = "";
    /**
     *
     *
     * <pre>
     * Optional. A filter to apply on the list results. Supported features:
     *
     * * List all the products under the parent branch if
     * [filter][google.cloud.retail.v2beta.ListControlsRequest.filter] is unset.
     * * List controls that are used in a single ServingConfig:
     * 'serving_config = "boosted_home_page_cvr"'
     * </pre>
     *
     * <code>string filter = 4 [(.google.api.field_behavior) = OPTIONAL];</code>
     *
     * @return The filter.
     */
    public java.lang.String getFilter() {
      java.lang.Object ref = filter_;
      if (!(ref instanceof java.lang.String)) {
        com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        // Cache the decoded String so subsequent calls skip the UTF-8 decode.
        filter_ = s;
        return s;
      } else {
        return (java.lang.String) ref;
      }
    }
    /**
     *
     *
     * <pre>
     * Optional. A filter to apply on the list results. Supported features:
     *
     * * List all the products under the parent branch if
     * [filter][google.cloud.retail.v2beta.ListControlsRequest.filter] is unset.
     * * List controls that are used in a single ServingConfig:
     * 'serving_config = "boosted_home_page_cvr"'
     * </pre>
     *
     * <code>string filter = 4 [(.google.api.field_behavior) = OPTIONAL];</code>
     *
     * @return The bytes for filter.
     */
    public com.google.protobuf.ByteString getFilterBytes() {
      java.lang.Object ref = filter_;
      if (ref instanceof String) {
        com.google.protobuf.ByteString b =
            com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
        // Cache the encoded ByteString so subsequent calls skip the UTF-8 encode.
        filter_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }
    /**
     *
     *
     * <pre>
     * Optional. A filter to apply on the list results. Supported features:
     *
     * * List all the products under the parent branch if
     * [filter][google.cloud.retail.v2beta.ListControlsRequest.filter] is unset.
     * * List controls that are used in a single ServingConfig:
     * 'serving_config = "boosted_home_page_cvr"'
     * </pre>
     *
     * <code>string filter = 4 [(.google.api.field_behavior) = OPTIONAL];</code>
     *
     * @param value The filter to set.
     * @return This builder for chaining.
     */
    public Builder setFilter(java.lang.String value) {
      if (value == null) {
        throw new NullPointerException();
      }
      filter_ = value;
      // Bit 0x00000008 of bitField0_ records that "filter" has been explicitly set.
      bitField0_ |= 0x00000008;
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Optional. A filter to apply on the list results. Supported features:
     *
     * * List all the products under the parent branch if
     * [filter][google.cloud.retail.v2beta.ListControlsRequest.filter] is unset.
     * * List controls that are used in a single ServingConfig:
     * 'serving_config = "boosted_home_page_cvr"'
     * </pre>
     *
     * <code>string filter = 4 [(.google.api.field_behavior) = OPTIONAL];</code>
     *
     * @return This builder for chaining.
     */
    public Builder clearFilter() {
      // Reset to the default value and clear the has-bit for "filter".
      filter_ = getDefaultInstance().getFilter();
      bitField0_ = (bitField0_ & ~0x00000008);
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Optional. A filter to apply on the list results. Supported features:
     *
     * * List all the products under the parent branch if
     * [filter][google.cloud.retail.v2beta.ListControlsRequest.filter] is unset.
     * * List controls that are used in a single ServingConfig:
     * 'serving_config = "boosted_home_page_cvr"'
     * </pre>
     *
     * <code>string filter = 4 [(.google.api.field_behavior) = OPTIONAL];</code>
     *
     * @param value The bytes for filter to set.
     * @return This builder for chaining.
     */
    public Builder setFilterBytes(com.google.protobuf.ByteString value) {
      if (value == null) {
        throw new NullPointerException();
      }
      // Proto3 strings must be valid UTF-8; reject malformed bytes up front.
      checkByteStringIsUtf8(value);
      filter_ = value;
      bitField0_ |= 0x00000008;
      onChanged();
      return this;
    }
    // Generated pass-throughs: unknown-field handling is delegated entirely to the
    // protobuf base builder.
    @java.lang.Override
    public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.setUnknownFields(unknownFields);
    }
    @java.lang.Override
    public final Builder mergeUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.mergeUnknownFields(unknownFields);
    }
// @@protoc_insertion_point(builder_scope:google.cloud.retail.v2beta.ListControlsRequest)
}
// @@protoc_insertion_point(class_scope:google.cloud.retail.v2beta.ListControlsRequest)
  // Singleton default instance shared by every caller of getDefaultInstance();
  // initialized eagerly in the static block below.
  private static final com.google.cloud.retail.v2beta.ListControlsRequest DEFAULT_INSTANCE;
  static {
    DEFAULT_INSTANCE = new com.google.cloud.retail.v2beta.ListControlsRequest();
  }
  public static com.google.cloud.retail.v2beta.ListControlsRequest getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }
  // Wire-format parser. On any parse failure it attaches the partially built
  // message to the thrown InvalidProtocolBufferException so callers can inspect
  // what was decoded before the error.
  private static final com.google.protobuf.Parser<ListControlsRequest> PARSER =
      new com.google.protobuf.AbstractParser<ListControlsRequest>() {
        @java.lang.Override
        public ListControlsRequest parsePartialFrom(
            com.google.protobuf.CodedInputStream input,
            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
            throws com.google.protobuf.InvalidProtocolBufferException {
          Builder builder = newBuilder();
          try {
            builder.mergeFrom(input, extensionRegistry);
          } catch (com.google.protobuf.InvalidProtocolBufferException e) {
            throw e.setUnfinishedMessage(builder.buildPartial());
          } catch (com.google.protobuf.UninitializedMessageException e) {
            throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
          } catch (java.io.IOException e) {
            // Wrap plain I/O errors so the parser API surfaces a single exception type.
            throw new com.google.protobuf.InvalidProtocolBufferException(e)
                .setUnfinishedMessage(builder.buildPartial());
          }
          return builder.buildPartial();
        }
      };
  public static com.google.protobuf.Parser<ListControlsRequest> parser() {
    return PARSER;
  }
  @java.lang.Override
  public com.google.protobuf.Parser<ListControlsRequest> getParserForType() {
    return PARSER;
  }
  @java.lang.Override
  public com.google.cloud.retail.v2beta.ListControlsRequest getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }
}
/*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/configdelivery/v1/config_delivery.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.configdelivery.v1;
/**
*
*
* <pre>
* RolloutStrategy defines different ways to rollout a resource bundle across
* a set of clusters.
* </pre>
*
* Protobuf type {@code google.cloud.configdelivery.v1.RolloutStrategy}
*/
public final class RolloutStrategy extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.configdelivery.v1.RolloutStrategy)
RolloutStrategyOrBuilder {
  private static final long serialVersionUID = 0L;
  // Use RolloutStrategy.newBuilder() to construct.
  private RolloutStrategy(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
    super(builder);
  }
  // Private no-arg constructor: instances are only created via the builder or parser.
  private RolloutStrategy() {}
  // Reflective factory used by the protobuf runtime; the parameter is a marker only.
  @java.lang.Override
  @SuppressWarnings({"unused"})
  protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
    return new RolloutStrategy();
  }
  // Descriptor/accessor-table plumbing generated from config_delivery.proto;
  // links this class to its reflective message metadata.
  public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
    return com.google.cloud.configdelivery.v1.ConfigDeliveryProto
        .internal_static_google_cloud_configdelivery_v1_RolloutStrategy_descriptor;
  }
  @java.lang.Override
  protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internalGetFieldAccessorTable() {
    return com.google.cloud.configdelivery.v1.ConfigDeliveryProto
        .internal_static_google_cloud_configdelivery_v1_RolloutStrategy_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            com.google.cloud.configdelivery.v1.RolloutStrategy.class,
            com.google.cloud.configdelivery.v1.RolloutStrategy.Builder.class);
  }
  // Oneof bookkeeping: strategyCase_ holds the field number of the set member
  // (0 = none), and strategy_ holds that member's message object.
  private int strategyCase_ = 0;
  @SuppressWarnings("serial")
  private java.lang.Object strategy_;
  /** Enumeration of the "strategy" oneof cases; values mirror the field numbers. */
  public enum StrategyCase
      implements
          com.google.protobuf.Internal.EnumLite,
          com.google.protobuf.AbstractMessage.InternalOneOfEnum {
    ALL_AT_ONCE(1),
    ROLLING(2),
    STRATEGY_NOT_SET(0);
    private final int value;
    private StrategyCase(int value) {
      this.value = value;
    }
    /**
     * @param value The number of the enum to look for.
     * @return The enum associated with the given number.
     * @deprecated Use {@link #forNumber(int)} instead.
     */
    @java.lang.Deprecated
    public static StrategyCase valueOf(int value) {
      return forNumber(value);
    }
    public static StrategyCase forNumber(int value) {
      switch (value) {
        case 1:
          return ALL_AT_ONCE;
        case 2:
          return ROLLING;
        case 0:
          return STRATEGY_NOT_SET;
        default:
          // Unknown field number for this oneof.
          return null;
      }
    }
    public int getNumber() {
      return this.value;
    }
  };
  public StrategyCase getStrategyCase() {
    return StrategyCase.forNumber(strategyCase_);
  }
  public static final int ALL_AT_ONCE_FIELD_NUMBER = 1;
  /**
   *
   *
   * <pre>
   * AllAtOnceStrategy causes all clusters to be updated concurrently.
   * </pre>
   *
   * <code>.google.cloud.configdelivery.v1.AllAtOnceStrategy all_at_once = 1;</code>
   *
   * @return Whether the allAtOnce field is set.
   */
  @java.lang.Override
  public boolean hasAllAtOnce() {
    return strategyCase_ == 1;
  }
  /**
   *
   *
   * <pre>
   * AllAtOnceStrategy causes all clusters to be updated concurrently.
   * </pre>
   *
   * <code>.google.cloud.configdelivery.v1.AllAtOnceStrategy all_at_once = 1;</code>
   *
   * @return The allAtOnce.
   */
  @java.lang.Override
  public com.google.cloud.configdelivery.v1.AllAtOnceStrategy getAllAtOnce() {
    if (strategyCase_ == 1) {
      return (com.google.cloud.configdelivery.v1.AllAtOnceStrategy) strategy_;
    }
    // Oneof not set to this case: return the immutable default instance.
    return com.google.cloud.configdelivery.v1.AllAtOnceStrategy.getDefaultInstance();
  }
  /**
   *
   *
   * <pre>
   * AllAtOnceStrategy causes all clusters to be updated concurrently.
   * </pre>
   *
   * <code>.google.cloud.configdelivery.v1.AllAtOnceStrategy all_at_once = 1;</code>
   */
  @java.lang.Override
  public com.google.cloud.configdelivery.v1.AllAtOnceStrategyOrBuilder getAllAtOnceOrBuilder() {
    if (strategyCase_ == 1) {
      return (com.google.cloud.configdelivery.v1.AllAtOnceStrategy) strategy_;
    }
    return com.google.cloud.configdelivery.v1.AllAtOnceStrategy.getDefaultInstance();
  }
  public static final int ROLLING_FIELD_NUMBER = 2;
  /**
   *
   *
   * <pre>
   * RollingStrategy causes a specified number of clusters to be updated
   * concurrently until all clusters are updated.
   * </pre>
   *
   * <code>.google.cloud.configdelivery.v1.RollingStrategy rolling = 2;</code>
   *
   * @return Whether the rolling field is set.
   */
  @java.lang.Override
  public boolean hasRolling() {
    return strategyCase_ == 2;
  }
  /**
   *
   *
   * <pre>
   * RollingStrategy causes a specified number of clusters to be updated
   * concurrently until all clusters are updated.
   * </pre>
   *
   * <code>.google.cloud.configdelivery.v1.RollingStrategy rolling = 2;</code>
   *
   * @return The rolling.
   */
  @java.lang.Override
  public com.google.cloud.configdelivery.v1.RollingStrategy getRolling() {
    if (strategyCase_ == 2) {
      return (com.google.cloud.configdelivery.v1.RollingStrategy) strategy_;
    }
    // Oneof not set to this case: return the immutable default instance.
    return com.google.cloud.configdelivery.v1.RollingStrategy.getDefaultInstance();
  }
  /**
   *
   *
   * <pre>
   * RollingStrategy causes a specified number of clusters to be updated
   * concurrently until all clusters are updated.
   * </pre>
   *
   * <code>.google.cloud.configdelivery.v1.RollingStrategy rolling = 2;</code>
   */
  @java.lang.Override
  public com.google.cloud.configdelivery.v1.RollingStrategyOrBuilder getRollingOrBuilder() {
    if (strategyCase_ == 2) {
      return (com.google.cloud.configdelivery.v1.RollingStrategy) strategy_;
    }
    return com.google.cloud.configdelivery.v1.RollingStrategy.getDefaultInstance();
  }
  // Memoized initialization check: -1 = unknown, 0 = not initialized, 1 = initialized.
  private byte memoizedIsInitialized = -1;
  @java.lang.Override
  public final boolean isInitialized() {
    byte isInitialized = memoizedIsInitialized;
    if (isInitialized == 1) return true;
    if (isInitialized == 0) return false;
    // No required fields in this message, so it is always initialized.
    memoizedIsInitialized = 1;
    return true;
  }
  // Serializes whichever oneof member is set, then any unknown fields.
  @java.lang.Override
  public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
    if (strategyCase_ == 1) {
      output.writeMessage(1, (com.google.cloud.configdelivery.v1.AllAtOnceStrategy) strategy_);
    }
    if (strategyCase_ == 2) {
      output.writeMessage(2, (com.google.cloud.configdelivery.v1.RollingStrategy) strategy_);
    }
    getUnknownFields().writeTo(output);
  }
  // Computes the serialized byte size once and memoizes it (-1 = not yet computed).
  @java.lang.Override
  public int getSerializedSize() {
    int size = memoizedSize;
    if (size != -1) return size;
    size = 0;
    if (strategyCase_ == 1) {
      size +=
          com.google.protobuf.CodedOutputStream.computeMessageSize(
              1, (com.google.cloud.configdelivery.v1.AllAtOnceStrategy) strategy_);
    }
    if (strategyCase_ == 2) {
      size +=
          com.google.protobuf.CodedOutputStream.computeMessageSize(
              2, (com.google.cloud.configdelivery.v1.RollingStrategy) strategy_);
    }
    size += getUnknownFields().getSerializedSize();
    memoizedSize = size;
    return size;
  }
  // Structural equality: same oneof case, equal member message, equal unknown fields.
  @java.lang.Override
  public boolean equals(final java.lang.Object obj) {
    if (obj == this) {
      return true;
    }
    if (!(obj instanceof com.google.cloud.configdelivery.v1.RolloutStrategy)) {
      return super.equals(obj);
    }
    com.google.cloud.configdelivery.v1.RolloutStrategy other =
        (com.google.cloud.configdelivery.v1.RolloutStrategy) obj;
    if (!getStrategyCase().equals(other.getStrategyCase())) return false;
    switch (strategyCase_) {
      case 1:
        if (!getAllAtOnce().equals(other.getAllAtOnce())) return false;
        break;
      case 2:
        if (!getRolling().equals(other.getRolling())) return false;
        break;
      case 0:
      default:
        // Oneof not set: nothing further to compare.
    }
    if (!getUnknownFields().equals(other.getUnknownFields())) return false;
    return true;
  }
  // Hash is memoized (0 = not yet computed); mixes descriptor, set oneof member,
  // and unknown fields with the generator's standard prime multipliers.
  @java.lang.Override
  public int hashCode() {
    if (memoizedHashCode != 0) {
      return memoizedHashCode;
    }
    int hash = 41;
    hash = (19 * hash) + getDescriptor().hashCode();
    switch (strategyCase_) {
      case 1:
        hash = (37 * hash) + ALL_AT_ONCE_FIELD_NUMBER;
        hash = (53 * hash) + getAllAtOnce().hashCode();
        break;
      case 2:
        hash = (37 * hash) + ROLLING_FIELD_NUMBER;
        hash = (53 * hash) + getRolling().hashCode();
        break;
      case 0:
      default:
    }
    hash = (29 * hash) + getUnknownFields().hashCode();
    memoizedHashCode = hash;
    return hash;
  }
  // Standard generated parseFrom overloads: one per input representation
  // (ByteBuffer, ByteString, byte[], InputStream, CodedInputStream), each with
  // and without an extension registry. All delegate to PARSER / the runtime helpers.
  public static com.google.cloud.configdelivery.v1.RolloutStrategy parseFrom(
      java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.cloud.configdelivery.v1.RolloutStrategy parseFrom(
      java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.cloud.configdelivery.v1.RolloutStrategy parseFrom(
      com.google.protobuf.ByteString data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.cloud.configdelivery.v1.RolloutStrategy parseFrom(
      com.google.protobuf.ByteString data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.cloud.configdelivery.v1.RolloutStrategy parseFrom(byte[] data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.cloud.configdelivery.v1.RolloutStrategy parseFrom(
      byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.cloud.configdelivery.v1.RolloutStrategy parseFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }
  public static com.google.cloud.configdelivery.v1.RolloutStrategy parseFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }
  // "Delimited" variants read a varint length prefix before the message payload.
  public static com.google.cloud.configdelivery.v1.RolloutStrategy parseDelimitedFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
  }
  public static com.google.cloud.configdelivery.v1.RolloutStrategy parseDelimitedFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
        PARSER, input, extensionRegistry);
  }
  public static com.google.cloud.configdelivery.v1.RolloutStrategy parseFrom(
      com.google.protobuf.CodedInputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }
  public static com.google.cloud.configdelivery.v1.RolloutStrategy parseFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }
  // Builder factories: new empty builder, builder pre-populated from a prototype,
  // and toBuilder() which avoids a merge when called on the default instance.
  @java.lang.Override
  public Builder newBuilderForType() {
    return newBuilder();
  }
  public static Builder newBuilder() {
    return DEFAULT_INSTANCE.toBuilder();
  }
  public static Builder newBuilder(com.google.cloud.configdelivery.v1.RolloutStrategy prototype) {
    return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
  }
  @java.lang.Override
  public Builder toBuilder() {
    return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
  }
  @java.lang.Override
  protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
    Builder builder = new Builder(parent);
    return builder;
  }
/**
*
*
* <pre>
* RolloutStrategy defines different ways to rollout a resource bundle across
* a set of clusters.
* </pre>
*
* Protobuf type {@code google.cloud.configdelivery.v1.RolloutStrategy}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.cloud.configdelivery.v1.RolloutStrategy)
com.google.cloud.configdelivery.v1.RolloutStrategyOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.configdelivery.v1.ConfigDeliveryProto
.internal_static_google_cloud_configdelivery_v1_RolloutStrategy_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.configdelivery.v1.ConfigDeliveryProto
.internal_static_google_cloud_configdelivery_v1_RolloutStrategy_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.configdelivery.v1.RolloutStrategy.class,
com.google.cloud.configdelivery.v1.RolloutStrategy.Builder.class);
}
// Construct using com.google.cloud.configdelivery.v1.RolloutStrategy.newBuilder()
private Builder() {}
private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
}
@java.lang.Override
public Builder clear() {
super.clear();
bitField0_ = 0;
if (allAtOnceBuilder_ != null) {
allAtOnceBuilder_.clear();
}
if (rollingBuilder_ != null) {
rollingBuilder_.clear();
}
strategyCase_ = 0;
strategy_ = null;
return this;
}
@java.lang.Override
public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
return com.google.cloud.configdelivery.v1.ConfigDeliveryProto
.internal_static_google_cloud_configdelivery_v1_RolloutStrategy_descriptor;
}
@java.lang.Override
public com.google.cloud.configdelivery.v1.RolloutStrategy getDefaultInstanceForType() {
return com.google.cloud.configdelivery.v1.RolloutStrategy.getDefaultInstance();
}
@java.lang.Override
public com.google.cloud.configdelivery.v1.RolloutStrategy build() {
com.google.cloud.configdelivery.v1.RolloutStrategy result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public com.google.cloud.configdelivery.v1.RolloutStrategy buildPartial() {
com.google.cloud.configdelivery.v1.RolloutStrategy result =
new com.google.cloud.configdelivery.v1.RolloutStrategy(this);
if (bitField0_ != 0) {
buildPartial0(result);
}
buildPartialOneofs(result);
onBuilt();
return result;
}
private void buildPartial0(com.google.cloud.configdelivery.v1.RolloutStrategy result) {
int from_bitField0_ = bitField0_;
}
private void buildPartialOneofs(com.google.cloud.configdelivery.v1.RolloutStrategy result) {
result.strategyCase_ = strategyCase_;
result.strategy_ = this.strategy_;
if (strategyCase_ == 1 && allAtOnceBuilder_ != null) {
result.strategy_ = allAtOnceBuilder_.build();
}
if (strategyCase_ == 2 && rollingBuilder_ != null) {
result.strategy_ = rollingBuilder_.build();
}
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof com.google.cloud.configdelivery.v1.RolloutStrategy) {
return mergeFrom((com.google.cloud.configdelivery.v1.RolloutStrategy) other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(com.google.cloud.configdelivery.v1.RolloutStrategy other) {
if (other == com.google.cloud.configdelivery.v1.RolloutStrategy.getDefaultInstance())
return this;
switch (other.getStrategyCase()) {
case ALL_AT_ONCE:
{
mergeAllAtOnce(other.getAllAtOnce());
break;
}
case ROLLING:
{
mergeRolling(other.getRolling());
break;
}
case STRATEGY_NOT_SET:
{
break;
}
}
this.mergeUnknownFields(other.getUnknownFields());
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
return true;
}
@java.lang.Override
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
case 10:
{
input.readMessage(getAllAtOnceFieldBuilder().getBuilder(), extensionRegistry);
strategyCase_ = 1;
break;
} // case 10
case 18:
{
input.readMessage(getRollingFieldBuilder().getBuilder(), extensionRegistry);
strategyCase_ = 2;
break;
} // case 18
default:
{
if (!super.parseUnknownField(input, extensionRegistry, tag)) {
done = true; // was an endgroup tag
}
break;
} // default:
} // switch (tag)
} // while (!done)
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.unwrapIOException();
} finally {
onChanged();
} // finally
return this;
}
private int strategyCase_ = 0;
private java.lang.Object strategy_;
public StrategyCase getStrategyCase() {
return StrategyCase.forNumber(strategyCase_);
}
public Builder clearStrategy() {
strategyCase_ = 0;
strategy_ = null;
onChanged();
return this;
}
private int bitField0_;
private com.google.protobuf.SingleFieldBuilderV3<
com.google.cloud.configdelivery.v1.AllAtOnceStrategy,
com.google.cloud.configdelivery.v1.AllAtOnceStrategy.Builder,
com.google.cloud.configdelivery.v1.AllAtOnceStrategyOrBuilder>
allAtOnceBuilder_;
/**
*
*
* <pre>
* AllAtOnceStrategy causes all clusters to be updated concurrently.
* </pre>
*
* <code>.google.cloud.configdelivery.v1.AllAtOnceStrategy all_at_once = 1;</code>
*
* @return Whether the allAtOnce field is set.
*/
@java.lang.Override
public boolean hasAllAtOnce() {
return strategyCase_ == 1;
}
/**
*
*
* <pre>
* AllAtOnceStrategy causes all clusters to be updated concurrently.
* </pre>
*
* <code>.google.cloud.configdelivery.v1.AllAtOnceStrategy all_at_once = 1;</code>
*
* @return The allAtOnce.
*/
@java.lang.Override
public com.google.cloud.configdelivery.v1.AllAtOnceStrategy getAllAtOnce() {
if (allAtOnceBuilder_ == null) {
if (strategyCase_ == 1) {
return (com.google.cloud.configdelivery.v1.AllAtOnceStrategy) strategy_;
}
return com.google.cloud.configdelivery.v1.AllAtOnceStrategy.getDefaultInstance();
} else {
if (strategyCase_ == 1) {
return allAtOnceBuilder_.getMessage();
}
return com.google.cloud.configdelivery.v1.AllAtOnceStrategy.getDefaultInstance();
}
}
/**
*
*
* <pre>
* AllAtOnceStrategy causes all clusters to be updated concurrently.
* </pre>
*
* <code>.google.cloud.configdelivery.v1.AllAtOnceStrategy all_at_once = 1;</code>
*/
public Builder setAllAtOnce(com.google.cloud.configdelivery.v1.AllAtOnceStrategy value) {
if (allAtOnceBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
strategy_ = value;
onChanged();
} else {
allAtOnceBuilder_.setMessage(value);
}
strategyCase_ = 1;
return this;
}
/**
*
*
* <pre>
* AllAtOnceStrategy causes all clusters to be updated concurrently.
* </pre>
*
* <code>.google.cloud.configdelivery.v1.AllAtOnceStrategy all_at_once = 1;</code>
*/
public Builder setAllAtOnce(
com.google.cloud.configdelivery.v1.AllAtOnceStrategy.Builder builderForValue) {
if (allAtOnceBuilder_ == null) {
strategy_ = builderForValue.build();
onChanged();
} else {
allAtOnceBuilder_.setMessage(builderForValue.build());
}
strategyCase_ = 1;
return this;
}
/**
*
*
* <pre>
* AllAtOnceStrategy causes all clusters to be updated concurrently.
* </pre>
*
* <code>.google.cloud.configdelivery.v1.AllAtOnceStrategy all_at_once = 1;</code>
*/
public Builder mergeAllAtOnce(com.google.cloud.configdelivery.v1.AllAtOnceStrategy value) {
if (allAtOnceBuilder_ == null) {
if (strategyCase_ == 1
&& strategy_
!= com.google.cloud.configdelivery.v1.AllAtOnceStrategy.getDefaultInstance()) {
strategy_ =
com.google.cloud.configdelivery.v1.AllAtOnceStrategy.newBuilder(
(com.google.cloud.configdelivery.v1.AllAtOnceStrategy) strategy_)
.mergeFrom(value)
.buildPartial();
} else {
strategy_ = value;
}
onChanged();
} else {
if (strategyCase_ == 1) {
allAtOnceBuilder_.mergeFrom(value);
} else {
allAtOnceBuilder_.setMessage(value);
}
}
strategyCase_ = 1;
return this;
}
/**
*
*
* <pre>
* AllAtOnceStrategy causes all clusters to be updated concurrently.
* </pre>
*
* <code>.google.cloud.configdelivery.v1.AllAtOnceStrategy all_at_once = 1;</code>
*/
public Builder clearAllAtOnce() {
if (allAtOnceBuilder_ == null) {
if (strategyCase_ == 1) {
strategyCase_ = 0;
strategy_ = null;
onChanged();
}
} else {
if (strategyCase_ == 1) {
strategyCase_ = 0;
strategy_ = null;
}
allAtOnceBuilder_.clear();
}
return this;
}
/**
*
*
* <pre>
* AllAtOnceStrategy causes all clusters to be updated concurrently.
* </pre>
*
* <code>.google.cloud.configdelivery.v1.AllAtOnceStrategy all_at_once = 1;</code>
*/
public com.google.cloud.configdelivery.v1.AllAtOnceStrategy.Builder getAllAtOnceBuilder() {
return getAllAtOnceFieldBuilder().getBuilder();
}
/**
*
*
* <pre>
* AllAtOnceStrategy causes all clusters to be updated concurrently.
* </pre>
*
* <code>.google.cloud.configdelivery.v1.AllAtOnceStrategy all_at_once = 1;</code>
*/
@java.lang.Override
public com.google.cloud.configdelivery.v1.AllAtOnceStrategyOrBuilder getAllAtOnceOrBuilder() {
if ((strategyCase_ == 1) && (allAtOnceBuilder_ != null)) {
return allAtOnceBuilder_.getMessageOrBuilder();
} else {
if (strategyCase_ == 1) {
return (com.google.cloud.configdelivery.v1.AllAtOnceStrategy) strategy_;
}
return com.google.cloud.configdelivery.v1.AllAtOnceStrategy.getDefaultInstance();
}
}
/**
*
*
* <pre>
* AllAtOnceStrategy causes all clusters to be updated concurrently.
* </pre>
*
* <code>.google.cloud.configdelivery.v1.AllAtOnceStrategy all_at_once = 1;</code>
*/
private com.google.protobuf.SingleFieldBuilderV3<
com.google.cloud.configdelivery.v1.AllAtOnceStrategy,
com.google.cloud.configdelivery.v1.AllAtOnceStrategy.Builder,
com.google.cloud.configdelivery.v1.AllAtOnceStrategyOrBuilder>
getAllAtOnceFieldBuilder() {
if (allAtOnceBuilder_ == null) {
if (!(strategyCase_ == 1)) {
strategy_ = com.google.cloud.configdelivery.v1.AllAtOnceStrategy.getDefaultInstance();
}
allAtOnceBuilder_ =
new com.google.protobuf.SingleFieldBuilderV3<
com.google.cloud.configdelivery.v1.AllAtOnceStrategy,
com.google.cloud.configdelivery.v1.AllAtOnceStrategy.Builder,
com.google.cloud.configdelivery.v1.AllAtOnceStrategyOrBuilder>(
(com.google.cloud.configdelivery.v1.AllAtOnceStrategy) strategy_,
getParentForChildren(),
isClean());
strategy_ = null;
}
strategyCase_ = 1;
onChanged();
return allAtOnceBuilder_;
}
private com.google.protobuf.SingleFieldBuilderV3<
com.google.cloud.configdelivery.v1.RollingStrategy,
com.google.cloud.configdelivery.v1.RollingStrategy.Builder,
com.google.cloud.configdelivery.v1.RollingStrategyOrBuilder>
rollingBuilder_;
/**
*
*
* <pre>
* RollingStrategy causes a specified number of clusters to be updated
* concurrently until all clusters are updated.
* </pre>
*
* <code>.google.cloud.configdelivery.v1.RollingStrategy rolling = 2;</code>
*
* @return Whether the rolling field is set.
*/
@java.lang.Override
public boolean hasRolling() {
return strategyCase_ == 2;
}
/**
*
*
* <pre>
* RollingStrategy causes a specified number of clusters to be updated
* concurrently until all clusters are updated.
* </pre>
*
* <code>.google.cloud.configdelivery.v1.RollingStrategy rolling = 2;</code>
*
* @return The rolling.
*/
@java.lang.Override
public com.google.cloud.configdelivery.v1.RollingStrategy getRolling() {
if (rollingBuilder_ == null) {
if (strategyCase_ == 2) {
return (com.google.cloud.configdelivery.v1.RollingStrategy) strategy_;
}
return com.google.cloud.configdelivery.v1.RollingStrategy.getDefaultInstance();
} else {
if (strategyCase_ == 2) {
return rollingBuilder_.getMessage();
}
return com.google.cloud.configdelivery.v1.RollingStrategy.getDefaultInstance();
}
}
/**
*
*
* <pre>
* RollingStrategy causes a specified number of clusters to be updated
* concurrently until all clusters are updated.
* </pre>
*
* <code>.google.cloud.configdelivery.v1.RollingStrategy rolling = 2;</code>
*/
public Builder setRolling(com.google.cloud.configdelivery.v1.RollingStrategy value) {
if (rollingBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
strategy_ = value;
onChanged();
} else {
rollingBuilder_.setMessage(value);
}
strategyCase_ = 2;
return this;
}
/**
*
*
* <pre>
* RollingStrategy causes a specified number of clusters to be updated
* concurrently until all clusters are updated.
* </pre>
*
* <code>.google.cloud.configdelivery.v1.RollingStrategy rolling = 2;</code>
*/
public Builder setRolling(
com.google.cloud.configdelivery.v1.RollingStrategy.Builder builderForValue) {
if (rollingBuilder_ == null) {
strategy_ = builderForValue.build();
onChanged();
} else {
rollingBuilder_.setMessage(builderForValue.build());
}
strategyCase_ = 2;
return this;
}
/**
*
*
* <pre>
* RollingStrategy causes a specified number of clusters to be updated
* concurrently until all clusters are updated.
* </pre>
*
* <code>.google.cloud.configdelivery.v1.RollingStrategy rolling = 2;</code>
*/
public Builder mergeRolling(com.google.cloud.configdelivery.v1.RollingStrategy value) {
if (rollingBuilder_ == null) {
if (strategyCase_ == 2
&& strategy_
!= com.google.cloud.configdelivery.v1.RollingStrategy.getDefaultInstance()) {
strategy_ =
com.google.cloud.configdelivery.v1.RollingStrategy.newBuilder(
(com.google.cloud.configdelivery.v1.RollingStrategy) strategy_)
.mergeFrom(value)
.buildPartial();
} else {
strategy_ = value;
}
onChanged();
} else {
if (strategyCase_ == 2) {
rollingBuilder_.mergeFrom(value);
} else {
rollingBuilder_.setMessage(value);
}
}
strategyCase_ = 2;
return this;
}
/**
*
*
* <pre>
* RollingStrategy causes a specified number of clusters to be updated
* concurrently until all clusters are updated.
* </pre>
*
* <code>.google.cloud.configdelivery.v1.RollingStrategy rolling = 2;</code>
*/
public Builder clearRolling() {
if (rollingBuilder_ == null) {
if (strategyCase_ == 2) {
strategyCase_ = 0;
strategy_ = null;
onChanged();
}
} else {
if (strategyCase_ == 2) {
strategyCase_ = 0;
strategy_ = null;
}
rollingBuilder_.clear();
}
return this;
}
/**
*
*
* <pre>
* RollingStrategy causes a specified number of clusters to be updated
* concurrently until all clusters are updated.
* </pre>
*
* <code>.google.cloud.configdelivery.v1.RollingStrategy rolling = 2;</code>
*/
public com.google.cloud.configdelivery.v1.RollingStrategy.Builder getRollingBuilder() {
return getRollingFieldBuilder().getBuilder();
}
/**
*
*
* <pre>
* RollingStrategy causes a specified number of clusters to be updated
* concurrently until all clusters are updated.
* </pre>
*
* <code>.google.cloud.configdelivery.v1.RollingStrategy rolling = 2;</code>
*/
@java.lang.Override
public com.google.cloud.configdelivery.v1.RollingStrategyOrBuilder getRollingOrBuilder() {
if ((strategyCase_ == 2) && (rollingBuilder_ != null)) {
return rollingBuilder_.getMessageOrBuilder();
} else {
if (strategyCase_ == 2) {
return (com.google.cloud.configdelivery.v1.RollingStrategy) strategy_;
}
return com.google.cloud.configdelivery.v1.RollingStrategy.getDefaultInstance();
}
}
    /**
     *
     *
     * <pre>
     * RollingStrategy causes a specified number of clusters to be updated
     * concurrently until all clusters are updated.
     * </pre>
     *
     * <code>.google.cloud.configdelivery.v1.RollingStrategy rolling = 2;</code>
     */
    private com.google.protobuf.SingleFieldBuilderV3<
            com.google.cloud.configdelivery.v1.RollingStrategy,
            com.google.cloud.configdelivery.v1.RollingStrategy.Builder,
            com.google.cloud.configdelivery.v1.RollingStrategyOrBuilder>
        getRollingFieldBuilder() {
      if (rollingBuilder_ == null) {
        // Seed the builder with the current oneof value, or the default
        // instance if a different oneof case is currently set.
        if (!(strategyCase_ == 2)) {
          strategy_ = com.google.cloud.configdelivery.v1.RollingStrategy.getDefaultInstance();
        }
        rollingBuilder_ =
            new com.google.protobuf.SingleFieldBuilderV3<
                com.google.cloud.configdelivery.v1.RollingStrategy,
                com.google.cloud.configdelivery.v1.RollingStrategy.Builder,
                com.google.cloud.configdelivery.v1.RollingStrategyOrBuilder>(
                (com.google.cloud.configdelivery.v1.RollingStrategy) strategy_,
                getParentForChildren(),
                isClean());
        // The message is now owned by the field builder.
        strategy_ = null;
      }
      // Calling this accessor always selects the `rolling` oneof case.
      strategyCase_ = 2;
      onChanged();
      return rollingBuilder_;
    }
    @java.lang.Override
    public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
      // Standard generated passthrough to the protobuf base builder.
      return super.setUnknownFields(unknownFields);
    }
    @java.lang.Override
    public final Builder mergeUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      // Standard generated passthrough to the protobuf base builder.
      return super.mergeUnknownFields(unknownFields);
    }
// @@protoc_insertion_point(builder_scope:google.cloud.configdelivery.v1.RolloutStrategy)
}
// @@protoc_insertion_point(class_scope:google.cloud.configdelivery.v1.RolloutStrategy)
  // Singleton default (all-fields-unset) instance shared by every use site.
  private static final com.google.cloud.configdelivery.v1.RolloutStrategy DEFAULT_INSTANCE;

  static {
    DEFAULT_INSTANCE = new com.google.cloud.configdelivery.v1.RolloutStrategy();
  }

  public static com.google.cloud.configdelivery.v1.RolloutStrategy getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }
  // Wire-format parser; on any failure the partially-parsed message is attached
  // to the thrown InvalidProtocolBufferException so callers can inspect it.
  private static final com.google.protobuf.Parser<RolloutStrategy> PARSER =
      new com.google.protobuf.AbstractParser<RolloutStrategy>() {
        @java.lang.Override
        public RolloutStrategy parsePartialFrom(
            com.google.protobuf.CodedInputStream input,
            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
            throws com.google.protobuf.InvalidProtocolBufferException {
          Builder builder = newBuilder();
          try {
            builder.mergeFrom(input, extensionRegistry);
          } catch (com.google.protobuf.InvalidProtocolBufferException e) {
            throw e.setUnfinishedMessage(builder.buildPartial());
          } catch (com.google.protobuf.UninitializedMessageException e) {
            throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
          } catch (java.io.IOException e) {
            throw new com.google.protobuf.InvalidProtocolBufferException(e)
                .setUnfinishedMessage(builder.buildPartial());
          }
          return builder.buildPartial();
        }
      };
  // Static accessor for the shared parser instance.
  public static com.google.protobuf.Parser<RolloutStrategy> parser() {
    return PARSER;
  }
  @java.lang.Override
  public com.google.protobuf.Parser<RolloutStrategy> getParserForType() {
    return PARSER;
  }
  @java.lang.Override
  public com.google.cloud.configdelivery.v1.RolloutStrategy getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }
}
|
apache/drill | 37,225 | exec/java-exec/src/main/java/org/apache/drill/exec/store/dfs/WorkspaceSchemaFactory.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.drill.exec.store.dfs;
import static java.util.Collections.unmodifiableList;
import static org.apache.drill.exec.dotdrill.DotDrillType.STATS;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.io.OutputStream;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.Queue;
import java.util.Set;
import java.util.concurrent.ThreadLocalRandom;
import java.util.regex.Pattern;
import java.util.stream.Collectors;
import java.util.stream.Stream;
import org.apache.calcite.schema.Function;
import org.apache.calcite.schema.Table;
import org.apache.commons.lang3.SystemUtils;
import org.apache.commons.lang3.tuple.Pair;
import org.apache.drill.common.exceptions.DrillRuntimeException;
import org.apache.drill.common.exceptions.ExecutionSetupException;
import org.apache.drill.common.exceptions.UserException;
import org.apache.drill.common.logical.FormatPluginConfig;
import org.apache.drill.common.scanner.persistence.ScanResult;
import org.apache.drill.common.util.DrillStringUtils;
import org.apache.drill.common.util.function.CheckedFunction;
import org.apache.drill.exec.ExecConstants;
import org.apache.drill.exec.dotdrill.DotDrillFile;
import org.apache.drill.exec.dotdrill.DotDrillType;
import org.apache.drill.exec.dotdrill.DotDrillUtil;
import org.apache.drill.exec.dotdrill.View;
import org.apache.drill.exec.metastore.store.FileSystemMetadataProviderManager;
import org.apache.drill.exec.metastore.MetadataProviderManager;
import org.apache.drill.exec.metastore.MetastoreMetadataProviderManager;
import org.apache.drill.exec.metastore.MetastoreMetadataProviderManager.MetastoreMetadataProviderConfig;
import org.apache.drill.exec.planner.common.DrillStatsTable;
import org.apache.drill.exec.planner.logical.CreateTableEntry;
import org.apache.drill.exec.planner.logical.DrillTable;
import org.apache.drill.exec.planner.logical.DrillViewTable;
import org.apache.drill.exec.planner.logical.DynamicDrillTable;
import org.apache.drill.exec.planner.logical.FileSystemCreateTableEntry;
import org.apache.drill.exec.planner.sql.ExpandingConcurrentMap;
import org.apache.drill.exec.planner.sql.SchemaUtilities;
import org.apache.drill.exec.record.metadata.schema.FsMetastoreSchemaProvider;
import org.apache.drill.exec.store.AbstractSchema;
import org.apache.drill.exec.store.PartitionNotFoundException;
import org.apache.drill.exec.store.SchemaConfig;
import org.apache.drill.exec.store.easy.json.JSONFormatPlugin;
import org.apache.drill.exec.store.table.function.TableParamDef;
import org.apache.drill.exec.store.table.function.TableSignature;
import org.apache.drill.exec.store.table.function.WithOptionsTableMacro;
import org.apache.drill.exec.util.DrillFileSystemUtil;
import org.apache.drill.exec.store.StorageStrategy;
import org.apache.drill.exec.util.ImpersonationUtil;
import org.apache.drill.metastore.MetastoreRegistry;
import org.apache.drill.metastore.components.tables.MetastoreTableInfo;
import org.apache.drill.metastore.exceptions.MetastoreException;
import org.apache.drill.metastore.metadata.TableInfo;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.permission.FsAction;
import org.apache.hadoop.fs.permission.FsPermission;
import org.apache.hadoop.security.AccessControlException;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.google.common.base.Joiner;
import com.google.common.base.Strings;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.Sets;
public class WorkspaceSchemaFactory {
  private static final Logger logger = LoggerFactory.getLogger(WorkspaceSchemaFactory.class);

  // Matchers used to recognize a file's format; order matters (fallback last).
  private final List<FormatMatcher> fileMatchers;
  // fileMatchers minus the catch-all fallback; used when validating DROP TABLE.
  private final List<FormatMatcher> dropFileMatchers;
  // Subset of fileMatchers whose formats can read whole directories.
  private final List<FormatMatcher> dirMatchers;
  // Transformers that map special table-path syntaxes to file selections.
  private final List<FormatLocationTransformer> formatLocationTransformers;
  private final WorkspaceConfig config;
  private final Configuration fsConf;
  private final String storageEngineName;
  private final String schemaName;
  private final FileSystemPlugin plugin;
  private final ObjectMapper mapper;
  // Root path of this workspace (config.getLocation()).
  private final Path wsPath;
  private final FormatPluginOptionExtractor optionExtractor;
  /**
   * Builds a schema factory for a single workspace of a file-system storage plugin.
   *
   * @param plugin owning file-system plugin (supplies FS configuration and format plugins)
   * @param schemaName name of the workspace schema
   * @param storageEngineName name of the enclosing storage plugin
   * @param config workspace configuration (location, writability, default input format)
   * @param formatMatchers matchers for all configured formats, in priority order
   * @param mapper JSON mapper used for (de)serializing views
   * @param scanResult classpath scan used to discover format-plugin table options
   * @throws ExecutionSetupException if the configured default input format does not exist
   */
  public WorkspaceSchemaFactory(
      FileSystemPlugin plugin,
      String schemaName,
      String storageEngineName,
      WorkspaceConfig config,
      List<FormatMatcher> formatMatchers,
      ObjectMapper mapper,
      ScanResult scanResult) throws ExecutionSetupException {
    this.mapper = mapper;
    this.fsConf = plugin.getFsConf();
    this.plugin = plugin;
    this.config = config;
    this.fileMatchers = new ArrayList<>();
    this.dirMatchers = new ArrayList<>();
    this.formatLocationTransformers = new ArrayList<>();
    this.storageEngineName = storageEngineName;
    this.schemaName = schemaName;
    this.wsPath = new Path(config.getLocation());
    this.optionExtractor = new FormatPluginOptionExtractor(scanResult);
    for (FormatMatcher m : formatMatchers) {
      if (m.supportDirectoryReads()) {
        dirMatchers.add(m);
      }
      fileMatchers.add(m);
      FormatLocationTransformer transformer = m.getFormatLocationTransformer();
      if (transformer != null) {
        formatLocationTransformers.add(transformer);
      }
    }

    // NOTE: Add fallback format matcher if given in the configuration. Make sure fileMatchers is an order-preserving list.
    final String defaultInputFormat = config.getDefaultInputFormat();
    if (!Strings.isNullOrEmpty(defaultInputFormat)) {
      final FormatPlugin formatPlugin = plugin.getFormatPlugin(defaultInputFormat);
      if (formatPlugin == null) {
        final String message = String.format("Unable to find default input format[%s] for workspace[%s.%s]",
            defaultInputFormat, storageEngineName, schemaName);
        throw new ExecutionSetupException(message);
      }
      final FormatMatcher fallbackMatcher = new BasicFormatMatcher(formatPlugin,
          ImmutableList.of(Pattern.compile(".*")), ImmutableList.of());
      fileMatchers.add(fallbackMatcher);
      // Exclude the match-anything fallback from DROP TABLE validation so an
      // unrecognized file format still blocks a drop.
      dropFileMatchers = fileMatchers.subList(0, fileMatchers.size() - 1);
    } else {
      dropFileMatchers = fileMatchers.subList(0, fileMatchers.size());
    }
  }
  /**
   * Checks whether the given user has permission to list files/directories under the workspace directory.
   *
   * @param userName User who is trying to access the workspace.
   * @return True if the user has access. False otherwise.
   */
  public boolean accessible(final String userName) throws IOException {
    // Impersonate the requesting user so the check reflects their permissions,
    // not the Drill process user's.
    final DrillFileSystem fs = ImpersonationUtil.createFileSystem(userName, fsConf);
    return accessible(fs);
  }
  /**
   * Checks whether a FileSystem object has the permission to list/read workspace directory
   * @param fs a DrillFileSystem object that was created with certain user privilege
   * @return True if the user has access. False otherwise.
   */
  public boolean accessible(DrillFileSystem fs) throws IOException {
    try {
      /*
       * For Windows local file system, fs.access ends up using DeprecatedRawLocalFileStatus which has
       * TrustedInstaller as owner, and a member of Administrators group could not satisfy the permission.
       * In this case, we will still use method listStatus.
       * In other cases, we use access method since it is cheaper.
       */
      if (SystemUtils.IS_OS_WINDOWS && fs.getUri().getScheme().equalsIgnoreCase(FileSystemSchemaFactory.LOCAL_FS_SCHEME)) {
        fs.listStatus(wsPath);
      }
      else {
        fs.access(wsPath, FsAction.READ);
      }
    } catch (final UnsupportedOperationException e) {
      // Some file systems implement neither check; treat as accessible.
      logger.trace("The filesystem for this workspace does not support this operation.", e);
    } catch (final FileNotFoundException | AccessControlException e) {
      return false;
    }
    return true;
  }
  // Resolves the "<name>.view.drill" file path for a view in this workspace.
  private Path getViewPath(String name) {
    return DotDrillType.VIEW.getPath(config.getLocation(), name);
  }
  /**
   * Creates the schema instance for this workspace, or returns {@code null}
   * when the supplied (user-impersonated) file system cannot access the
   * workspace root — inaccessible workspaces are simply hidden.
   */
  public WorkspaceSchema createSchema(List<String> parentSchemaPath, SchemaConfig schemaConfig, DrillFileSystem fs) throws IOException {
    if (!accessible(fs)) {
      return null;
    }
    return new WorkspaceSchema(parentSchemaPath, schemaName, schemaConfig, fs, config);
  }
  /** @return the workspace schema name. */
  public String getSchemaName() {
    return schemaName;
  }
  /** @return the owning file-system storage plugin. */
  public FileSystemPlugin getPlugin() {
    return plugin;
  }
// Ensure given tableName is not a stats table
private static void ensureNotStatsTable(final String tableName) {
if (tableName.toLowerCase().endsWith(STATS.getEnding())) {
throw UserException
.validationError()
.message("Given table [%s] is already a stats table. " +
"Cannot perform stats operations on a stats table.", tableName)
.build(logger);
}
}
  // Varargs-to-array helper used to build the equality/hash basis of TableInstance.
  private static Object[] array(Object... objects) {
    return objects;
  }
public static final class TableInstance {
final TableSignature sig;
final List<Object> params;
public TableInstance(TableSignature sig, List<Object> params) {
if (params.size() != sig.getParams().size()) {
throw UserException.parseError()
.message(
"should have as many params (%d) as signature (%d)",
params.size(), sig.getParams().size())
.addContext("table", sig.getName())
.build(logger);
}
this.sig = sig;
this.params = unmodifiableList(params);
}
String presentParams() {
StringBuilder sb = new StringBuilder("(");
boolean first = true;
for (int i = 0; i < params.size(); i++) {
Object param = params.get(i);
if (param != null) {
if (first) {
first = false;
} else {
sb.append(", ");
}
TableParamDef paramDef = sig.getParams().get(i);
sb.append(paramDef.getName()).append(": ").append(paramDef.getType().getSimpleName()).append(" => ").append(param);
}
}
sb.append(")");
return sb.toString();
}
private Object[] toArray() {
return array(sig, params);
}
@Override
public int hashCode() {
return Arrays.hashCode(toArray());
}
@Override
public boolean equals(Object obj) {
if (obj instanceof TableInstance) {
return Arrays.equals(this.toArray(), ((TableInstance)obj).toArray());
}
return false;
}
@Override
public String toString() {
return sig.getName() + (params.size() == 0 ? "" : presentParams());
}
}
public class WorkspaceSchema extends AbstractSchema implements ExpandingConcurrentMap.MapValueFactory<TableInstance, DrillTable> {
    // Lazily-populated cache of resolved tables, keyed by (signature, params).
    private final ExpandingConcurrentMap<TableInstance, DrillTable> tables = new ExpandingConcurrentMap<>(this);
    private final SchemaConfig schemaConfig;
    // File system created with the querying user's privileges.
    private final DrillFileSystem fs;
    // Drill Process User file-system
    private final DrillFileSystem dpsFs;
    private final WorkspaceConfig wsConfig;
    /**
     * Creates the schema view of one workspace for one querying user.
     *
     * @param parentSchemaPath path of the parent schema
     * @param wsName workspace (schema) name
     * @param schemaConfig per-user schema configuration and options
     * @param fs file system impersonating the querying user
     * @param config workspace configuration
     */
    public WorkspaceSchema(
        List<String> parentSchemaPath,
        String wsName,
        SchemaConfig schemaConfig,
        DrillFileSystem fs,
        WorkspaceConfig config
    ) {
      super(parentSchemaPath, wsName);
      this.schemaConfig = schemaConfig;
      this.fs = fs;
      // Separate FS handle with the Drill process user's privileges (stats access).
      this.dpsFs = ImpersonationUtil.createFileSystem(ImpersonationUtil.getProcessUserName(), fsConf);
      this.wsConfig = config;
    }
    // Resolves (and caches) the table for the given instance key.
    DrillTable getDrillTable(TableInstance key) {
      return tables.get(key);
    }
    /**
     * Persists a view as a {@code <name>.view.drill} JSON file in this workspace.
     *
     * @return true if an existing view with the same name was replaced
     */
    @Override
    public boolean createView(View view) throws IOException {
      Path viewPath = getViewPath(view.getName());
      boolean replaced = getFS().exists(viewPath);
      final FsPermission viewPerms =
          new FsPermission(schemaConfig.getOption(ExecConstants.NEW_VIEW_DEFAULT_PERMS_KEY).string_val);
      try (OutputStream stream = DrillFileSystem.create(getFS(), viewPath, viewPerms)) {
        mapper.writeValue(stream, view);
      }
      return replaced;
    }
    /**
     * Lists the immediate sub-directories of a table directory as its partitions.
     * Note: partitionColumns/partitionValues are currently not used for filtering.
     */
    @Override
    public Iterable<String> getSubPartitions(String table,
                                             List<String> partitionColumns,
                                             List<String> partitionValues
    ) throws PartitionNotFoundException {
      List<FileStatus> fileStatuses;
      try {
        fileStatuses = DrillFileSystemUtil.listDirectories(getFS(), new Path(getDefaultLocation(), table), false);
      } catch (IOException e) {
        throw new PartitionNotFoundException("Error finding partitions for table " + table, e);
      }
      return new SubDirectoryList(fileStatuses);
    }
    /** Deletes the view's {@code .view.drill} file (non-recursive delete). */
    @Override
    public void dropView(String viewName) throws IOException {
      getFS().delete(getViewPath(viewName), false);
    }
    /**
     * Collects the names of all views stored in this workspace. Auth errors are
     * rethrown unless the schema is configured to ignore them; other failures
     * degrade to an empty/partial result with a warning.
     */
    private Set<String> getViews() {
      Set<String> viewSet = Sets.newHashSet();
      // Look for files with ".view.drill" extension.
      List<DotDrillFile> files;
      try {
        files = DotDrillUtil.getDotDrills(getFS(), new Path(config.getLocation()), DotDrillType.VIEW);
        for (DotDrillFile f : files) {
          viewSet.add(f.getBaseName());
        }
      } catch (UnsupportedOperationException e) {
        logger.debug("The filesystem for this workspace does not support this operation.", e);
      } catch (AccessControlException e) {
        if (!schemaConfig.getIgnoreAuthErrors()) {
          logger.debug(e.getMessage());
          throw UserException
            .permissionError(e)
            .message("Not authorized to list view tables in schema [%s]", getFullSchemaName())
            .build(logger);
        }
      } catch (Exception e) {
        logger.warn("Failure while trying to list .view.drill files in workspace [{}]", getFullSchemaName(), e);
      }
      return viewSet;
    }
private Set<String> rawTableNames() {
return tables.keySet().stream()
.map(input -> input.sig.getName())
.collect(Collectors.toSet());
}
    /** All table names visible in this schema: cached tables plus stored views. */
    @Override
    public Set<String> getTableNames() {
      return Sets.union(rawTableNames(), getViews());
    }
    /** Table functions are named after the cached tables (views excluded). */
    @Override
    public Set<String> getFunctionNames() {
      return rawTableNames();
    }
    /**
     * Returns the table functions available under {@code name}: the parent
     * schema's functions plus one table macro per format-plugin option signature.
     */
    @Override
    public List<Function> getFunctions(String name) {
      // add parent functions first
      List<Function> functions = new ArrayList<>(super.getFunctions(name));
      List<TableParamDef> tableParameters = getFunctionParameters();
      List<TableSignature> signatures = optionExtractor.getTableSignatures(name, tableParameters);
      signatures.stream()
        .map(signature -> new WithOptionsTableMacro(signature, params -> getDrillTable(new TableInstance(signature, params))))
        .forEach(functions::add);
      return functions;
    }
    // Deserializes a .view.drill file into a View using the shared JSON mapper.
    private View getView(DotDrillFile f) throws IOException {
      assert f.getType() == DotDrillType.VIEW;
      return f.getView(mapper);
    }
    /**
     * If this is the temporary-tables workspace, maps the given name to its
     * session-scoped temporary table name; otherwise returns {@code null}.
     */
    private String getTemporaryName(String name) {
      if (isTemporaryWorkspace()) {
        String tableName = DrillStringUtils.removeLeadingSlash(name);
        return schemaConfig.getTemporaryTableName(tableName);
      }
      return null;
    }
    // True when this schema is the session's designated temporary workspace.
    private boolean isTemporaryWorkspace() {
      return SchemaUtilities.getSchemaPath(schemaPath).equals(schemaConfig.getTemporaryWorkspace());
    }
    /**
     * Resolves a table by name. Resolution order:
     * 1. session temporary-table name mapping (if this is the temp workspace);
     * 2. already-cached table instances;
     * 3. {@code <name>.view.drill} view files;
     * 4. on-disk files/directories (materialized via the table cache).
     * Auth errors are rethrown unless configured to be ignored.
     */
    @Override
    public Table getTable(String tableName) {
      String temporaryName = getTemporaryName(tableName);
      if (temporaryName != null) {
        tableName = temporaryName;
      }
      TableInstance tableKey = new TableInstance(TableSignature.of(tableName), ImmutableList.of());
      // first check existing tables.
      if (tables.alreadyContainsKey(tableKey)) {
        return tables.get(tableKey);
      }

      // then look for files that start with this name and end in .drill.
      List<DotDrillFile> files = Collections.emptyList();
      try {
        try {
          files = DotDrillUtil.getDotDrills(getFS(), new Path(config.getLocation()),
              DrillStringUtils.removeLeadingSlash(tableName), DotDrillType.VIEW);
        } catch (AccessControlException e) {
          if (!schemaConfig.getIgnoreAuthErrors()) {
            logger.debug(e.getMessage());
            throw UserException.permissionError(e)
              .message("Not authorized to list or query tables in schema [%s]", getFullSchemaName())
              .build(logger);
          }
        } catch (IOException e) {
          logger.warn("Failure while trying to list view tables in workspace [{}]", getFullSchemaName(), e);
        }

        for (DotDrillFile f : files) {
          switch (f.getType()) {
          case VIEW:
            try {
              return new DrillViewTable(getView(f), f.getOwner(), schemaConfig.getViewExpansionContext());
            } catch (AccessControlException e) {
              if (!schemaConfig.getIgnoreAuthErrors()) {
                logger.debug(e.getMessage());
                throw UserException.permissionError(e)
                  .message("Not authorized to read view [%s] in schema [%s]", tableName, getFullSchemaName())
                  .build(logger);
              }
            } catch (IOException e) {
              logger.warn("Failure while trying to load {}.view.drill file in workspace [{}]", tableName, getFullSchemaName(), e);
            }
          default:
          }
        }
      } catch (UnsupportedOperationException e) {
        logger.debug("The filesystem for this workspace does not support this operation.", e);
      }
      // Fall back to the file-based table; this triggers create(TableInstance).
      DrillTable table = tables.get(tableKey);
      setMetadataProviderManager(table, tableName);

      return table;
    }
    /**
     * Attaches a schema-file provider to the manager when the
     * {@code store.table.use_schema_file} option is enabled; failures are
     * logged and the table proceeds without a provided schema.
     */
    private void setSchema(MetadataProviderManager providerManager, String tableName) {
      if (schemaConfig.getOption(ExecConstants.STORE_TABLE_USE_SCHEMA_FILE).bool_val) {
        try {
          FsMetastoreSchemaProvider schemaProvider = new FsMetastoreSchemaProvider(this, tableName);
          providerManager.setSchemaProvider(schemaProvider);
        } catch (IOException e) {
          logger.debug("Unable to init schema provider for table [{}]", tableName, e);
        }
      }
    }
private void setMetadataTable(MetadataProviderManager metadataProviderManager, DrillTable table, final String tableName) {
// If this itself is the stats table, then skip it.
if (tableName.toLowerCase().endsWith(STATS.getEnding())) {
return;
}
try {
String statsTableName = getStatsTableName(tableName);
Path statsTableFilePath = getStatsTableFilePath(tableName);
metadataProviderManager.setStatsProvider(new DrillStatsTable(table, getFullSchemaName(), statsTableName,
statsTableFilePath, fs));
} catch (Exception e) {
logger.warn("Failed to find the stats table for table [{}] in schema [{}]",
tableName, getFullSchemaName());
}
}
// Get stats table name for a given table name.
private String getStatsTableName(final String tableName) {
// Access stats file as DRILL process user (not impersonated user)
final Path tablePath = new Path(config.getLocation(), tableName);
try {
String name;
if (dpsFs.isDirectory(tablePath)) {
name = tableName + Path.SEPARATOR + STATS.getEnding();
if (dpsFs.isDirectory(new Path(name))) {
return name;
}
} else {
//TODO: Not really useful. Remove?
name = tableName + STATS.getEnding();
if (dpsFs.isFile(new Path(name))) {
return name;
}
}
return name;
} catch (final Exception e) {
throw new DrillRuntimeException(
String.format("Failed to find the stats for table [%s] in schema [%s]",
tableName, getFullSchemaName()));
}
}
// Get stats table file (JSON) path for the given table name.
private Path getStatsTableFilePath(final String tableName) {
// Access stats file as DRILL process user (not impersonated user)
final Path tablePath = new Path(config.getLocation(), tableName);
try {
Path stFPath = null;
if (dpsFs.isDirectory(tablePath)) {
stFPath = new Path(tablePath, STATS.getEnding()+ Path.SEPARATOR + "0_0.json");
if (dpsFs.isFile(stFPath)) {
return stFPath;
}
}
return stFPath;
} catch (final Exception e) {
throw new DrillRuntimeException(
String.format("Failed to find the the stats for table [%s] in schema [%s]",
tableName, getFullSchemaName()));
}
}
    /** A workspace is mutable (CTAS/DROP allowed) iff configured writable. */
    @Override
    public boolean isMutable() {
      return config.isWritable();
    }
    /** @return the file system impersonating the querying user. */
    public DrillFileSystem getFS() {
      return fs;
    }
    /** @return the workspace root directory path as configured. */
    public String getDefaultLocation() {
      return config.getLocation();
    }
    /**
     * Creates a CTAS entry using the session's configured output format
     * ({@code store.format}).
     */
    @Override
    public CreateTableEntry createNewTable(String tableName, List<String> partitionColumns, StorageStrategy storageStrategy) {
      String storage = schemaConfig.getOption(ExecConstants.OUTPUT_FORMAT_OPTION).string_val;
      FormatPlugin formatPlugin = plugin.getFormatPlugin(storage);
      return createOrAppendToTable(tableName, formatPlugin, partitionColumns, storageStrategy);
    }
    /**
     * Creates a new JSON stats table for {@code tableName}.
     * NOTE: body is currently identical to {@link #appendToStatsTable}.
     */
    @Override
    public CreateTableEntry createStatsTable(String tableName) {
      ensureNotStatsTable(tableName);
      final String statsTableName = getStatsTableName(tableName);
      FormatPlugin formatPlugin = plugin.getFormatPlugin(JSONFormatPlugin.PLUGIN_NAME);
      return createOrAppendToTable(statsTableName, formatPlugin, Collections.emptyList(),
          StorageStrategy.DEFAULT);
    }
    /**
     * Appends to the JSON stats table for {@code tableName}.
     * NOTE: body is currently identical to {@link #createStatsTable}.
     */
    @Override
    public CreateTableEntry appendToStatsTable(String tableName) {
      ensureNotStatsTable(tableName);
      final String statsTableName = getStatsTableName(tableName);
      FormatPlugin formatPlugin = plugin.getFormatPlugin(JSONFormatPlugin.PLUGIN_NAME);
      return createOrAppendToTable(statsTableName, formatPlugin, Collections.emptyList(),
          StorageStrategy.DEFAULT);
    }
    /** Resolves the stats table associated with {@code tableName}, if any. */
    @Override
    public Table getStatsTable(String tableName) {
      return getTable(getStatsTableName(tableName));
    }
    /**
     * Builds the physical create-table entry for a write into this workspace.
     * NOTE(review): the error message reports the workspace's default input
     * format, but a null formatPlugin here means the *requested* output format
     * was not found — message may be misleading; confirm before relying on it.
     */
    private CreateTableEntry createOrAppendToTable(String tableName, FormatPlugin formatPlugin,
        List<String> partitionColumns, StorageStrategy storageStrategy) {
      if (formatPlugin == null) {
        throw new UnsupportedOperationException(
          String.format("Unsupported format '%s' in workspace '%s'", config.getDefaultInputFormat(),
              Joiner.on(".").join(getSchemaPath())));
      }

      return new FileSystemCreateTableEntry(
          (FileSystemConfig) plugin.getConfig(),
          formatPlugin,
          config.getLocation() + Path.SEPARATOR + tableName,
          partitionColumns,
          storageStrategy);
    }
    /** Schema type identifier: the file-system plugin's config name. */
    @Override
    public String getTypeName() {
      return FileSystemConfig.NAME;
    }
    /**
     * Cache-miss factory for {@link ExpandingConcurrentMap}: materializes a
     * DrillTable for the key by resolving its file selection and matching a
     * format. Returns {@code null} when nothing matches or the path is absent;
     * auth errors are rethrown unless configured to be ignored.
     */
    @Override
    public DrillTable create(TableInstance key) {
      try {
        FileSelectionInspector inspector = new FileSelectionInspector(key);
        if (inspector.fileSelection == null) {
          return null;
        }
        DrillTable table = inspector.matchFormat();
        if (key.sig.getParams().size() == 0) {
          return table;
        } else {
          // Table-function invocation: apply the supplied format options.
          return parseTableFunction(key, inspector, table);
        }
      } catch (AccessControlException e) {
        if (!schemaConfig.getIgnoreAuthErrors()) {
          logger.debug(e.getMessage());
          throw UserException.permissionError(e)
            .message("Not authorized to read table [%s] in schema [%s]", key, getFullSchemaName())
            .build(logger);
        }
      } catch (IOException e) {
        logger.debug("Failed to create DrillTable with root {} and name {}", config.getLocation(), key, e);
      }
      return null;
    }
    /**
     * Builds a table for a parameterized table-function call: merges the
     * supplied options into the matched format's config, then applies any
     * trailing common (schema/metadata) parameters.
     */
    private DrillTable parseTableFunction(TableInstance key,
        FileSelectionInspector inspector, DrillTable table) {
      FileSelection newSelection = inspector.selection();
      if (newSelection.isEmptyDirectory()) {
        // Empty directory: schemaless dynamic table, format options do not apply.
        return new DynamicDrillTable(plugin, storageEngineName, schemaConfig.getUserName(),
            inspector.fileSelection);
      }
      FormatPluginConfig baseConfig = inspector.formatMatch == null
          ? null : inspector.formatMatch.getFormatPlugin().getConfig();
      FormatPluginConfig formatConfig = optionExtractor.createConfigForTable(key, mapper, baseConfig);
      FormatSelection selection = new FormatSelection(formatConfig, newSelection);
      DrillTable drillTable = new DynamicDrillTable(plugin, storageEngineName, schemaConfig.getUserName(), selection);
      setMetadataProviderManager(drillTable, key.sig.getName());

      List<TableParamDef> commonParams = key.sig.getCommonParams();
      if (commonParams.isEmpty()) {
        return drillTable;
      }
      // extract only common parameters related values
      List<Object> paramValues = key.params.subList(key.params.size() - commonParams.size(), key.params.size());
      return applyFunctionParameters(drillTable, commonParams, paramValues);
    }
    /**
     * Expands given file selection if it has directories.
     * If expanded file selection is null (i.e. directory is empty), sets empty directory status to true.
     *
     * @param fileSelection file selection
     * @param hasDirectories flag that indicates if given file selection has directories
     * @return revisited file selection
     */
    private FileSelection expandSelection(FileSelection fileSelection, boolean hasDirectories) throws IOException {
      FileSelection newSelection;
      if (hasDirectories) {
        // LIMIT 0 optimization: a single file is enough to derive the schema.
        newSelection = schemaConfig.getOption(ExecConstants.FILE_LISTING_LIMIT0_OPT_KEY).bool_val
            ? fileSelection.selectAnyFile(getFS())
            : fileSelection.minusDirectories(getFS());
      } else {
        // We don't bother with single-file optimisation in this case
        newSelection = fileSelection;
      }

      if (newSelection == null) {
        // empty directory / selection means that this is the empty and schemaless table
        fileSelection.setEmptyDirectoryStatus();
        return fileSelection;
      }
      return newSelection;
    }
private FormatMatcher findMatcher(FileStatus file) {
try {
for (FormatMatcher m : dropFileMatchers) {
if (m.isFileReadable(getFS(), file)) {
return m;
}
}
} catch (IOException e) {
logger.debug("Failed to find format matcher for file: {}", file, e);
}
return null;
}
    /**
     * Wires a metadata provider manager onto the table: Metastore-backed when
     * enabled and the table is registered there, otherwise file-system based.
     * Also attaches the stats provider and (optionally) the schema-file provider.
     * No-op when {@code table} is null.
     */
    private void setMetadataProviderManager(DrillTable table, String tableName) {
      if (table != null) {
        MetadataProviderManager providerManager = null;

        if (schemaConfig.getOption(ExecConstants.METASTORE_ENABLED).bool_val) {
          try {
            MetastoreRegistry metastoreRegistry = plugin.getContext().getMetastoreRegistry();
            TableInfo tableInfo = TableInfo.builder()
                .storagePlugin(plugin.getName())
                .workspace(schemaName)
                .name(tableName)
                .build();

            MetastoreTableInfo metastoreTableInfo = metastoreRegistry.get()
                .tables()
                .basicRequests()
                .metastoreTableInfo(tableInfo);
            if (metastoreTableInfo.isExists()) {
              providerManager = new MetastoreMetadataProviderManager(metastoreRegistry, tableInfo,
                  new MetastoreMetadataProviderConfig(schemaConfig.getOption(ExecConstants.METASTORE_USE_SCHEMA_METADATA).bool_val,
                      schemaConfig.getOption(ExecConstants.METASTORE_USE_STATISTICS_METADATA).bool_val,
                      schemaConfig.getOption(ExecConstants.METASTORE_FALLBACK_TO_FILE_METADATA).bool_val));
            }
          } catch (MetastoreException e) {
            // Metastore problems are non-fatal; fall back to file-system metadata.
            logger.warn("Exception happened during obtaining Metastore instance. File system metadata provider will be used.", e);
          }
        }
        if (providerManager == null) {
          providerManager = FileSystemMetadataProviderManager.init();
        }
        setMetadataTable(providerManager, table, tableName);
        setSchema(providerManager, tableName);
        table.setTableMetadataProviderManager(providerManager);
      }
    }
    /** Cache eviction hook — DrillTables hold no resources, so nothing to release. */
    @Override
    public void destroy(DrillTable value) {
    }
    /**
     * Check if the table contains homogeneous files that can be read by Drill. Eg: parquet, json csv etc.
     * However if it contains more than one of these formats or a totally different file format that Drill cannot
     * understand then we will raise an exception.
     * @param tableName name of the table to be checked for homogeneous property
     * @return true if table contains homogeneous files, false otherwise
     * @throws IOException is case of problems accessing table files
     */
    private boolean isHomogeneous(String tableName) throws IOException {
      FileSelection fileSelection = FileSelection.create(getFS(), config.getLocation(), tableName, config.allowAccessOutsideWorkspace());

      if (fileSelection == null) {
        throw UserException
            .validationError()
            .message(String.format("Table [%s] not found", tableName))
            .build(logger);
      }

      // BFS over the table tree: lock onto the format of the first regular file
      // and require every subsequent file to be readable by that same matcher.
      FormatMatcher matcher = null;
      Queue<FileStatus> listOfFiles = new LinkedList<>(fileSelection.getStatuses(getFS()));

      while (!listOfFiles.isEmpty()) {
        FileStatus currentFile = listOfFiles.poll();
        if (currentFile.isDirectory()) {
          listOfFiles.addAll(DrillFileSystemUtil.listFiles(getFS(), currentFile.getPath(), true));
        } else {
          if (matcher != null) {
            if (!matcher.isFileReadable(getFS(), currentFile)) {
              return false;
            }
          } else {
            matcher = findMatcher(currentFile);
            // Did not match any of the file patterns, exit
            if (matcher == null) {
              return false;
            }
          }
        }
      }
      return true;
    }
    /**
     * We check if the table contains homogeneous file formats that Drill can read. Once the checks are performed
     * we rename the file to start with an "_". After the rename we issue a recursive delete of the directory.
     * @param table - Path of table to be dropped
     */
    @Override
    public void dropTable(String table) {
      DrillFileSystem fs = getFS();
      String defaultLocation = getDefaultLocation();
      try {
        if (!isHomogeneous(table)) {
          throw UserException
              .validationError()
              .message("Table contains different file formats. \n" +
                  "Drop Table is only supported for directories that contain homogeneous file formats consumable by Drill")
              .build(logger);
        }

        StringBuilder tableRenameBuilder = new StringBuilder();
        int lastSlashIndex = table.lastIndexOf(Path.SEPARATOR);
        if (lastSlashIndex != -1) {
          // Keep the parent directory portion of the table path intact.
          tableRenameBuilder.append(table, 0, lastSlashIndex + 1);
        }
        // Generate unique identifier which will be added as a suffix to the table name
        ThreadLocalRandom r = ThreadLocalRandom.current();
        long time =  (System.currentTimeMillis()/1000);
        Long p1 = ((Integer.MAX_VALUE - time) << 32) + r.nextInt();
        Long p2 = r.nextLong();
        final String fileNameDelimiter = DrillFileSystem.UNDERSCORE_PREFIX;
        String[] pathSplit = table.split(Path.SEPARATOR);
        /*
         * Builds the string for the renamed table
         * Prefixes the table name with an underscore (intent for this to be treated as a hidden file)
         * and suffixes the table name with unique identifiers (similar to how we generate query id's)
         * separated by underscores
         */
        tableRenameBuilder
            .append(DrillFileSystem.UNDERSCORE_PREFIX)
            .append(pathSplit[pathSplit.length - 1])
            .append(fileNameDelimiter)
            .append(p1.toString())
            .append(fileNameDelimiter)
            .append(p2.toString());

        String tableRename = tableRenameBuilder.toString();
        // Rename-then-delete so concurrent readers stop seeing the table
        // immediately, even if the recursive delete takes a while.
        fs.rename(new Path(defaultLocation, table), new Path(defaultLocation, tableRename));
        fs.delete(new Path(defaultLocation, tableRename), true);
      } catch (AccessControlException e) {
        throw UserException
            .permissionError(e)
            .message("Unauthorized to drop table")
            .build(logger);
      } catch (IOException e) {
        throw UserException
            .dataWriteError(e)
            .message("Failed to drop table: " + e.getMessage())
            .build(logger);
      }
    }
    /** Pairs every cached table (with its JDBC type) and every stored view (as VIEW). */
    @Override
    public List<Map.Entry<String, TableType>> getTableNamesAndTypes() {
      return Stream.concat(
          tables.entrySet().stream().map(kv -> Pair.of(kv.getKey().sig.getName(), kv.getValue().getJdbcTableType())),
          getViews().stream().map(viewName -> Pair.of(viewName, TableType.VIEW))
      ).collect(Collectors.toList());
    }
  /**
   * Compute and retain file selection and format match properties used
   * by multiple functions above. One instance inspects a single table path:
   * it resolves the {@link FileSelection}, remembers whether it contains
   * directories, and can locate the {@link FormatMatcher} able to read it.
   */
  private class FileSelectionInspector {
    private final TableInstance key;
    private final DrillFileSystem fs;
    // Resolved selection for the table path; null when the path could not be
    // resolved directly and no location transformer produced one.
    public final FileSelection fileSelection;
    public final boolean hasDirectories;
    // Expanded selection, computed lazily inside matchFormat().
    private FileSelection newSelection;
    // The matcher that recognized the table's format; set by matchFormat().
    public FormatMatcher formatMatch;

    public FileSelectionInspector(TableInstance key) throws IOException {
      this.key = key;
      this.fs = getFS();
      String path = key.sig.getName();
      FileSelection fileSelection = getFileSelection(path);
      if (fileSelection == null) {
        // Direct lookup failed: let any applicable format-location transformer
        // rewrite the path and retry resolution through getFileSelection.
        fileSelection = formatLocationTransformers.stream()
            .filter(t -> t.canTransform(path))
            .map(t -> t.transform(path, (CheckedFunction<String, FileSelection, IOException>) this::getFileSelection))
            .findFirst()
            .orElse(null);
      }
      this.fileSelection = fileSelection;
      this.hasDirectories = fileSelection != null && fileSelection.containsDirectories(fs);
    }

    /** Resolves {@code path} against the workspace location into a selection. */
    private FileSelection getFileSelection(String path) throws IOException {
      return FileSelection.create(fs, config.getLocation(), path, config.allowAccessOutsideWorkspace());
    }

    /**
     * Finds a format plugin able to read this selection and wraps it in a
     * {@link DrillTable}. Directory-capable matchers are consulted first on the
     * raw selection; the selection is then expanded and file matchers tried.
     * Returns null when no matcher accepts the selection.
     */
    protected DrillTable matchFormat() throws IOException {
      if (hasDirectories) {
        for (final FormatMatcher matcher : dirMatchers) {
          try {
            DrillTable table = matcher.isReadable(getFS(), fileSelection, plugin, storageEngineName, schemaConfig);
            if (table != null) {
              formatMatch = matcher;
              setMetadataProviderManager(table, key.sig.getName());
              return table;
            }
          } catch (IOException e) {
            // One matcher failing to read is not fatal; try the next matcher.
            logger.debug("File read failed.", e);
          }
        }
      }
      newSelection = expandSelection(fileSelection, hasDirectories);
      if (newSelection.isEmptyDirectory()) {
        if (wsConfig.getDefaultInputFormat() == null) {
          throw UserException.validationError()
              .message("No files were found and no default format is set on the queried workspace.")
              .addContext("workspace", Joiner.on(".").join(getSchemaPath()))
              .addContext("table", key.sig.getName())
              .build(logger);
        }
        // Empty directory but a default input format exists: hand back a
        // dynamic table and let the default format drive the schema.
        return new DynamicDrillTable(plugin, storageEngineName, schemaConfig.getUserName(), fileSelection);
      }
      for (final FormatMatcher matcher : fileMatchers) {
        DrillTable table = matcher.isReadable(getFS(), newSelection, plugin, storageEngineName, schemaConfig);
        if (table != null) {
          formatMatch = matcher;
          setMetadataProviderManager(table, key.sig.getName());
          return table;
        }
      }
      return null;
    }

    /** Returns the expanded selection when available, else the original one. */
    public FileSelection selection() {
      return newSelection != null ? newSelection : fileSelection;
    }
  }
}
}
|
apache/helix | 37,132 | helix-rest/src/main/java/org/apache/helix/rest/server/resources/helix/PerInstanceAccessor.java | package org.apache.helix.rest.server.resources.helix;
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
import java.io.IOException;
import java.util.Collections;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import javax.ws.rs.Consumes;
import javax.ws.rs.DELETE;
import javax.ws.rs.DefaultValue;
import javax.ws.rs.GET;
import javax.ws.rs.POST;
import javax.ws.rs.PUT;
import javax.ws.rs.Path;
import javax.ws.rs.PathParam;
import javax.ws.rs.QueryParam;
import javax.ws.rs.core.MediaType;
import javax.ws.rs.core.Response;
import com.codahale.metrics.annotation.ResponseMetered;
import com.codahale.metrics.annotation.Timed;
import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.node.ArrayNode;
import com.fasterxml.jackson.databind.node.JsonNodeFactory;
import com.fasterxml.jackson.databind.node.ObjectNode;
import com.google.common.collect.ImmutableMap;
import org.apache.helix.ConfigAccessor;
import org.apache.helix.HelixAdmin;
import org.apache.helix.HelixDataAccessor;
import org.apache.helix.HelixException;
import org.apache.helix.constants.InstanceConstants;
import org.apache.helix.manager.zk.ZKHelixDataAccessor;
import org.apache.helix.manager.zk.ZkBaseDataAccessor;
import org.apache.helix.model.CurrentState;
import org.apache.helix.model.Error;
import org.apache.helix.model.HealthStat;
import org.apache.helix.model.HelixConfigScope;
import org.apache.helix.model.InstanceConfig;
import org.apache.helix.model.LiveInstance;
import org.apache.helix.model.Message;
import org.apache.helix.model.ParticipantHistory;
import org.apache.helix.model.builder.HelixConfigScopeBuilder;
import org.apache.helix.rest.clusterMaintenanceService.HealthCheck;
import org.apache.helix.rest.clusterMaintenanceService.MaintenanceManagementService;
import org.apache.helix.rest.common.HttpConstants;
import org.apache.helix.rest.server.filters.ClusterAuth;
import org.apache.helix.rest.server.json.instance.InstanceInfo;
import org.apache.helix.rest.server.json.instance.StoppableCheck;
import org.apache.helix.util.InstanceUtil;
import org.apache.helix.zookeeper.datamodel.ZNRecord;
import org.eclipse.jetty.util.StringUtil;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@ClusterAuth
@Path("/clusters/{clusterId}/instances/{instanceName}")
public class PerInstanceAccessor extends AbstractHelixResource {
private final static Logger LOG = LoggerFactory.getLogger(PerInstanceAccessor.class);
public enum PerInstanceProperties {
config,
liveInstance,
resource,
resources,
partitions,
errors,
new_messages,
read_messages,
total_message_count,
read_message_count,
healthreports,
instanceTags,
health_check_list,
health_check_config,
operation_list,
operation_config,
continueOnFailures,
skipZKRead,
performOperation
}
private static class MaintenanceOpInputFields {
List<String> healthChecks = null;
Map<String, String> healthCheckConfig = null;
List<String> operations = null;
Map<String, String> operationConfig = null;
Set<String> nonBlockingHelixCheck = new HashSet<>();
boolean skipZKRead = false;
boolean performOperation = true;
}
@ResponseMetered(name = HttpConstants.READ_REQUEST)
@Timed(name = HttpConstants.READ_REQUEST)
@GET
public Response getInstanceById(@PathParam("clusterId") String clusterId,
@PathParam("instanceName") String instanceName, @QueryParam("skipZKRead") String skipZKRead,
@DefaultValue("getInstance") @QueryParam("command") String command) {
// Get the command. If not provided, the default would be "getInstance"
Command cmd;
try {
cmd = Command.valueOf(command);
} catch (Exception e) {
return badRequest("Invalid command : " + command);
}
switch (cmd) {
case getInstance:
HelixDataAccessor dataAccessor = getDataAccssor(clusterId);
// TODO reduce GC by dependency injection
MaintenanceManagementService service =
new MaintenanceManagementService((ZKHelixDataAccessor) dataAccessor, getConfigAccessor(),
Boolean.parseBoolean(skipZKRead), getNamespace());
InstanceInfo instanceInfo = service.getInstanceHealthInfo(clusterId, instanceName,
HealthCheck.STARTED_AND_HEALTH_CHECK_LIST);
String instanceInfoString;
try {
instanceInfoString = OBJECT_MAPPER.writeValueAsString(instanceInfo);
} catch (JsonProcessingException e) {
return serverError(e);
}
return OK(instanceInfoString);
case validateWeight:
// Validates instanceConfig for WAGED rebalance
HelixAdmin admin = getHelixAdmin();
Map<String, Boolean> validationResultMap;
try {
validationResultMap = admin.validateInstancesForWagedRebalance(clusterId,
Collections.singletonList(instanceName));
} catch (HelixException e) {
return badRequest(e.getMessage());
}
return JSONRepresentation(validationResultMap);
default:
LOG.error("Unsupported command :" + command);
return badRequest("Unsupported command :" + command);
}
}
  /**
   * Performs health checks for an instance to answer if it is stoppable.
   *
   * @param jsonContent json payload; may carry a "customized_values" field that
   *                    is forwarded verbatim to user-defined checks
   * @param clusterId cluster id
   * @param instanceName Instance name to be checked
   * @param skipZKRead skip reading from zk server
   * @param continueOnFailures whether or not continue to perform the subsequent checks if previous
   *                           check fails. If false, when helix own check fails, the subsequent
   *                           custom checks will not be performed.
   * @param skipHealthCheckCategories StoppableCheck Categories to skip.
   * @return json response representing if queried instance is stoppable
   * @throws IOException if there is any IO/network error
   */
  @ResponseMetered(name = HttpConstants.WRITE_REQUEST)
  @Timed(name = HttpConstants.WRITE_REQUEST)
  @POST
  @Path("stoppable")
  @Consumes(MediaType.APPLICATION_JSON)
  public Response isInstanceStoppable(String jsonContent, @PathParam("clusterId") String clusterId,
      @PathParam("instanceName") String instanceName, @QueryParam("skipZKRead") boolean skipZKRead,
      @QueryParam("continueOnFailures") boolean continueOnFailures,
      @QueryParam("skipHealthCheckCategories") String skipHealthCheckCategories)
      throws IOException {
    Set<StoppableCheck.Category> skipHealthCheckCategorySet;
    try {
      // Parse the comma-separated categories and reject any category that is
      // not in the service's skippable whitelist.
      skipHealthCheckCategorySet = skipHealthCheckCategories != null
          ? StoppableCheck.Category.categorySetFromCommaSeperatedString(skipHealthCheckCategories)
          : Collections.emptySet();
      if (!MaintenanceManagementService.SKIPPABLE_HEALTH_CHECK_CATEGORIES.containsAll(
          skipHealthCheckCategorySet)) {
        throw new IllegalArgumentException(
            "Some of the provided skipHealthCheckCategories are not skippable. The supported skippable categories are: "
                + MaintenanceManagementService.SKIPPABLE_HEALTH_CHECK_CATEGORIES);
      }
    } catch (Exception e) {
      return badRequest("Invalid skipHealthCheckCategories: " + skipHealthCheckCategories + "\n"
          + e.getMessage());
    }

    HelixDataAccessor dataAccessor = getDataAccssor(clusterId);
    MaintenanceManagementService maintenanceService =
        new MaintenanceManagementService((ZKHelixDataAccessor) dataAccessor, getConfigAccessor(),
            skipZKRead, continueOnFailures, skipHealthCheckCategorySet, getNamespace());
    StoppableCheck stoppableCheck;
    try {
      JsonNode node = null;
      if (jsonContent.length() != 0) {
        node = OBJECT_MAPPER.readTree(jsonContent);
      }
      if (node == null) {
        // Empty or unparseable body: nothing to evaluate.
        return badRequest("Invalid input for content : " + jsonContent);
      }
      // Optional customized payload is passed through to custom checks as-is.
      String customizedInput = null;
      if (node.get(InstancesAccessor.InstancesProperties.customized_values.name()) != null) {
        customizedInput = node.get(InstancesAccessor.InstancesProperties.customized_values.name()).toString();
      }
      stoppableCheck =
          maintenanceService.getInstanceStoppableCheck(clusterId, instanceName, customizedInput);
    } catch (HelixException e) {
      LOG.error("Current cluster: {}, instance: {} has issue with health checks!", clusterId,
          instanceName, e);
      return serverError(e);
    }
    return OK(OBJECT_MAPPER.writeValueAsString(stoppableCheck));
  }
/**
* Performs health checks, user designed operation check and execution for take an instance.
*
* @param jsonContent json payload
* @param clusterId cluster id
* @param instanceName Instance name to be checked
* @return json response representing if queried instance is stoppable
* @throws IOException if there is any IO/network error
*/
@ResponseMetered(name = HttpConstants.WRITE_REQUEST)
@Timed(name = HttpConstants.WRITE_REQUEST)
@POST
@Path("takeInstance")
@Consumes(MediaType.APPLICATION_JSON)
public Response takeSingleInstance(
String jsonContent,
@PathParam("clusterId") String clusterId,
@PathParam("instanceName") String instanceName){
try {
MaintenanceOpInputFields inputFields = readMaintenanceInputFromJson(jsonContent);
if (inputFields == null) {
return badRequest("Invalid input for content : " + jsonContent);
}
MaintenanceManagementService maintenanceManagementService =
new MaintenanceManagementService((ZKHelixDataAccessor) getDataAccssor(clusterId),
getConfigAccessor(), inputFields.skipZKRead, inputFields.nonBlockingHelixCheck,
getNamespace());
return JSONRepresentation(maintenanceManagementService
.takeInstance(clusterId, instanceName, inputFields.healthChecks,
inputFields.healthCheckConfig,
inputFields.operations,
inputFields.operationConfig, inputFields.performOperation));
} catch (Exception e) {
LOG.error("Failed to takeInstances:", e);
return badRequest("Failed to takeInstances: " + e.getMessage());
}
}
/**
* Performs health checks, user designed operation check and execution for free an instance.
*
* @param jsonContent json payload
* @param clusterId cluster id
* @param instanceName Instance name to be checked
* @return json response representing if queried instance is stoppable
* @throws IOException if there is any IO/network error
*/
@ResponseMetered(name = HttpConstants.WRITE_REQUEST)
@Timed(name = HttpConstants.WRITE_REQUEST)
@POST
@Path("freeInstance")
@Consumes(MediaType.APPLICATION_JSON)
public Response freeSingleInstance(
String jsonContent,
@PathParam("clusterId") String clusterId,
@PathParam("instanceName") String instanceName){
try {
MaintenanceOpInputFields inputFields = readMaintenanceInputFromJson(jsonContent);
if (inputFields == null) {
return badRequest("Invalid input for content : " + jsonContent);
}
if (inputFields.healthChecks.size() != 0) {
LOG.warn("freeSingleInstance won't perform user passed health check.");
}
MaintenanceManagementService maintenanceManagementService =
new MaintenanceManagementService((ZKHelixDataAccessor) getDataAccssor(clusterId),
getConfigAccessor(), inputFields.skipZKRead, inputFields.nonBlockingHelixCheck,
getNamespace());
return JSONRepresentation(maintenanceManagementService
.freeInstance(clusterId, instanceName, inputFields.healthChecks,
inputFields.healthCheckConfig,
inputFields.operations,
inputFields.operationConfig, inputFields.performOperation));
} catch (Exception e) {
LOG.error("Failed to takeInstances:", e);
return badRequest("Failed to takeInstances: " + e.getMessage());
}
}
private MaintenanceOpInputFields readMaintenanceInputFromJson(String jsonContent) throws IOException {
JsonNode node = null;
if (jsonContent.length() != 0) {
node = OBJECT_MAPPER.readTree(jsonContent);
}
if (node == null) {
return null;
}
MaintenanceOpInputFields inputFields = new MaintenanceOpInputFields();
String continueOnFailuresName = PerInstanceProperties.continueOnFailures.name();
String skipZKReadName = PerInstanceProperties.skipZKRead.name();
String performOperation = PerInstanceProperties.performOperation.name();
inputFields.healthChecks = MaintenanceManagementService
.getListFromJsonPayload(node.get(PerInstanceProperties.health_check_list.name()));
inputFields.healthCheckConfig = MaintenanceManagementService
.getMapFromJsonPayload(node.get(PerInstanceProperties.health_check_config.name()));
if (inputFields.healthCheckConfig != null) {
if (inputFields.healthCheckConfig.containsKey(continueOnFailuresName)) {
inputFields.nonBlockingHelixCheck = new HashSet<String>(MaintenanceManagementService
.getListFromJsonPayload(inputFields.healthCheckConfig.get(continueOnFailuresName)));
// healthCheckConfig will be passed to customer's health check directly, we need to
// remove unrelated kc paris.
inputFields.healthCheckConfig.remove(continueOnFailuresName);
}
if (inputFields.healthCheckConfig.containsKey(skipZKReadName)) {
inputFields.skipZKRead =
Boolean.parseBoolean(inputFields.healthCheckConfig.get(skipZKReadName));
inputFields.healthCheckConfig.remove(skipZKReadName);
}
}
inputFields.operations = MaintenanceManagementService
.getListFromJsonPayload(node.get(PerInstanceProperties.operation_list.name()));
inputFields.operationConfig = MaintenanceManagementService
.getMapFromJsonPayload(node.get(PerInstanceProperties.operation_config.name()));
if (inputFields.operationConfig != null && inputFields.operationConfig
.containsKey(performOperation)) {
inputFields.performOperation =
Boolean.parseBoolean(inputFields.operationConfig.get(performOperation));
}
LOG.debug("Input fields for take/free Instance" + inputFields.toString());
return inputFields;
}
@ResponseMetered(name = HttpConstants.WRITE_REQUEST)
@Timed(name = HttpConstants.WRITE_REQUEST)
@PUT
public Response addInstance(@PathParam("clusterId") String clusterId,
@PathParam("instanceName") String instanceName, String content) {
HelixAdmin admin = getHelixAdmin();
ZNRecord record;
try {
record = toZNRecord(content);
} catch (IOException e) {
LOG.error("Failed to deserialize user's input " + content + ", Exception: " + e);
return badRequest("Input is not a vaild ZNRecord!");
}
try {
admin.addInstance(clusterId, new InstanceConfig(record));
} catch (Exception ex) {
LOG.error("Error in adding an instance: " + instanceName, ex);
return serverError(ex);
}
return OK();
}
  /**
   * Multiplexed update endpoint for a single instance. The {@code command}
   * query parameter selects the operation: enable/disable, reset partitions,
   * set partitions to ERROR, set/complete instance operations, tag management,
   * partition enable/disable, evacuation status and force-kill.
   *
   * @param content optional JSON body; commands that act on partitions or tags
   *                read the resource/partitions/instanceTags fields from it
   */
  @ResponseMetered(name = HttpConstants.WRITE_REQUEST)
  @Timed(name = HttpConstants.WRITE_REQUEST)
  @POST
  public Response updateInstance(@PathParam("clusterId") String clusterId,
      @PathParam("instanceName") String instanceName, @QueryParam("command") String command,
      @QueryParam("instanceOperation") InstanceConstants.InstanceOperation instanceOperation,
      @QueryParam("instanceOperationSource") InstanceConstants.InstanceOperationSource instanceOperationSource,
      @QueryParam("reason") String reason,
      @Deprecated @QueryParam("instanceDisabledType") String disabledType,
      @Deprecated @QueryParam("instanceDisabledReason") String disabledReason,
      @QueryParam("force") boolean force, String content) {
    Command cmd;
    try {
      cmd = Command.valueOf(command);
    } catch (Exception e) {
      return badRequest("Invalid command : " + command);
    }

    HelixAdmin admin = getHelixAdmin();
    try {
      // Body is optional; only some commands need it (validated per-case below).
      JsonNode node = null;
      if (content.length() != 0) {
        node = OBJECT_MAPPER.readTree(content);
      }
      switch (cmd) {
        case enable:
          admin.enableInstance(clusterId, instanceName, true);
          break;
        case disable:
          // Deprecated disabledType/disabledReason query params still honored.
          InstanceConstants.InstanceDisabledType disabledTypeEnum = null;
          if (disabledType != null) {
            try {
              disabledTypeEnum = InstanceConstants.InstanceDisabledType.valueOf(disabledType);
            } catch (IllegalArgumentException ex) {
              return badRequest("Invalid instanceDisabledType!");
            }
          }
          admin.enableInstance(clusterId, instanceName, false, disabledTypeEnum, disabledReason);
          break;
        case reset:
        case resetPartitions:
          // Body must name this instance plus the resource/partitions to reset.
          if (!validInstance(node, instanceName)) {
            return badRequest("Instance names are not match!");
          }
          admin.resetPartition(clusterId, instanceName,
              node.get(PerInstanceProperties.resource.name()).textValue(),
              (List<String>) OBJECT_MAPPER.readValue(
                  node.get(PerInstanceProperties.partitions.name()).toString(),
                  OBJECT_MAPPER.getTypeFactory()
                      .constructCollectionType(List.class, String.class)));
          break;
        case setPartitionsToError:
          if (!validInstance(node, instanceName)) {
            return badRequest("Instance names are not a match!");
          }
          admin.setPartitionsToError(clusterId, instanceName,
              node.get(PerInstanceProperties.resource.name()).textValue(),
              (List<String>) OBJECT_MAPPER.readValue(
                  node.get(PerInstanceProperties.partitions.name()).toString(), OBJECT_MAPPER
                      .getTypeFactory().constructCollectionType(List.class, String.class)));
          break;
        case setInstanceOperation:
          // force=true records the operation as ADMIN-sourced regardless of the
          // caller-provided source.
          InstanceUtil.setInstanceOperation(new ConfigAccessor(getRealmAwareZkClient()),
              new ZkBaseDataAccessor<>(getRealmAwareZkClient()), clusterId, instanceName,
              new InstanceConfig.InstanceOperation.Builder().setOperation(instanceOperation)
                  .setReason(reason).setSource(
                      force ? InstanceConstants.InstanceOperationSource.ADMIN : instanceOperationSource)
                  .build());
          break;
        case canCompleteSwap:
          return OK(OBJECT_MAPPER.writeValueAsString(
              ImmutableMap.of("successful", admin.canCompleteSwap(clusterId, instanceName))));
        case completeSwapIfPossible:
          return OK(OBJECT_MAPPER.writeValueAsString(
              ImmutableMap.of("successful", admin.completeSwapIfPossible(clusterId, instanceName, force))));
        case addInstanceTag:
          if (!validInstance(node, instanceName)) {
            return badRequest("Instance names are not match!");
          }
          for (String tag : (List<String>) OBJECT_MAPPER.readValue(
              node.get(PerInstanceProperties.instanceTags.name()).toString(),
              OBJECT_MAPPER.getTypeFactory().constructCollectionType(List.class, String.class))) {
            admin.addInstanceTag(clusterId, instanceName, tag);
          }
          break;
        case removeInstanceTag:
          if (!validInstance(node, instanceName)) {
            return badRequest("Instance names are not match!");
          }
          for (String tag : (List<String>) OBJECT_MAPPER.readValue(
              node.get(PerInstanceProperties.instanceTags.name()).toString(),
              OBJECT_MAPPER.getTypeFactory().constructCollectionType(List.class, String.class))) {
            admin.removeInstanceTag(clusterId, instanceName, tag);
          }
          break;
        case enablePartitions:
          admin.enablePartition(true, clusterId, instanceName,
              node.get(PerInstanceProperties.resource.name()).textValue(),
              (List<String>) OBJECT_MAPPER.readValue(
                  node.get(PerInstanceProperties.partitions.name()).toString(),
                  OBJECT_MAPPER.getTypeFactory()
                      .constructCollectionType(List.class, String.class)));
          break;
        case disablePartitions:
          admin.enablePartition(false, clusterId, instanceName,
              node.get(PerInstanceProperties.resource.name()).textValue(),
              (List<String>) OBJECT_MAPPER.readValue(
                  node.get(PerInstanceProperties.partitions.name()).toString(),
                  OBJECT_MAPPER.getTypeFactory()
                      .constructCollectionType(List.class, String.class)));
          break;
        case isEvacuateFinished:
          boolean evacuateFinished;
          try {
            evacuateFinished = admin.isEvacuateFinished(clusterId, instanceName);
          } catch (HelixException e) {
            LOG.error(String.format("Encountered error when checking if evacuation finished for cluster: "
                + "{}, instance: {}", clusterId, instanceName), e);
            return serverError(e);
          }
          return OK(OBJECT_MAPPER.writeValueAsString(ImmutableMap.of("successful", evacuateFinished)));
        case forceKillInstance:
          boolean instanceForceKilled = admin.forceKillInstance(clusterId, instanceName, reason, instanceOperationSource);
          if (!instanceForceKilled) {
            return serverError("Failed to forcefully kill instance: " + instanceName +
                ". Possible that instance was already stopped.");
          }
          return OK(OBJECT_MAPPER.writeValueAsString(ImmutableMap.of("successful", instanceForceKilled)));
        default:
          LOG.error("Unsupported command :" + command);
          return badRequest("Unsupported command :" + command);
      }
    } catch (Exception e) {
      LOG.error("Failed in updating instance : " + instanceName, e);
      return badRequest(e.getMessage());
    }
    return OK();
  }
@ResponseMetered(name = HttpConstants.WRITE_REQUEST)
@Timed(name = HttpConstants.WRITE_REQUEST)
@DELETE
public Response deleteInstance(@PathParam("clusterId") String clusterId,
@PathParam("instanceName") String instanceName) {
HelixAdmin admin = getHelixAdmin();
try {
InstanceConfig instanceConfig = admin.getInstanceConfig(clusterId, instanceName);
admin.dropInstance(clusterId, instanceConfig);
} catch (HelixException e) {
return badRequest(e.getMessage());
}
return OK();
}
@ResponseMetered(name = HttpConstants.READ_REQUEST)
@Timed(name = HttpConstants.READ_REQUEST)
@GET
@Path("configs")
public Response getInstanceConfig(@PathParam("clusterId") String clusterId,
@PathParam("instanceName") String instanceName) throws IOException {
HelixDataAccessor accessor = getDataAccssor(clusterId);
InstanceConfig instanceConfig =
accessor.getProperty(accessor.keyBuilder().instanceConfig(instanceName));
if (instanceConfig != null) {
return JSONRepresentation(instanceConfig.getRecord());
}
return notFound();
}
@ResponseMetered(name = HttpConstants.WRITE_REQUEST)
@Timed(name = HttpConstants.WRITE_REQUEST)
@POST
@Path("configs")
public Response updateInstanceConfig(@PathParam("clusterId") String clusterId,
@PathParam("instanceName") String instanceName, @QueryParam("command") String commandStr,
String content) {
Command command;
if (commandStr == null || commandStr.isEmpty()) {
command = Command.update; // Default behavior to keep it backward-compatible
} else {
try {
command = getCommand(commandStr);
} catch (HelixException ex) {
return badRequest(ex.getMessage());
}
}
ZNRecord record;
try {
record = toZNRecord(content);
} catch (IOException e) {
LOG.error("Failed to deserialize user's input " + content + ", Exception: " + e);
return badRequest("Input is not a vaild ZNRecord!");
}
InstanceConfig instanceConfig = new InstanceConfig(record);
ConfigAccessor configAccessor = getConfigAccessor();
try {
switch (command) {
case update:
/*
* The new instanceConfig will be merged with existing one.
* Even if the instance is disabled, non-valid instance topology config will cause rebalance
* failure. We are doing the check whenever user updates InstanceConfig.
*/
validateDeltaTopologySettingInInstanceConfig(clusterId, instanceName, configAccessor,
instanceConfig, command);
configAccessor.updateInstanceConfig(clusterId, instanceName, instanceConfig);
break;
case delete:
validateDeltaTopologySettingInInstanceConfig(clusterId, instanceName, configAccessor,
instanceConfig, command);
HelixConfigScope instanceScope =
new HelixConfigScopeBuilder(HelixConfigScope.ConfigScopeProperty.PARTICIPANT)
.forCluster(clusterId).forParticipant(instanceName).build();
configAccessor.remove(instanceScope, record);
break;
default:
return badRequest(String.format("Unsupported command: %s", command));
}
} catch (IllegalArgumentException ex) {
LOG.error(String.format("Invalid topology setting for Instance : {}. Fail the config update",
instanceName), ex);
return serverError(ex);
} catch (HelixException ex) {
return notFound(ex.getMessage());
} catch (Exception ex) {
LOG.error(String.format("Error in update instance config for instance: %s", instanceName),
ex);
return serverError(ex);
}
return OK();
}
@ResponseMetered(name = HttpConstants.READ_REQUEST)
@Timed(name = HttpConstants.READ_REQUEST)
@GET
@Path("resources")
public Response getResourcesOnInstance(@PathParam("clusterId") String clusterId,
@PathParam("instanceName") String instanceName) throws IOException {
HelixDataAccessor accessor = getDataAccssor(clusterId);
ObjectNode root = JsonNodeFactory.instance.objectNode();
root.put(Properties.id.name(), instanceName);
ArrayNode resourcesNode = root.putArray(PerInstanceProperties.resources.name());
List<String> liveInstances = accessor.getChildNames(accessor.keyBuilder().liveInstances());
if (!liveInstances.contains(instanceName)) {
return null;
}
LiveInstance liveInstance =
accessor.getProperty(accessor.keyBuilder().liveInstance(instanceName));
// get the current session id
String currentSessionId = liveInstance.getEphemeralOwner();
List<String> resources =
accessor.getChildNames(accessor.keyBuilder().currentStates(instanceName, currentSessionId));
resources.addAll(accessor
.getChildNames(accessor.keyBuilder().taskCurrentStates(instanceName, currentSessionId)));
if (resources.size() > 0) {
resourcesNode.addAll((ArrayNode) OBJECT_MAPPER.valueToTree(resources));
}
return JSONRepresentation(root);
}
@ResponseMetered(name = HttpConstants.READ_REQUEST)
@Timed(name = HttpConstants.READ_REQUEST)
@GET @Path("resources/{resourceName}")
public Response getResourceOnInstance(@PathParam("clusterId") String clusterId,
@PathParam("instanceName") String instanceName,
@PathParam("resourceName") String resourceName) throws IOException {
HelixDataAccessor accessor = getDataAccssor(clusterId);
List<String> liveInstances = accessor.getChildNames(accessor.keyBuilder().liveInstances());
if (!liveInstances.contains(instanceName)) {
return notFound();
}
LiveInstance liveInstance =
accessor.getProperty(accessor.keyBuilder().liveInstance(instanceName));
// get the current session id
String currentSessionId = liveInstance.getEphemeralOwner();
CurrentState resourceCurrentState = accessor.getProperty(
accessor.keyBuilder().currentState(instanceName, currentSessionId, resourceName));
if (resourceCurrentState == null) {
resourceCurrentState = accessor.getProperty(
accessor.keyBuilder().taskCurrentState(instanceName, currentSessionId, resourceName));
}
if (resourceCurrentState != null) {
return JSONRepresentation(resourceCurrentState.getRecord());
}
return notFound();
}
@ResponseMetered(name = HttpConstants.READ_REQUEST)
@Timed(name = HttpConstants.READ_REQUEST)
@GET
@Path("errors")
public Response getErrorsOnInstance(@PathParam("clusterId") String clusterId,
@PathParam("instanceName") String instanceName) throws IOException {
HelixDataAccessor accessor = getDataAccssor(clusterId);
ObjectNode root = JsonNodeFactory.instance.objectNode();
root.put(Properties.id.name(), instanceName);
ObjectNode errorsNode = JsonNodeFactory.instance.objectNode();
List<String> sessionIds = accessor.getChildNames(accessor.keyBuilder().errors(instanceName));
if (sessionIds == null || sessionIds.size() == 0) {
return notFound();
}
for (String sessionId : sessionIds) {
List<String> resources =
accessor.getChildNames(accessor.keyBuilder().errors(instanceName, sessionId));
if (resources != null) {
ObjectNode resourcesNode = JsonNodeFactory.instance.objectNode();
for (String resourceName : resources) {
List<String> partitions = accessor
.getChildNames(accessor.keyBuilder().errors(instanceName, sessionId, resourceName));
if (partitions != null) {
ArrayNode partitionsNode = resourcesNode.putArray(resourceName);
partitionsNode.addAll((ArrayNode) OBJECT_MAPPER.valueToTree(partitions));
}
}
errorsNode.put(sessionId, resourcesNode);
}
}
root.put(PerInstanceProperties.errors.name(), errorsNode);
return JSONRepresentation(root);
}
@ResponseMetered(name = HttpConstants.READ_REQUEST)
@Timed(name = HttpConstants.READ_REQUEST)
@GET
@Path("errors/{sessionId}/{resourceName}/{partitionName}")
public Response getErrorsOnInstance(@PathParam("clusterId") String clusterId,
@PathParam("instanceName") String instanceName, @PathParam("sessionId") String sessionId,
@PathParam("resourceName") String resourceName,
@PathParam("partitionName") String partitionName) throws IOException {
HelixDataAccessor accessor = getDataAccssor(clusterId);
Error error = accessor.getProperty(accessor.keyBuilder()
.stateTransitionError(instanceName, sessionId, resourceName, partitionName));
if (error != null) {
return JSONRepresentation(error.getRecord());
}
return notFound();
}
@ResponseMetered(name = HttpConstants.READ_REQUEST)
@Timed(name = HttpConstants.READ_REQUEST)
@GET
@Path("history")
public Response getHistoryOnInstance(@PathParam("clusterId") String clusterId,
@PathParam("instanceName") String instanceName) throws IOException {
HelixDataAccessor accessor = getDataAccssor(clusterId);
ParticipantHistory history =
accessor.getProperty(accessor.keyBuilder().participantHistory(instanceName));
if (history != null) {
return JSONRepresentation(history.getRecord());
}
return notFound();
}
/**
 * Lists the messages currently queued on the given instance, split into NEW and READ buckets.
 * Optionally filters to messages whose state model definition matches {@code stateModelDef}.
 *
 * @param clusterId the cluster the instance belongs to
 * @param instanceName the instance whose messages are listed
 * @param stateModelDef optional state model definition filter (blank = no filtering)
 * @return JSON with the message names per state plus total/read counts, or 404 when none exist
 */
@ResponseMetered(name = HttpConstants.READ_REQUEST)
@Timed(name = HttpConstants.READ_REQUEST)
@GET
@Path("messages")
public Response getMessagesOnInstance(@PathParam("clusterId") String clusterId,
    @PathParam("instanceName") String instanceName,
    @QueryParam("stateModelDef") String stateModelDef) {
  HelixDataAccessor accessor = getDataAccssor(clusterId);
  ObjectNode root = JsonNodeFactory.instance.objectNode();
  root.put(Properties.id.name(), instanceName);
  ArrayNode newMessages = root.putArray(PerInstanceProperties.new_messages.name());
  ArrayNode readMessages = root.putArray(PerInstanceProperties.read_messages.name());
  List<String> messageNames =
      accessor.getChildNames(accessor.keyBuilder().messages(instanceName));
  if (messageNames == null || messageNames.isEmpty()) {
    LOG.warn("Unable to get any messages on instance: {}", instanceName);
    return notFound();
  }
  for (String messageName : messageNames) {
    Message message = accessor.getProperty(accessor.keyBuilder().message(instanceName, messageName));
    if (message == null) {
      // Message was deleted between listing and lookup; skip it.
      // BUG FIX: the original call passed messageName without a "{}" placeholder,
      // so the message name never appeared in the log line.
      LOG.warn("Message is deleted given message name: {}", messageName);
      continue;
    }
    // if stateModelDef is valid, keep messages with StateModelDef equals to the parameter
    if (StringUtil.isNotBlank(stateModelDef) && !stateModelDef.equals(message.getStateModelDef())) {
      continue;
    }
    if (Message.MessageState.NEW.equals(message.getMsgState())) {
      newMessages.add(messageName);
    } else if (Message.MessageState.READ.equals(message.getMsgState())) {
      readMessages.add(messageName);
    }
  }
  root.put(PerInstanceProperties.total_message_count.name(),
      newMessages.size() + readMessages.size());
  root.put(PerInstanceProperties.read_message_count.name(), readMessages.size());
  return JSONRepresentation(root);
}
@ResponseMetered(name = HttpConstants.READ_REQUEST)
@Timed(name = HttpConstants.READ_REQUEST)
@GET
@Path("messages/{messageId}")
public Response getMessageOnInstance(@PathParam("clusterId") String clusterId,
    @PathParam("instanceName") String instanceName,
    @PathParam("messageId") String messageId) throws IOException {
  // Look up a single message by id under the given instance.
  HelixDataAccessor dataAccessor = getDataAccssor(clusterId);
  Message msg =
      dataAccessor.getProperty(dataAccessor.keyBuilder().message(instanceName, messageId));
  return msg == null ? notFound() : JSONRepresentation(msg.getRecord());
}
@ResponseMetered(name = HttpConstants.READ_REQUEST)
@Timed(name = HttpConstants.READ_REQUEST)
@GET
@Path("healthreports")
public Response getHealthReportsOnInstance(@PathParam("clusterId") String clusterId,
    @PathParam("instanceName") String instanceName) throws IOException {
  // Build a JSON payload of the form {id: <instance>, healthreports: [...names...]}.
  HelixDataAccessor dataAccessor = getDataAccssor(clusterId);
  ObjectNode payload = JsonNodeFactory.instance.objectNode();
  payload.put(Properties.id.name(), instanceName);
  ArrayNode reportsArray = payload.putArray(PerInstanceProperties.healthreports.name());
  List<String> reportNames =
      dataAccessor.getChildNames(dataAccessor.keyBuilder().healthReports(instanceName));
  if (reportNames != null && !reportNames.isEmpty()) {
    reportsArray.addAll((ArrayNode) OBJECT_MAPPER.valueToTree(reportNames));
  }
  return JSONRepresentation(payload);
}
@ResponseMetered(name = HttpConstants.READ_REQUEST)
@Timed(name = HttpConstants.READ_REQUEST)
@GET
@Path("healthreports/{reportName}")
public Response getHealthReportsOnInstance(
    @PathParam("clusterId") String clusterId, @PathParam("instanceName") String instanceName,
    @PathParam("reportName") String reportName) throws IOException {
  // Look up a single named health report under the given instance.
  HelixDataAccessor dataAccessor = getDataAccssor(clusterId);
  HealthStat stat =
      dataAccessor.getProperty(dataAccessor.keyBuilder().healthReport(instanceName, reportName));
  return stat == null ? notFound() : JSONRepresentation(stat);
}
/**
 * Returns true when the JSON payload's "id" field matches the expected instance name.
 * BUG FIX: made null-safe — a payload without an "id" field previously threw a
 * NullPointerException; it is now simply treated as not matching.
 */
private boolean validInstance(JsonNode node, String instanceName) {
  JsonNode idNode = node.get(Properties.id.name());
  return idNode != null && instanceName.equals(idNode.textValue());
}
/**
 * Applies the requested config delta to a copy of the persisted instance config and checks
 * whether the resulting topology setting is still valid for the cluster.
 */
private boolean validateDeltaTopologySettingInInstanceConfig(String clusterName,
    String instanceName, ConfigAccessor configAccessor, InstanceConfig newInstanceConfig,
    Command command) {
  // Start from the currently persisted config and apply the delta to that copy.
  InstanceConfig mergedConfig = configAccessor.getInstanceConfig(clusterName, instanceName);
  if (command == Command.delete) {
    // Deletion: strip every simple field named by the delta from the copy.
    Map<String, String> mergedSimpleFields = mergedConfig.getRecord().getSimpleFields();
    for (String key : newInstanceConfig.getRecord().getSimpleFields().keySet()) {
      mergedSimpleFields.remove(key);
    }
  } else {
    // Any other command is an upsert of the delta onto the copy.
    mergedConfig.getRecord().update(newInstanceConfig.getRecord());
  }
  return mergedConfig.validateTopologySettingInInstanceConfig(
      configAccessor.getClusterConfig(clusterName), instanceName);
}
}
|
apache/hop | 37,101 | plugins/transforms/salesforce/src/main/java/org/apache/hop/pipeline/transforms/salesforceupdate/SalesforceUpdateDialog.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hop.pipeline.transforms.salesforceupdate;
import java.util.ArrayList;
import java.util.List;
import org.apache.hop.core.Const;
import org.apache.hop.core.Props;
import org.apache.hop.core.SourceToTargetMapping;
import org.apache.hop.core.exception.HopException;
import org.apache.hop.core.row.IRowMeta;
import org.apache.hop.core.row.IValueMeta;
import org.apache.hop.core.row.RowMeta;
import org.apache.hop.core.row.value.ValueMetaNone;
import org.apache.hop.core.util.Utils;
import org.apache.hop.core.variables.IVariables;
import org.apache.hop.i18n.BaseMessages;
import org.apache.hop.pipeline.PipelineMeta;
import org.apache.hop.pipeline.transform.TransformMeta;
import org.apache.hop.pipeline.transforms.salesforce.SalesforceConnection;
import org.apache.hop.pipeline.transforms.salesforce.SalesforceConnectionUtils;
import org.apache.hop.pipeline.transforms.salesforce.SalesforceTransformDialog;
import org.apache.hop.pipeline.transforms.salesforce.SalesforceTransformMeta;
import org.apache.hop.ui.core.ConstUi;
import org.apache.hop.ui.core.PropsUi;
import org.apache.hop.ui.core.dialog.BaseDialog;
import org.apache.hop.ui.core.dialog.EnterMappingDialog;
import org.apache.hop.ui.core.dialog.ErrorDialog;
import org.apache.hop.ui.core.dialog.MessageBox;
import org.apache.hop.ui.core.gui.GuiResource;
import org.apache.hop.ui.core.widget.ColumnInfo;
import org.apache.hop.ui.core.widget.ComboVar;
import org.apache.hop.ui.core.widget.LabelTextVar;
import org.apache.hop.ui.core.widget.TableView;
import org.apache.hop.ui.core.widget.TextVar;
import org.apache.hop.ui.hopgui.HopGui;
import org.apache.hop.ui.pipeline.transform.BaseTransformDialog;
import org.apache.hop.ui.pipeline.transform.ComponentSelectionListener;
import org.apache.hop.ui.pipeline.transform.ITableItemInsertListener;
import org.eclipse.swt.SWT;
import org.eclipse.swt.custom.CTabFolder;
import org.eclipse.swt.custom.CTabItem;
import org.eclipse.swt.events.FocusListener;
import org.eclipse.swt.events.ModifyListener;
import org.eclipse.swt.events.SelectionAdapter;
import org.eclipse.swt.events.SelectionEvent;
import org.eclipse.swt.graphics.Cursor;
import org.eclipse.swt.layout.FormAttachment;
import org.eclipse.swt.layout.FormData;
import org.eclipse.swt.layout.FormLayout;
import org.eclipse.swt.widgets.Button;
import org.eclipse.swt.widgets.Composite;
import org.eclipse.swt.widgets.Display;
import org.eclipse.swt.widgets.Group;
import org.eclipse.swt.widgets.Label;
import org.eclipse.swt.widgets.Shell;
import org.eclipse.swt.widgets.TableItem;
import org.eclipse.swt.widgets.Text;
/**
 * Dialog for configuring the Salesforce Update transform: connection settings (URL, user,
 * password), batch/timeout options, the target module, and the stream-to-module field mapping.
 */
public class SalesforceUpdateDialog extends SalesforceTransformDialog {
// Message bundle anchor class for i18n lookups.
private static final Class<?> PKG = SalesforceUpdateMeta.class;
// Field names coming from the previous transform, filled by a background thread in open().
private final List<String> inputFields = new ArrayList<>();
// Columns of the update-fields table (module field, stream field, use-external-id flag).
private ColumnInfo[] ciReturn;
// The update-fields mapping table widget.
private TableView wReturn;
// The transform metadata being edited by this dialog.
private SalesforceUpdateMeta input;
private LabelTextVar wUserName;
private LabelTextVar wURL;
private LabelTextVar wPassword;
private TextVar wBatchSize;
private ComboVar wModule;
private Button wUseCompression;
private TextVar wTimeOut;
private Button wRollbackAllChangesOnError;
/** List of ColumnInfo that should have the field names of the selected salesforce module */
private List<ColumnInfo> tableFieldColumns = new ArrayList<>();
// True once the module list has been fetched from Salesforce (avoids repeat round trips).
private boolean gotModule = false;
// True once the module's field names have been fetched.
private boolean gotFields = false;
private boolean getModulesListError = false; /* True if error getting modules list */
/**
 * @param parent the parent shell
 * @param variables variable space used to resolve expressions in the widgets
 * @param transformMeta the metadata to edit
 * @param pipelineMeta the enclosing pipeline's metadata
 */
public SalesforceUpdateDialog(
    Shell parent,
    IVariables variables,
    SalesforceUpdateMeta transformMeta,
    PipelineMeta pipelineMeta) {
  super(parent, variables, transformMeta, pipelineMeta);
  input = transformMeta;
}
/**
 * Builds, lays out and opens the transform dialog: the shell, the transform-name field, a tab
 * folder holding the General tab (connection group, settings group, update-fields mapping
 * table), the OK/Cancel buttons, and a background thread that collects the incoming field
 * names from the previous transform.
 *
 * @return the transform name when the user pressed OK, or null when the dialog was cancelled
 */
@Override
public String open() {
  Shell parent = getParent();
  shell = new Shell(parent, SWT.DIALOG_TRIM | SWT.RESIZE | SWT.MAX | SWT.MIN);
  PropsUi.setLook(shell);
  setShellImage(shell, input);
  // Flag the transform as changed on any widget edit.
  ModifyListener lsMod = e -> input.setChanged();
  // Module changes additionally refresh the module-field combo column.
  ModifyListener lsTableMod =
      arg0 -> {
        input.setChanged();
        setModuleFieldCombo();
      };
  SelectionAdapter lsSelection =
      new SelectionAdapter() {
        @Override
        public void widgetSelected(SelectionEvent e) {
          input.setChanged();
          setModuleFieldCombo();
        }
      };
  changed = input.hasChanged();
  FormLayout formLayout = new FormLayout();
  formLayout.marginWidth = PropsUi.getFormMargin();
  formLayout.marginHeight = PropsUi.getFormMargin();
  shell.setLayout(formLayout);
  shell.setText(BaseMessages.getString(PKG, "SalesforceUpdateDialog.DialogTitle"));
  int middle = props.getMiddlePct();
  int margin = PropsUi.getMargin();
  // TransformName line
  wlTransformName = new Label(shell, SWT.RIGHT);
  wlTransformName.setText(BaseMessages.getString(PKG, "System.TransformName.Label"));
  wlTransformName.setToolTipText(BaseMessages.getString(PKG, "System.TransformName.Tooltip"));
  PropsUi.setLook(wlTransformName);
  fdlTransformName = new FormData();
  fdlTransformName.left = new FormAttachment(0, 0);
  fdlTransformName.top = new FormAttachment(0, margin);
  fdlTransformName.right = new FormAttachment(middle, -margin);
  wlTransformName.setLayoutData(fdlTransformName);
  wTransformName = new Text(shell, SWT.SINGLE | SWT.LEFT | SWT.BORDER);
  wTransformName.setText(transformName);
  PropsUi.setLook(wTransformName);
  wTransformName.addModifyListener(lsMod);
  fdTransformName = new FormData();
  fdTransformName.left = new FormAttachment(middle, 0);
  fdTransformName.top = new FormAttachment(0, margin);
  fdTransformName.right = new FormAttachment(100, 0);
  wTransformName.setLayoutData(fdTransformName);
  CTabFolder wTabFolder = new CTabFolder(shell, SWT.BORDER);
  PropsUi.setLook(wTabFolder, Props.WIDGET_STYLE_TAB);
  // ////////////////////////
  // START OF FILE TAB ///
  // ////////////////////////
  CTabItem wGeneralTab = new CTabItem(wTabFolder, SWT.NONE);
  wGeneralTab.setFont(GuiResource.getInstance().getFontDefault());
  wGeneralTab.setText(BaseMessages.getString(PKG, "SalesforceUpdateDialog.General.Tab"));
  Composite wGeneralComp = new Composite(wTabFolder, SWT.NONE);
  PropsUi.setLook(wGeneralComp);
  FormLayout generalLayout = new FormLayout();
  generalLayout.marginWidth = 3;
  generalLayout.marginHeight = 3;
  wGeneralComp.setLayout(generalLayout);
  // ///////////////////////////////
  // START OF Connection GROUP //
  // ///////////////////////////////
  Group wConnectionGroup = new Group(wGeneralComp, SWT.SHADOW_NONE);
  PropsUi.setLook(wConnectionGroup);
  wConnectionGroup.setText(
      BaseMessages.getString(PKG, "SalesforceUpdateDialog.ConnectionGroup.Label"));
  FormLayout connectionGroupLayout = new FormLayout();
  connectionGroupLayout.marginWidth = 10;
  connectionGroupLayout.marginHeight = 10;
  wConnectionGroup.setLayout(connectionGroupLayout);
  // Webservice URL
  wURL =
      new LabelTextVar(
          variables,
          wConnectionGroup,
          BaseMessages.getString(PKG, "SalesforceUpdateDialog.URL.Label"),
          BaseMessages.getString(PKG, "SalesforceUpdateDialog.URL.Tooltip"));
  PropsUi.setLook(wURL);
  wURL.addModifyListener(lsMod);
  FormData fdURL = new FormData();
  fdURL.left = new FormAttachment(0, 0);
  fdURL.top = new FormAttachment(wTransformName, margin);
  fdURL.right = new FormAttachment(100, 0);
  wURL.setLayoutData(fdURL);
  // UserName line
  wUserName =
      new LabelTextVar(
          variables,
          wConnectionGroup,
          BaseMessages.getString(PKG, "SalesforceUpdateDialog.User.Label"),
          BaseMessages.getString(PKG, "SalesforceUpdateDialog.User.Tooltip"));
  PropsUi.setLook(wUserName);
  wUserName.addModifyListener(lsMod);
  FormData fdUserName = new FormData();
  fdUserName.left = new FormAttachment(0, 0);
  fdUserName.top = new FormAttachment(wURL, margin);
  fdUserName.right = new FormAttachment(100, 0);
  wUserName.setLayoutData(fdUserName);
  // Password line
  wPassword =
      new LabelTextVar(
          variables,
          wConnectionGroup,
          BaseMessages.getString(PKG, "SalesforceUpdateDialog.Password.Label"),
          BaseMessages.getString(PKG, "SalesforceUpdateDialog.Password.Tooltip"),
          true);
  PropsUi.setLook(wPassword);
  wPassword.addModifyListener(lsMod);
  FormData fdPassword = new FormData();
  fdPassword.left = new FormAttachment(0, 0);
  fdPassword.top = new FormAttachment(wUserName, margin);
  fdPassword.right = new FormAttachment(100, 0);
  wPassword.setLayoutData(fdPassword);
  // Test Salesforce connection button
  Button wTest = new Button(wConnectionGroup, SWT.PUSH);
  wTest.setText(BaseMessages.getString(PKG, "SalesforceUpdateDialog.TestConnection.Label"));
  PropsUi.setLook(wTest);
  FormData fdTest = new FormData();
  wTest.setToolTipText(
      BaseMessages.getString(PKG, "SalesforceUpdateDialog.TestConnection.Tooltip"));
  fdTest.top = new FormAttachment(wPassword, margin);
  fdTest.right = new FormAttachment(100, 0);
  wTest.setLayoutData(fdTest);
  FormData fdConnectionGroup = new FormData();
  fdConnectionGroup.left = new FormAttachment(0, margin);
  fdConnectionGroup.top = new FormAttachment(wTransformName, margin);
  fdConnectionGroup.right = new FormAttachment(100, -margin);
  wConnectionGroup.setLayoutData(fdConnectionGroup);
  // ///////////////////////////////
  // END OF Connection GROUP //
  // ///////////////////////////////
  // ///////////////////////////////
  // START OF Settings GROUP //
  // ///////////////////////////////
  Group wSettingsGroup = new Group(wGeneralComp, SWT.SHADOW_NONE);
  PropsUi.setLook(wSettingsGroup);
  wSettingsGroup.setText(
      BaseMessages.getString(PKG, "SalesforceUpdateDialog.SettingsGroup.Label"));
  FormLayout settingGroupLayout = new FormLayout();
  settingGroupLayout.marginWidth = 10;
  settingGroupLayout.marginHeight = 10;
  wSettingsGroup.setLayout(settingGroupLayout);
  // Timeout
  Label wlTimeOut = new Label(wSettingsGroup, SWT.RIGHT);
  wlTimeOut.setText(BaseMessages.getString(PKG, "SalesforceUpdateDialog.TimeOut.Label"));
  PropsUi.setLook(wlTimeOut);
  FormData fdlTimeOut = new FormData();
  fdlTimeOut.left = new FormAttachment(0, 0);
  fdlTimeOut.top = new FormAttachment(wSettingsGroup, margin);
  fdlTimeOut.right = new FormAttachment(middle, -margin);
  wlTimeOut.setLayoutData(fdlTimeOut);
  wTimeOut = new TextVar(variables, wSettingsGroup, SWT.SINGLE | SWT.LEFT | SWT.BORDER);
  PropsUi.setLook(wTimeOut);
  wTimeOut.addModifyListener(lsMod);
  FormData fdTimeOut = new FormData();
  fdTimeOut.left = new FormAttachment(middle, 0);
  fdTimeOut.top = new FormAttachment(wSettingsGroup, margin);
  fdTimeOut.right = new FormAttachment(100, 0);
  wTimeOut.setLayoutData(fdTimeOut);
  // Use compression?
  Label wlUseCompression = new Label(wSettingsGroup, SWT.RIGHT);
  wlUseCompression.setText(
      BaseMessages.getString(PKG, "SalesforceUpdateDialog.UseCompression.Label"));
  PropsUi.setLook(wlUseCompression);
  FormData fdlUseCompression = new FormData();
  fdlUseCompression.left = new FormAttachment(0, 0);
  fdlUseCompression.top = new FormAttachment(wTimeOut, margin);
  fdlUseCompression.right = new FormAttachment(middle, -margin);
  wlUseCompression.setLayoutData(fdlUseCompression);
  wUseCompression = new Button(wSettingsGroup, SWT.CHECK);
  PropsUi.setLook(wUseCompression);
  wUseCompression.setToolTipText(
      BaseMessages.getString(PKG, "SalesforceUpdateDialog.UseCompression.Tooltip"));
  FormData fdUseCompression = new FormData();
  fdUseCompression.left = new FormAttachment(middle, 0);
  fdUseCompression.top = new FormAttachment(wlUseCompression, 0, SWT.CENTER);
  wUseCompression.setLayoutData(fdUseCompression);
  wUseCompression.addSelectionListener(new ComponentSelectionListener(input));
  // Rollback all changes on error?
  Label wlRollbackAllChangesOnError = new Label(wSettingsGroup, SWT.RIGHT);
  wlRollbackAllChangesOnError.setText(
      BaseMessages.getString(PKG, "SalesforceUpdateDialog.RollbackAllChangesOnError.Label"));
  PropsUi.setLook(wlRollbackAllChangesOnError);
  FormData fdlRollbackAllChangesOnError = new FormData();
  fdlRollbackAllChangesOnError.left = new FormAttachment(0, 0);
  fdlRollbackAllChangesOnError.top = new FormAttachment(wUseCompression, margin);
  fdlRollbackAllChangesOnError.right = new FormAttachment(middle, -margin);
  wlRollbackAllChangesOnError.setLayoutData(fdlRollbackAllChangesOnError);
  wRollbackAllChangesOnError = new Button(wSettingsGroup, SWT.CHECK);
  wRollbackAllChangesOnError.addSelectionListener(new ComponentSelectionListener(input));
  PropsUi.setLook(wRollbackAllChangesOnError);
  wRollbackAllChangesOnError.setToolTipText(
      BaseMessages.getString(PKG, "SalesforceUpdateDialog.RollbackAllChangesOnError.Tooltip"));
  FormData fdRollbackAllChangesOnError = new FormData();
  fdRollbackAllChangesOnError.left = new FormAttachment(middle, 0);
  fdRollbackAllChangesOnError.top =
      new FormAttachment(wlRollbackAllChangesOnError, 0, SWT.CENTER);
  wRollbackAllChangesOnError.setLayoutData(fdRollbackAllChangesOnError);
  // BatchSize value
  Label wlBatchSize = new Label(wSettingsGroup, SWT.RIGHT);
  wlBatchSize.setText(BaseMessages.getString(PKG, "SalesforceUpdateDialog.Limit.Label"));
  PropsUi.setLook(wlBatchSize);
  FormData fdlBatchSize = new FormData();
  fdlBatchSize.left = new FormAttachment(0, 0);
  fdlBatchSize.top = new FormAttachment(wRollbackAllChangesOnError, margin);
  fdlBatchSize.right = new FormAttachment(middle, -margin);
  wlBatchSize.setLayoutData(fdlBatchSize);
  wBatchSize = new TextVar(variables, wSettingsGroup, SWT.SINGLE | SWT.LEFT | SWT.BORDER);
  PropsUi.setLook(wBatchSize);
  wBatchSize.addModifyListener(lsMod);
  FormData fdBatchSize = new FormData();
  fdBatchSize.left = new FormAttachment(middle, 0);
  fdBatchSize.top = new FormAttachment(wRollbackAllChangesOnError, margin);
  fdBatchSize.right = new FormAttachment(100, 0);
  wBatchSize.setLayoutData(fdBatchSize);
  // Module
  Label wlModule = new Label(wSettingsGroup, SWT.RIGHT);
  wlModule.setText(BaseMessages.getString(PKG, "SalesforceUpdateDialog.Module.Label"));
  PropsUi.setLook(wlModule);
  FormData fdlModule = new FormData();
  fdlModule.left = new FormAttachment(0, 0);
  fdlModule.top = new FormAttachment(wBatchSize, margin);
  fdlModule.right = new FormAttachment(middle, -margin);
  wlModule.setLayoutData(fdlModule);
  wModule = new ComboVar(variables, wSettingsGroup, SWT.SINGLE | SWT.READ_ONLY | SWT.BORDER);
  wModule.setEditable(true);
  PropsUi.setLook(wModule);
  wModule.addModifyListener(lsTableMod);
  wModule.addSelectionListener(lsSelection);
  FormData fdModule = new FormData();
  fdModule.left = new FormAttachment(middle, 0);
  fdModule.top = new FormAttachment(wBatchSize, margin);
  fdModule.right = new FormAttachment(100, -margin);
  wModule.setLayoutData(fdModule);
  // Lazily fetch the module list from Salesforce the first time the combo gains focus,
  // but only once the connection fields are filled in and no previous fetch has failed.
  wModule.addFocusListener(
      new FocusListener() {
        @Override
        public void focusLost(org.eclipse.swt.events.FocusEvent e) {
          getModulesListError = false;
        }
        @Override
        public void focusGained(org.eclipse.swt.events.FocusEvent e) {
          // check if the URL and login credentials passed and not just had error
          if (Utils.isEmpty(wURL.getText())
              || Utils.isEmpty(wUserName.getText())
              || Utils.isEmpty(wPassword.getText())
              || (getModulesListError)) {
            return;
          }
          // Show a busy cursor while the module list is fetched over the network.
          Cursor busy = new Cursor(shell.getDisplay(), SWT.CURSOR_WAIT);
          shell.setCursor(busy);
          getModulesList();
          shell.setCursor(null);
          busy.dispose();
        }
      });
  FormData fdSettingsGroup = new FormData();
  fdSettingsGroup.left = new FormAttachment(0, margin);
  fdSettingsGroup.top = new FormAttachment(wConnectionGroup, margin);
  fdSettingsGroup.right = new FormAttachment(100, -margin);
  wSettingsGroup.setLayoutData(fdSettingsGroup);
  // ///////////////////////////////
  // END OF Settings GROUP //
  // ///////////////////////////////
  // THE UPDATE/INSERT TABLE
  Label wlReturn = new Label(wGeneralComp, SWT.NONE);
  wlReturn.setText(BaseMessages.getString(PKG, "SalesforceUpdateDialog.UpdateFields.Label"));
  PropsUi.setLook(wlReturn);
  FormData fdlReturn = new FormData();
  fdlReturn.left = new FormAttachment(0, 0);
  fdlReturn.top = new FormAttachment(wSettingsGroup, margin);
  wlReturn.setLayoutData(fdlReturn);
  int upInsCols = 3;
  int upInsRows = (input.getUpdateLookup() != null ? input.getUpdateLookup().length : 1);
  ciReturn = new ColumnInfo[upInsCols];
  ciReturn[0] =
      new ColumnInfo(
          BaseMessages.getString(PKG, "SalesforceUpdateDialog.ColumnInfo.TableField"),
          ColumnInfo.COLUMN_TYPE_CCOMBO,
          new String[] {""},
          false);
  ciReturn[1] =
      new ColumnInfo(
          BaseMessages.getString(PKG, "SalesforceUpdateDialog.ColumnInfo.StreamField"),
          ColumnInfo.COLUMN_TYPE_CCOMBO,
          new String[] {""},
          false);
  ciReturn[2] =
      new ColumnInfo(
          BaseMessages.getString(PKG, "SalesforceUpdateDialog.ColumnInfo.UseExternalId"),
          ColumnInfo.COLUMN_TYPE_CCOMBO,
          new String[] {"Y", "N"});
  ciReturn[2].setToolTip(
      BaseMessages.getString(PKG, "SalesforceUpdateDialog.ColumnInfo.UseExternalId.Tooltip"));
  // The first column's combo values are refreshed when the module's fields are fetched.
  tableFieldColumns.add(ciReturn[0]);
  wReturn =
      new TableView(
          variables,
          wGeneralComp,
          SWT.BORDER | SWT.FULL_SELECTION | SWT.MULTI | SWT.V_SCROLL | SWT.H_SCROLL,
          ciReturn,
          upInsRows,
          lsMod,
          props);
  Button wDoMapping = new Button(wGeneralComp, SWT.PUSH);
  wDoMapping.setText(BaseMessages.getString(PKG, "SalesforceUpdateDialog.EditMapping.Label"));
  FormData fdDoMapping = new FormData();
  fdDoMapping.top = new FormAttachment(wlReturn, margin);
  fdDoMapping.right = new FormAttachment(100, 0);
  wDoMapping.setLayoutData(fdDoMapping);
  wDoMapping.addListener(SWT.Selection, e -> generateMappings());
  FormData fdReturn = new FormData();
  fdReturn.left = new FormAttachment(0, 0);
  fdReturn.top = new FormAttachment(wlReturn, margin);
  fdReturn.right = new FormAttachment(wDoMapping, -margin);
  fdReturn.bottom = new FormAttachment(100, -2 * margin);
  wReturn.setLayoutData(fdReturn);
  Button wGetLU = new Button(wGeneralComp, SWT.PUSH);
  wGetLU.setText(BaseMessages.getString(PKG, "SalesforceUpdateDialog.GetAndUpdateFields.Label"));
  FormData fdGetLU = new FormData();
  fdGetLU.top = new FormAttachment(wDoMapping, margin);
  fdGetLU.left = new FormAttachment(wReturn, margin);
  fdGetLU.right = new FormAttachment(100, 0);
  wGetLU.setLayoutData(fdGetLU);
  //
  // Search the fields in the background
  //
  final Runnable runnable =
      () -> {
        TransformMeta transformMeta = pipelineMeta.findTransform(transformName);
        if (transformMeta != null) {
          try {
            IRowMeta row = pipelineMeta.getPrevTransformFields(variables, transformMeta);
            // Remember these fields...
            for (int i = 0; i < row.size(); i++) {
              inputFields.add(row.getValueMeta(i).getName());
            }
            setComboBoxes();
            // Display missing field names in red
            HopGui.getInstance()
                .getDisplay()
                .asyncExec(
                    () -> {
                      if (!wReturn.isDisposed()) {
                        for (int i = 0; i < wReturn.table.getItemCount(); i++) {
                          TableItem it = wReturn.table.getItem(i);
                          if (!Utils.isEmpty(it.getText(2))) {
                            if (!inputFields.contains(it.getText(2))) {
                              it.setBackground(GuiResource.getInstance().getColorRed());
                            }
                          }
                        }
                      }
                    });
          } catch (HopException e) {
            logError(BaseMessages.getString(PKG, "System.Dialog.GetFieldsFailed.Message"));
          }
        }
      };
  new Thread(runnable).start();
  FormData fdGeneralComp = new FormData();
  fdGeneralComp.left = new FormAttachment(0, 0);
  fdGeneralComp.top = new FormAttachment(wTransformName, margin);
  fdGeneralComp.right = new FormAttachment(100, 0);
  fdGeneralComp.bottom = new FormAttachment(100, 0);
  wGeneralComp.setLayoutData(fdGeneralComp);
  wGeneralComp.layout();
  wGeneralTab.setControl(wGeneralComp);
  // THE BUTTONS
  wOk = new Button(shell, SWT.PUSH);
  wOk.setText(BaseMessages.getString(PKG, "System.Button.OK"));
  wCancel = new Button(shell, SWT.PUSH);
  wCancel.setText(BaseMessages.getString(PKG, "System.Button.Cancel"));
  setButtonPositions(new Button[] {wOk, wCancel}, margin, null);
  FormData fdTabFolder = new FormData();
  fdTabFolder.left = new FormAttachment(0, 0);
  fdTabFolder.top = new FormAttachment(wTransformName, margin);
  fdTabFolder.right = new FormAttachment(100, 0);
  fdTabFolder.bottom = new FormAttachment(wOk, -margin);
  wTabFolder.setLayoutData(fdTabFolder);
  // Add listeners
  wOk.addListener(SWT.Selection, e -> ok());
  wGetLU.addListener(SWT.Selection, e -> getUpdate());
  wTest.addListener(SWT.Selection, e -> test());
  wCancel.addListener(SWT.Selection, e -> cancel());
  wTabFolder.setSelection(0);
  // Populate widgets from the metadata, then restore the original changed flag.
  getData(input);
  input.setChanged(changed);
  BaseDialog.defaultShellHandling(shell, c -> ok(), c -> cancel());
  return transformName;
}
/**
 * Pulls the field names from the previous transform and appends them to the update-fields
 * table, defaulting the "use external id" column to "N" for each added row.
 */
private void getUpdate() {
  try {
    IRowMeta previousFields = pipelineMeta.getPrevTransformFields(variables, transformName);
    if (previousFields == null) {
      return;
    }
    ITableItemInsertListener markNoExternalId =
        (tableItem, v) -> {
          tableItem.setText(3, "N");
          return true;
        };
    BaseTransformDialog.getFieldsFromPrevious(
        previousFields, wReturn, 1, new int[] {1, 2}, new int[] {}, -1, -1, markNoExternalId);
  } catch (HopException ke) {
    new ErrorDialog(
        shell,
        BaseMessages.getString(PKG, "SalesforceUpdateDialog.FailedToGetFields.DialogTitle"),
        BaseMessages.getString(PKG, "SalesforceUpdateDialog.FailedToGetFields.DialogMessage"),
        ke);
  }
}
/**
 * Reads the transform configuration from the given metadata object and populates the dialog
 * widgets with it.
 *
 * @param in The SalesforceUpdateMeta object to obtain the data from.
 */
public void getData(SalesforceUpdateMeta in) {
  wURL.setText(Const.NVL(in.getTargetUrl(), ""));
  wUserName.setText(Const.NVL(in.getUsername(), ""));
  wPassword.setText(Const.NVL(in.getPassword(), ""));
  // BUG FIX: the batch size was set twice (the first call unguarded against null);
  // set it exactly once, null-safe.
  wBatchSize.setText(Const.NVL(in.getBatchSize(), ""));
  wModule.setText(Const.NVL(in.getModule(), "Account"));
  if (isDebug()) {
    logDebug(BaseMessages.getString(PKG, "SalesforceUpdateDialog.Log.GettingFieldsInfo"));
  }
  if (input.getUpdateLookup() != null) {
    for (int i = 0; i < input.getUpdateLookup().length; i++) {
      TableItem item = wReturn.table.getItem(i);
      if (input.getUpdateLookup()[i] != null) {
        item.setText(1, input.getUpdateLookup()[i]);
      }
      if (input.getUpdateStream()[i] != null) {
        item.setText(2, input.getUpdateStream()[i]);
      }
      // Default to using the external id unless the flag is explicitly false.
      if (input.getUseExternalId()[i] == null || input.getUseExternalId()[i].booleanValue()) {
        item.setText(3, "Y");
      } else {
        item.setText(3, "N");
      }
    }
  }
  wReturn.removeEmptyRows();
  wReturn.setRowNums();
  wReturn.optWidth(true);
  wTimeOut.setText(Const.NVL(in.getTimeout(), SalesforceConnectionUtils.DEFAULT_TIMEOUT));
  wUseCompression.setSelection(in.isCompression());
  wRollbackAllChangesOnError.setSelection(in.isRollbackAllChangesOnError());
  wTransformName.selectAll();
  wTransformName.setFocus();
}
/** Discards any pending edits, restores the original changed flag and closes the dialog. */
private void cancel() {
  input.setChanged(changed);
  transformName = null;
  dispose();
}
/** Copies the dialog state into the metadata (reporting validation errors) and closes. */
private void ok() {
  try {
    getInfo(input);
  } catch (HopException e) {
    String title =
        BaseMessages.getString(PKG, "SalesforceUpdateDialog.ErrorValidateData.DialogTitle");
    String message =
        BaseMessages.getString(PKG, "SalesforceUpdateDialog.ErrorValidateData.DialogMessage");
    new ErrorDialog(shell, title, message, e);
  }
  dispose();
}
/**
 * Copies the current dialog widget values into the supplied metadata object.
 *
 * @param in the metadata to fill; must be a SalesforceUpdateMeta
 * @throws HopException when the entered data cannot be validated
 */
@Override
protected void getInfo(SalesforceTransformMeta in) throws HopException {
  SalesforceUpdateMeta meta = (SalesforceUpdateMeta) in;
  transformName = wTransformName.getText(); // return value
  // Connection and option settings.
  meta.setTargetUrl(Const.NVL(wURL.getText(), SalesforceConnectionUtils.TARGET_DEFAULT_URL));
  meta.setUsername(wUserName.getText());
  meta.setPassword(wPassword.getText());
  meta.setModule(Const.NVL(wModule.getText(), "Account"));
  meta.setBatchSize(wBatchSize.getText());
  // Field mappings from the non-empty rows of the table.
  int fieldCount = wReturn.nrNonEmpty();
  meta.allocate(fieldCount);
  for (int row = 0; row < fieldCount; row++) {
    TableItem tableItem = wReturn.getNonEmpty(row);
    meta.getUpdateLookup()[row] = tableItem.getText(1);
    meta.getUpdateStream()[row] = tableItem.getText(2);
    meta.getUseExternalId()[row] = Boolean.valueOf("Y".equals(tableItem.getText(3)));
  }
  meta.setCompression(wUseCompression.getSelection());
  meta.setTimeout(Const.NVL(wTimeOut.getText(), "0"));
  meta.setRollbackAllChangesOnError(wRollbackAllChangesOnError.getSelection());
}
// Validates that a module has been entered, then delegates to the username check.
private boolean checkInput() {
  if (!Utils.isEmpty(wModule.getText())) {
    return checkUser();
  }
  MessageBox mb = new MessageBox(shell, SWT.OK | SWT.ICON_ERROR);
  mb.setText(BaseMessages.getString(PKG, "System.Dialog.Error.Title"));
  mb.setMessage(
      BaseMessages.getString(PKG, "SalesforceUpdateDialog.ModuleMissing.DialogMessage"));
  mb.open();
  return false;
}
// Validates that a username has been entered, showing an error dialog when it is missing.
private boolean checkUser() {
  if (!Utils.isEmpty(wUserName.getText())) {
    return true;
  }
  MessageBox mb = new MessageBox(shell, SWT.OK | SWT.ICON_ERROR);
  mb.setText(BaseMessages.getString(PKG, "System.Dialog.Error.Title"));
  mb.setMessage(
      BaseMessages.getString(PKG, "SalesforceUpdateDialog.UsernameMissing.DialogMessage"));
  mb.open();
  return false;
}
/**
 * Connects to Salesforce with the settings currently entered in the dialog and returns the
 * field names of the selected module.
 *
 * @return the field names of the selected Salesforce module
 * @throws HopException when the connection fails or the module's fields cannot be retrieved
 */
private String[] getModuleFields() throws HopException {
  SalesforceUpdateMeta meta = new SalesforceUpdateMeta();
  getInfo(meta);
  SalesforceConnection connection = null;
  String url = variables.resolve(meta.getTargetUrl());
  // Resolved outside the try block so the error message below can reference it.
  String selectedModule = variables.resolve(meta.getModule());
  try {
    // Define a new Salesforce connection
    connection =
        new SalesforceConnection(
            log,
            url,
            variables.resolve(meta.getUsername()),
            Utils.resolvePassword(variables, meta.getPassword()));
    int realTimeOut = Const.toInt(variables.resolve(meta.getTimeout()), 0);
    connection.setTimeOut(realTimeOut);
    // connect to Salesforce
    connection.connect();
    // return fieldsname for the module
    return connection.getFields(selectedModule);
  } catch (Exception e) {
    // BUG FIX: the original message said "module" but only printed the URL.
    throw new HopException(
        "Error getting fields from module [" + selectedModule + "] on [" + url + "]!", e);
  } finally {
    if (connection != null) {
      try {
        connection.close();
      } catch (Exception e) {
        /* Ignore: best-effort cleanup of the connection */
      }
    }
  }
}
/**
 * Reads in the fields from the previous transforms and from the ONE next transform and opens an
 * EnterMappingDialog with this information. After the user did the mapping, those information is
 * put into the Select/Rename table.
 */
private void generateMappings() {
  if (!checkInput()) {
    return;
  }
  // Determine the source and target fields...
  //
  IRowMeta sourceFields;
  IRowMeta targetFields = new RowMeta();
  try {
    sourceFields = pipelineMeta.getPrevTransformFields(variables, transformMeta);
  } catch (HopException e) {
    new ErrorDialog(
        shell,
        BaseMessages.getString(
            PKG, "SalesforceUpdateDialog.DoMapping.UnableToFindSourceFields.Title"),
        BaseMessages.getString(
            PKG, "SalesforceUpdateDialog.DoMapping.UnableToFindSourceFields.Message"),
        e);
    return;
  }
  try {
    // Target fields are the field names of the selected Salesforce module.
    String[] fields = getModuleFields();
    for (int i = 0; i < fields.length; i++) {
      targetFields.addValueMeta(new ValueMetaNone(fields[i]));
    }
  } catch (Exception e) {
    new ErrorDialog(
        shell,
        BaseMessages.getString(
            PKG, "SalesforceUpdateDialog.DoMapping.UnableToFindTargetFields.Title"),
        BaseMessages.getString(
            PKG, "SalesforceUpdateDialog.DoMapping.UnableToFindTargetFields.Message"),
        e);
    return;
  }
  // Create the existing mapping list...
  // (StringBuilder replaces the legacy synchronized StringBuffer; the unused
  // inputNames array the original built here has been removed.)
  List<SourceToTargetMapping> mappings = new ArrayList<>();
  StringBuilder missingSourceFields = new StringBuilder();
  StringBuilder missingTargetFields = new StringBuilder();
  int nrFields = wReturn.nrNonEmpty();
  for (int i = 0; i < nrFields; i++) {
    TableItem item = wReturn.getNonEmpty(i);
    String source = item.getText(2);
    String target = item.getText(1);
    int sourceIndex = sourceFields.indexOfValue(source);
    if (sourceIndex < 0) {
      missingSourceFields.append(Const.CR + " " + source + " --> " + target);
    }
    int targetIndex = targetFields.indexOfValue(target);
    if (targetIndex < 0) {
      missingTargetFields.append(Const.CR + " " + source + " --> " + target);
    }
    if (sourceIndex < 0 || targetIndex < 0) {
      continue;
    }
    SourceToTargetMapping mapping = new SourceToTargetMapping(sourceIndex, targetIndex);
    mappings.add(mapping);
  }
  // show a confirm dialog if some missing field was found
  //
  if (!missingSourceFields.isEmpty() || !missingTargetFields.isEmpty()) {
    String message = "";
    if (!missingSourceFields.isEmpty()) {
      message +=
          BaseMessages.getString(
                  PKG,
                  "SalesforceUpdateDialog.DoMapping.SomeSourceFieldsNotFound",
                  missingSourceFields.toString())
              + Const.CR;
    }
    if (!missingTargetFields.isEmpty()) {
      // BUG FIX: this message previously echoed the missing SOURCE fields instead of the
      // missing TARGET fields.
      message +=
          BaseMessages.getString(
                  PKG,
                  "SalesforceUpdateDialog.DoMapping.SomeTargetFieldsNotFound",
                  missingTargetFields.toString())
              + Const.CR;
    }
    message += Const.CR;
    message +=
        BaseMessages.getString(PKG, "SalesforceUpdateDialog.DoMapping.SomeFieldsNotFoundContinue")
            + Const.CR;
    int answer =
        BaseDialog.openMessageBox(
            shell,
            BaseMessages.getString(
                PKG, "SalesforceUpdateDialog.DoMapping.SomeFieldsNotFoundTitle"),
            message,
            SWT.ICON_QUESTION | SWT.OK | SWT.CANCEL);
    boolean goOn = (answer & SWT.OK) != 0;
    if (!goOn) {
      return;
    }
  }
  EnterMappingDialog d =
      new EnterMappingDialog(
          SalesforceUpdateDialog.this.shell,
          sourceFields.getFieldNames(),
          targetFields.getFieldNames(),
          mappings);
  mappings = d.open();
  // mappings == null if the user pressed cancel
  //
  if (mappings != null) {
    // Clear and re-populate!
    //
    wReturn.table.removeAll();
    wReturn.table.setItemCount(mappings.size());
    for (int i = 0; i < mappings.size(); i++) {
      SourceToTargetMapping mapping = mappings.get(i);
      TableItem item = wReturn.table.getItem(i);
      item.setText(2, sourceFields.getValueMeta(mapping.getSourcePosition()).getName());
      item.setText(1, targetFields.getValueMeta(mapping.getTargetPosition()).getName());
    }
    wReturn.setRowNums();
    wReturn.optWidth(true);
  }
}
protected void setComboBoxes() {
// Something was changed in the row.
//
String[] fieldNames = ConstUi.sortFieldNames(inputFields);
// return fields
ciReturn[1].setComboValues(fieldNames);
}
  /**
   * Lazily populates the module (Salesforce object) combo box by connecting to Salesforce with the
   * credentials currently entered in the dialog. Runs at most once per dialog session: the
   * {@code gotModule} flag guards re-entry, and {@code getModulesListError} records whether the
   * fetch failed so callers can react. The connection is always closed in the finally block.
   */
  private void getModulesList() {
    if (!gotModule) {
      SalesforceConnection connection = null;
      try {
        // Snapshot the dialog's current settings into a fresh meta object; getInfo() reads the
        // widgets, so the connection below uses what the user typed, not what was last saved.
        SalesforceUpdateMeta meta = new SalesforceUpdateMeta();
        getInfo(meta);
        String url = variables.resolve(meta.getTargetUrl());
        // Remember the previously selected module so it can be re-selected after the refresh.
        String selectedField = meta.getModule();
        wModule.removeAll();
        // Define a new Salesforce connection
        connection =
            new SalesforceConnection(
                log,
                url,
                variables.resolve(meta.getUsername()),
                Utils.resolvePassword(variables, meta.getPassword()));
        // connect to Salesforce
        connection.connect();
        // Fill the combo with all available objects (false presumably excludes
        // query-only/internal objects — TODO confirm against SalesforceConnection).
        wModule.setItems(connection.getAllAvailableObjects(false));
        if (!Utils.isEmpty(selectedField)) {
          wModule.setText(selectedField);
        }
        gotModule = true;
        getModulesListError = false;
      } catch (Exception e) {
        new ErrorDialog(
            shell,
            BaseMessages.getString(PKG, "SalesforceUpdateDialog.ErrorRetrieveModules.DialogTitle"),
            BaseMessages.getString(
                PKG, "SalesforceUpdateDialog.ErrorRetrieveData.ErrorRetrieveModules"),
            e);
        getModulesListError = true;
      } finally {
        if (connection != null) {
          try {
            connection.close();
          } catch (Exception e) {
            /* Ignore */
          }
        }
      }
    }
  }
  /**
   * Asynchronously refreshes the field-name drop-downs of the return-fields table with the fields
   * of the currently selected Salesforce module. Runs at most once ({@code gotFields} guard) and
   * schedules the work on the SWT UI thread via {@code Display.asyncExec}, since widget access is
   * only legal there. Any error while fetching fields is deliberately swallowed: the drop-downs
   * simply stay empty, which is harmless for the user.
   */
  public void setModuleFieldCombo() {
    if (gotFields) {
      return;
    }
    gotFields = true;
    Display display = shell.getDisplay();
    // Guard against a disposed/absent display (e.g. the dialog is being torn down).
    if (!(display == null || display.isDisposed())) {
      display.asyncExec(
          () -> {
            // Clear all field columns first so stale values never survive a module switch.
            for (int i = 0; i < tableFieldColumns.size(); i++) {
              ColumnInfo colInfo = tableFieldColumns.get(i);
              colInfo.setComboValues(new String[] {});
            }
            // The dialog may have been closed between scheduling and execution.
            if (wModule.isDisposed()) {
              return;
            }
            String selectedModule = variables.resolve(wModule.getText());
            if (!Utils.isEmpty(selectedModule)) {
              try {
                // Fetch the field names of the selected module and build the drop-down list.
                String[] fieldsName = getModuleFields();
                if (fieldsName != null) {
                  for (int i = 0; i < tableFieldColumns.size(); i++) {
                    ColumnInfo colInfo = tableFieldColumns.get(i);
                    colInfo.setComboValues(fieldsName);
                  }
                }
              } catch (Exception e) {
                for (int i = 0; i < tableFieldColumns.size(); i++) {
                  ColumnInfo colInfo = tableFieldColumns.get(i);
                  colInfo.setComboValues(new String[] {});
                }
                // ignore any errors here. drop downs will not be
                // filled, but no problem for the user
              }
            }
          });
    }
  }
}
|
googleapis/google-cloud-java | 37,080 | java-biglake/proto-google-cloud-biglake-v1/src/main/java/com/google/cloud/bigquery/biglake/v1/UpdateTableRequest.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/bigquery/biglake/v1/metastore.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.bigquery.biglake.v1;
/**
*
*
* <pre>
* Request message for the UpdateTable method.
* </pre>
*
* Protobuf type {@code google.cloud.bigquery.biglake.v1.UpdateTableRequest}
*/
public final class UpdateTableRequest extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.bigquery.biglake.v1.UpdateTableRequest)
UpdateTableRequestOrBuilder {
  private static final long serialVersionUID = 0L;
  // Use UpdateTableRequest.newBuilder() to construct.
  private UpdateTableRequest(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
    super(builder);
  }
  // No-arg constructor used for the shared default instance; message fields stay null/unset.
  private UpdateTableRequest() {}
  // Called reflectively by the protobuf runtime to allocate fresh instances during parsing.
  @java.lang.Override
  @SuppressWarnings({"unused"})
  protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
    return new UpdateTableRequest();
  }
  // Descriptor for this message type, taken from the generated metastore.proto file descriptor.
  public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
    return com.google.cloud.bigquery.biglake.v1.MetastoreProto
        .internal_static_google_cloud_bigquery_biglake_v1_UpdateTableRequest_descriptor;
  }
  // Maps proto field descriptors to the generated accessors for reflection-based field access.
  @java.lang.Override
  protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internalGetFieldAccessorTable() {
    return com.google.cloud.bigquery.biglake.v1.MetastoreProto
        .internal_static_google_cloud_bigquery_biglake_v1_UpdateTableRequest_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            com.google.cloud.bigquery.biglake.v1.UpdateTableRequest.class,
            com.google.cloud.bigquery.biglake.v1.UpdateTableRequest.Builder.class);
  }
private int bitField0_;
public static final int TABLE_FIELD_NUMBER = 1;
private com.google.cloud.bigquery.biglake.v1.Table table_;
/**
*
*
* <pre>
* Required. The table to update.
*
* The table's `name` field is used to identify the table to update.
* Format:
* projects/{project_id_or_number}/locations/{location_id}/catalogs/{catalog_id}/databases/{database_id}/tables/{table_id}
* </pre>
*
* <code>
* .google.cloud.bigquery.biglake.v1.Table table = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return Whether the table field is set.
*/
@java.lang.Override
public boolean hasTable() {
return ((bitField0_ & 0x00000001) != 0);
}
/**
*
*
* <pre>
* Required. The table to update.
*
* The table's `name` field is used to identify the table to update.
* Format:
* projects/{project_id_or_number}/locations/{location_id}/catalogs/{catalog_id}/databases/{database_id}/tables/{table_id}
* </pre>
*
* <code>
* .google.cloud.bigquery.biglake.v1.Table table = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return The table.
*/
@java.lang.Override
public com.google.cloud.bigquery.biglake.v1.Table getTable() {
return table_ == null
? com.google.cloud.bigquery.biglake.v1.Table.getDefaultInstance()
: table_;
}
/**
*
*
* <pre>
* Required. The table to update.
*
* The table's `name` field is used to identify the table to update.
* Format:
* projects/{project_id_or_number}/locations/{location_id}/catalogs/{catalog_id}/databases/{database_id}/tables/{table_id}
* </pre>
*
* <code>
* .google.cloud.bigquery.biglake.v1.Table table = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
@java.lang.Override
public com.google.cloud.bigquery.biglake.v1.TableOrBuilder getTableOrBuilder() {
return table_ == null
? com.google.cloud.bigquery.biglake.v1.Table.getDefaultInstance()
: table_;
}
public static final int UPDATE_MASK_FIELD_NUMBER = 2;
private com.google.protobuf.FieldMask updateMask_;
/**
*
*
* <pre>
* The list of fields to update.
*
* For the `FieldMask` definition, see
* https://developers.google.com/protocol-buffers/docs/reference/google.protobuf#fieldmask
* If not set, defaults to all of the fields that are allowed to update.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2;</code>
*
* @return Whether the updateMask field is set.
*/
@java.lang.Override
public boolean hasUpdateMask() {
return ((bitField0_ & 0x00000002) != 0);
}
/**
*
*
* <pre>
* The list of fields to update.
*
* For the `FieldMask` definition, see
* https://developers.google.com/protocol-buffers/docs/reference/google.protobuf#fieldmask
* If not set, defaults to all of the fields that are allowed to update.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2;</code>
*
* @return The updateMask.
*/
@java.lang.Override
public com.google.protobuf.FieldMask getUpdateMask() {
return updateMask_ == null ? com.google.protobuf.FieldMask.getDefaultInstance() : updateMask_;
}
/**
*
*
* <pre>
* The list of fields to update.
*
* For the `FieldMask` definition, see
* https://developers.google.com/protocol-buffers/docs/reference/google.protobuf#fieldmask
* If not set, defaults to all of the fields that are allowed to update.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2;</code>
*/
@java.lang.Override
public com.google.protobuf.FieldMaskOrBuilder getUpdateMaskOrBuilder() {
return updateMask_ == null ? com.google.protobuf.FieldMask.getDefaultInstance() : updateMask_;
}
  // Tri-state cache for isInitialized(): -1 = not computed, 0 = false, 1 = true.
  private byte memoizedIsInitialized = -1;
  // This message declares no proto2-style required fields, so it is always initialized.
  @java.lang.Override
  public final boolean isInitialized() {
    byte isInitialized = memoizedIsInitialized;
    if (isInitialized == 1) return true;
    if (isInitialized == 0) return false;
    memoizedIsInitialized = 1;
    return true;
  }
  // Serializes the message to the wire: only fields whose presence bit is set are written
  // (table = field 1, update_mask = field 2), followed by any preserved unknown fields.
  @java.lang.Override
  public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
    if (((bitField0_ & 0x00000001) != 0)) {
      output.writeMessage(1, getTable());
    }
    if (((bitField0_ & 0x00000002) != 0)) {
      output.writeMessage(2, getUpdateMask());
    }
    getUnknownFields().writeTo(output);
  }
  // Computes (and memoizes in memoizedSize; -1 means "not yet computed") the exact number of
  // bytes writeTo() will emit. Safe to cache because the message is immutable once built.
  @java.lang.Override
  public int getSerializedSize() {
    int size = memoizedSize;
    if (size != -1) return size;
    size = 0;
    if (((bitField0_ & 0x00000001) != 0)) {
      size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, getTable());
    }
    if (((bitField0_ & 0x00000002) != 0)) {
      size += com.google.protobuf.CodedOutputStream.computeMessageSize(2, getUpdateMask());
    }
    size += getUnknownFields().getSerializedSize();
    memoizedSize = size;
    return size;
  }
  // Value equality: two requests are equal iff each field has the same presence and, when
  // present, equal contents — including any unknown fields carried through parsing.
  @java.lang.Override
  public boolean equals(final java.lang.Object obj) {
    if (obj == this) {
      return true;
    }
    if (!(obj instanceof com.google.cloud.bigquery.biglake.v1.UpdateTableRequest)) {
      return super.equals(obj);
    }
    com.google.cloud.bigquery.biglake.v1.UpdateTableRequest other =
        (com.google.cloud.bigquery.biglake.v1.UpdateTableRequest) obj;
    if (hasTable() != other.hasTable()) return false;
    if (hasTable()) {
      if (!getTable().equals(other.getTable())) return false;
    }
    if (hasUpdateMask() != other.hasUpdateMask()) return false;
    if (hasUpdateMask()) {
      if (!getUpdateMask().equals(other.getUpdateMask())) return false;
    }
    if (!getUnknownFields().equals(other.getUnknownFields())) return false;
    return true;
  }
  // Hash consistent with equals(): mixes the descriptor, each present field tagged by its field
  // number, and the unknown fields. Memoized in memoizedHashCode (0 = not yet computed).
  @java.lang.Override
  public int hashCode() {
    if (memoizedHashCode != 0) {
      return memoizedHashCode;
    }
    int hash = 41;
    hash = (19 * hash) + getDescriptor().hashCode();
    if (hasTable()) {
      hash = (37 * hash) + TABLE_FIELD_NUMBER;
      hash = (53 * hash) + getTable().hashCode();
    }
    if (hasUpdateMask()) {
      hash = (37 * hash) + UPDATE_MASK_FIELD_NUMBER;
      hash = (53 * hash) + getUpdateMask().hashCode();
    }
    hash = (29 * hash) + getUnknownFields().hashCode();
    memoizedHashCode = hash;
    return hash;
  }
  // Standard generated parse entry points: each overload delegates to PARSER (optionally with an
  // extension registry). Stream variants wrap protobuf I/O errors via parseWithIOException;
  // parseDelimitedFrom expects a varint length prefix before the message bytes.
  public static com.google.cloud.bigquery.biglake.v1.UpdateTableRequest parseFrom(
      java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.cloud.bigquery.biglake.v1.UpdateTableRequest parseFrom(
      java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.cloud.bigquery.biglake.v1.UpdateTableRequest parseFrom(
      com.google.protobuf.ByteString data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.cloud.bigquery.biglake.v1.UpdateTableRequest parseFrom(
      com.google.protobuf.ByteString data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.cloud.bigquery.biglake.v1.UpdateTableRequest parseFrom(byte[] data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.cloud.bigquery.biglake.v1.UpdateTableRequest parseFrom(
      byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.cloud.bigquery.biglake.v1.UpdateTableRequest parseFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }
  public static com.google.cloud.bigquery.biglake.v1.UpdateTableRequest parseFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }
  public static com.google.cloud.bigquery.biglake.v1.UpdateTableRequest parseDelimitedFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
  }
  public static com.google.cloud.bigquery.biglake.v1.UpdateTableRequest parseDelimitedFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
        PARSER, input, extensionRegistry);
  }
  public static com.google.cloud.bigquery.biglake.v1.UpdateTableRequest parseFrom(
      com.google.protobuf.CodedInputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }
  public static com.google.cloud.bigquery.biglake.v1.UpdateTableRequest parseFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }
  // Builder factories: newBuilder() starts from the default instance; newBuilder(prototype)
  // pre-populates from an existing message; toBuilder() avoids a redundant mergeFrom when
  // called on the default instance itself.
  @java.lang.Override
  public Builder newBuilderForType() {
    return newBuilder();
  }
  public static Builder newBuilder() {
    return DEFAULT_INSTANCE.toBuilder();
  }
  public static Builder newBuilder(
      com.google.cloud.bigquery.biglake.v1.UpdateTableRequest prototype) {
    return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
  }
  @java.lang.Override
  public Builder toBuilder() {
    return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
  }
  // Internal variant used by parent builders to propagate change notifications.
  @java.lang.Override
  protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
    Builder builder = new Builder(parent);
    return builder;
  }
/**
*
*
* <pre>
* Request message for the UpdateTable method.
* </pre>
*
* Protobuf type {@code google.cloud.bigquery.biglake.v1.UpdateTableRequest}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.cloud.bigquery.biglake.v1.UpdateTableRequest)
com.google.cloud.bigquery.biglake.v1.UpdateTableRequestOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.bigquery.biglake.v1.MetastoreProto
.internal_static_google_cloud_bigquery_biglake_v1_UpdateTableRequest_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.bigquery.biglake.v1.MetastoreProto
.internal_static_google_cloud_bigquery_biglake_v1_UpdateTableRequest_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.bigquery.biglake.v1.UpdateTableRequest.class,
com.google.cloud.bigquery.biglake.v1.UpdateTableRequest.Builder.class);
}
// Construct using com.google.cloud.bigquery.biglake.v1.UpdateTableRequest.newBuilder()
private Builder() {
maybeForceBuilderInitialization();
}
private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
private void maybeForceBuilderInitialization() {
if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) {
getTableFieldBuilder();
getUpdateMaskFieldBuilder();
}
}
@java.lang.Override
public Builder clear() {
super.clear();
bitField0_ = 0;
table_ = null;
if (tableBuilder_ != null) {
tableBuilder_.dispose();
tableBuilder_ = null;
}
updateMask_ = null;
if (updateMaskBuilder_ != null) {
updateMaskBuilder_.dispose();
updateMaskBuilder_ = null;
}
return this;
}
@java.lang.Override
public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
return com.google.cloud.bigquery.biglake.v1.MetastoreProto
.internal_static_google_cloud_bigquery_biglake_v1_UpdateTableRequest_descriptor;
}
@java.lang.Override
public com.google.cloud.bigquery.biglake.v1.UpdateTableRequest getDefaultInstanceForType() {
return com.google.cloud.bigquery.biglake.v1.UpdateTableRequest.getDefaultInstance();
}
@java.lang.Override
public com.google.cloud.bigquery.biglake.v1.UpdateTableRequest build() {
com.google.cloud.bigquery.biglake.v1.UpdateTableRequest result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public com.google.cloud.bigquery.biglake.v1.UpdateTableRequest buildPartial() {
com.google.cloud.bigquery.biglake.v1.UpdateTableRequest result =
new com.google.cloud.bigquery.biglake.v1.UpdateTableRequest(this);
if (bitField0_ != 0) {
buildPartial0(result);
}
onBuilt();
return result;
}
private void buildPartial0(com.google.cloud.bigquery.biglake.v1.UpdateTableRequest result) {
int from_bitField0_ = bitField0_;
int to_bitField0_ = 0;
if (((from_bitField0_ & 0x00000001) != 0)) {
result.table_ = tableBuilder_ == null ? table_ : tableBuilder_.build();
to_bitField0_ |= 0x00000001;
}
if (((from_bitField0_ & 0x00000002) != 0)) {
result.updateMask_ = updateMaskBuilder_ == null ? updateMask_ : updateMaskBuilder_.build();
to_bitField0_ |= 0x00000002;
}
result.bitField0_ |= to_bitField0_;
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof com.google.cloud.bigquery.biglake.v1.UpdateTableRequest) {
return mergeFrom((com.google.cloud.bigquery.biglake.v1.UpdateTableRequest) other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(com.google.cloud.bigquery.biglake.v1.UpdateTableRequest other) {
if (other == com.google.cloud.bigquery.biglake.v1.UpdateTableRequest.getDefaultInstance())
return this;
if (other.hasTable()) {
mergeTable(other.getTable());
}
if (other.hasUpdateMask()) {
mergeUpdateMask(other.getUpdateMask());
}
this.mergeUnknownFields(other.getUnknownFields());
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
return true;
}
@java.lang.Override
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
case 10:
{
input.readMessage(getTableFieldBuilder().getBuilder(), extensionRegistry);
bitField0_ |= 0x00000001;
break;
} // case 10
case 18:
{
input.readMessage(getUpdateMaskFieldBuilder().getBuilder(), extensionRegistry);
bitField0_ |= 0x00000002;
break;
} // case 18
default:
{
if (!super.parseUnknownField(input, extensionRegistry, tag)) {
done = true; // was an endgroup tag
}
break;
} // default:
} // switch (tag)
} // while (!done)
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.unwrapIOException();
} finally {
onChanged();
} // finally
return this;
}
private int bitField0_;
private com.google.cloud.bigquery.biglake.v1.Table table_;
private com.google.protobuf.SingleFieldBuilderV3<
com.google.cloud.bigquery.biglake.v1.Table,
com.google.cloud.bigquery.biglake.v1.Table.Builder,
com.google.cloud.bigquery.biglake.v1.TableOrBuilder>
tableBuilder_;
/**
*
*
* <pre>
* Required. The table to update.
*
* The table's `name` field is used to identify the table to update.
* Format:
* projects/{project_id_or_number}/locations/{location_id}/catalogs/{catalog_id}/databases/{database_id}/tables/{table_id}
* </pre>
*
* <code>
* .google.cloud.bigquery.biglake.v1.Table table = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return Whether the table field is set.
*/
public boolean hasTable() {
return ((bitField0_ & 0x00000001) != 0);
}
/**
*
*
* <pre>
* Required. The table to update.
*
* The table's `name` field is used to identify the table to update.
* Format:
* projects/{project_id_or_number}/locations/{location_id}/catalogs/{catalog_id}/databases/{database_id}/tables/{table_id}
* </pre>
*
* <code>
* .google.cloud.bigquery.biglake.v1.Table table = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return The table.
*/
public com.google.cloud.bigquery.biglake.v1.Table getTable() {
if (tableBuilder_ == null) {
return table_ == null
? com.google.cloud.bigquery.biglake.v1.Table.getDefaultInstance()
: table_;
} else {
return tableBuilder_.getMessage();
}
}
/**
*
*
* <pre>
* Required. The table to update.
*
* The table's `name` field is used to identify the table to update.
* Format:
* projects/{project_id_or_number}/locations/{location_id}/catalogs/{catalog_id}/databases/{database_id}/tables/{table_id}
* </pre>
*
* <code>
* .google.cloud.bigquery.biglake.v1.Table table = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public Builder setTable(com.google.cloud.bigquery.biglake.v1.Table value) {
if (tableBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
table_ = value;
} else {
tableBuilder_.setMessage(value);
}
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. The table to update.
*
* The table's `name` field is used to identify the table to update.
* Format:
* projects/{project_id_or_number}/locations/{location_id}/catalogs/{catalog_id}/databases/{database_id}/tables/{table_id}
* </pre>
*
* <code>
* .google.cloud.bigquery.biglake.v1.Table table = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public Builder setTable(com.google.cloud.bigquery.biglake.v1.Table.Builder builderForValue) {
if (tableBuilder_ == null) {
table_ = builderForValue.build();
} else {
tableBuilder_.setMessage(builderForValue.build());
}
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. The table to update.
*
* The table's `name` field is used to identify the table to update.
* Format:
* projects/{project_id_or_number}/locations/{location_id}/catalogs/{catalog_id}/databases/{database_id}/tables/{table_id}
* </pre>
*
* <code>
* .google.cloud.bigquery.biglake.v1.Table table = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public Builder mergeTable(com.google.cloud.bigquery.biglake.v1.Table value) {
if (tableBuilder_ == null) {
if (((bitField0_ & 0x00000001) != 0)
&& table_ != null
&& table_ != com.google.cloud.bigquery.biglake.v1.Table.getDefaultInstance()) {
getTableBuilder().mergeFrom(value);
} else {
table_ = value;
}
} else {
tableBuilder_.mergeFrom(value);
}
if (table_ != null) {
bitField0_ |= 0x00000001;
onChanged();
}
return this;
}
/**
*
*
* <pre>
* Required. The table to update.
*
* The table's `name` field is used to identify the table to update.
* Format:
* projects/{project_id_or_number}/locations/{location_id}/catalogs/{catalog_id}/databases/{database_id}/tables/{table_id}
* </pre>
*
* <code>
* .google.cloud.bigquery.biglake.v1.Table table = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public Builder clearTable() {
bitField0_ = (bitField0_ & ~0x00000001);
table_ = null;
if (tableBuilder_ != null) {
tableBuilder_.dispose();
tableBuilder_ = null;
}
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. The table to update.
*
* The table's `name` field is used to identify the table to update.
* Format:
* projects/{project_id_or_number}/locations/{location_id}/catalogs/{catalog_id}/databases/{database_id}/tables/{table_id}
* </pre>
*
* <code>
* .google.cloud.bigquery.biglake.v1.Table table = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public com.google.cloud.bigquery.biglake.v1.Table.Builder getTableBuilder() {
bitField0_ |= 0x00000001;
onChanged();
return getTableFieldBuilder().getBuilder();
}
/**
*
*
* <pre>
* Required. The table to update.
*
* The table's `name` field is used to identify the table to update.
* Format:
* projects/{project_id_or_number}/locations/{location_id}/catalogs/{catalog_id}/databases/{database_id}/tables/{table_id}
* </pre>
*
* <code>
* .google.cloud.bigquery.biglake.v1.Table table = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public com.google.cloud.bigquery.biglake.v1.TableOrBuilder getTableOrBuilder() {
if (tableBuilder_ != null) {
return tableBuilder_.getMessageOrBuilder();
} else {
return table_ == null
? com.google.cloud.bigquery.biglake.v1.Table.getDefaultInstance()
: table_;
}
}
/**
*
*
* <pre>
* Required. The table to update.
*
* The table's `name` field is used to identify the table to update.
* Format:
* projects/{project_id_or_number}/locations/{location_id}/catalogs/{catalog_id}/databases/{database_id}/tables/{table_id}
* </pre>
*
* <code>
* .google.cloud.bigquery.biglake.v1.Table table = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
private com.google.protobuf.SingleFieldBuilderV3<
com.google.cloud.bigquery.biglake.v1.Table,
com.google.cloud.bigquery.biglake.v1.Table.Builder,
com.google.cloud.bigquery.biglake.v1.TableOrBuilder>
getTableFieldBuilder() {
if (tableBuilder_ == null) {
tableBuilder_ =
new com.google.protobuf.SingleFieldBuilderV3<
com.google.cloud.bigquery.biglake.v1.Table,
com.google.cloud.bigquery.biglake.v1.Table.Builder,
com.google.cloud.bigquery.biglake.v1.TableOrBuilder>(
getTable(), getParentForChildren(), isClean());
table_ = null;
}
return tableBuilder_;
}
private com.google.protobuf.FieldMask updateMask_;
private com.google.protobuf.SingleFieldBuilderV3<
com.google.protobuf.FieldMask,
com.google.protobuf.FieldMask.Builder,
com.google.protobuf.FieldMaskOrBuilder>
updateMaskBuilder_;
/**
*
*
* <pre>
* The list of fields to update.
*
* For the `FieldMask` definition, see
* https://developers.google.com/protocol-buffers/docs/reference/google.protobuf#fieldmask
* If not set, defaults to all of the fields that are allowed to update.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2;</code>
*
* @return Whether the updateMask field is set.
*/
public boolean hasUpdateMask() {
return ((bitField0_ & 0x00000002) != 0);
}
/**
*
*
* <pre>
* The list of fields to update.
*
* For the `FieldMask` definition, see
* https://developers.google.com/protocol-buffers/docs/reference/google.protobuf#fieldmask
* If not set, defaults to all of the fields that are allowed to update.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2;</code>
*
* @return The updateMask.
*/
public com.google.protobuf.FieldMask getUpdateMask() {
if (updateMaskBuilder_ == null) {
return updateMask_ == null
? com.google.protobuf.FieldMask.getDefaultInstance()
: updateMask_;
} else {
return updateMaskBuilder_.getMessage();
}
}
/**
*
*
* <pre>
* The list of fields to update.
*
* For the `FieldMask` definition, see
* https://developers.google.com/protocol-buffers/docs/reference/google.protobuf#fieldmask
* If not set, defaults to all of the fields that are allowed to update.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2;</code>
*/
public Builder setUpdateMask(com.google.protobuf.FieldMask value) {
if (updateMaskBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
updateMask_ = value;
} else {
updateMaskBuilder_.setMessage(value);
}
bitField0_ |= 0x00000002;
onChanged();
return this;
}
/**
*
*
* <pre>
* The list of fields to update.
*
* For the `FieldMask` definition, see
* https://developers.google.com/protocol-buffers/docs/reference/google.protobuf#fieldmask
* If not set, defaults to all of the fields that are allowed to update.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2;</code>
*/
public Builder setUpdateMask(com.google.protobuf.FieldMask.Builder builderForValue) {
if (updateMaskBuilder_ == null) {
updateMask_ = builderForValue.build();
} else {
updateMaskBuilder_.setMessage(builderForValue.build());
}
bitField0_ |= 0x00000002;
onChanged();
return this;
}
/**
*
*
* <pre>
* The list of fields to update.
*
* For the `FieldMask` definition, see
* https://developers.google.com/protocol-buffers/docs/reference/google.protobuf#fieldmask
* If not set, defaults to all of the fields that are allowed to update.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2;</code>
*/
public Builder mergeUpdateMask(com.google.protobuf.FieldMask value) {
if (updateMaskBuilder_ == null) {
if (((bitField0_ & 0x00000002) != 0)
&& updateMask_ != null
&& updateMask_ != com.google.protobuf.FieldMask.getDefaultInstance()) {
getUpdateMaskBuilder().mergeFrom(value);
} else {
updateMask_ = value;
}
} else {
updateMaskBuilder_.mergeFrom(value);
}
if (updateMask_ != null) {
bitField0_ |= 0x00000002;
onChanged();
}
return this;
}
/**
*
*
* <pre>
* The list of fields to update.
*
* For the `FieldMask` definition, see
* https://developers.google.com/protocol-buffers/docs/reference/google.protobuf#fieldmask
* If not set, defaults to all of the fields that are allowed to update.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2;</code>
*/
public Builder clearUpdateMask() {
bitField0_ = (bitField0_ & ~0x00000002);
updateMask_ = null;
if (updateMaskBuilder_ != null) {
updateMaskBuilder_.dispose();
updateMaskBuilder_ = null;
}
onChanged();
return this;
}
/**
*
*
* <pre>
* The list of fields to update.
*
* For the `FieldMask` definition, see
* https://developers.google.com/protocol-buffers/docs/reference/google.protobuf#fieldmask
* If not set, defaults to all of the fields that are allowed to update.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2;</code>
*/
public com.google.protobuf.FieldMask.Builder getUpdateMaskBuilder() {
bitField0_ |= 0x00000002;
onChanged();
return getUpdateMaskFieldBuilder().getBuilder();
}
/**
*
*
* <pre>
* The list of fields to update.
*
* For the `FieldMask` definition, see
* https://developers.google.com/protocol-buffers/docs/reference/google.protobuf#fieldmask
* If not set, defaults to all of the fields that are allowed to update.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2;</code>
*/
    public com.google.protobuf.FieldMaskOrBuilder getUpdateMaskOrBuilder() {
      if (updateMaskBuilder_ != null) {
        return updateMaskBuilder_.getMessageOrBuilder();
      } else {
        // Never returns null: substitutes the default instance when unset.
        return updateMask_ == null
            ? com.google.protobuf.FieldMask.getDefaultInstance()
            : updateMask_;
      }
    }
/**
*
*
* <pre>
* The list of fields to update.
*
* For the `FieldMask` definition, see
* https://developers.google.com/protocol-buffers/docs/reference/google.protobuf#fieldmask
* If not set, defaults to all of the fields that are allowed to update.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2;</code>
*/
    private com.google.protobuf.SingleFieldBuilderV3<
            com.google.protobuf.FieldMask,
            com.google.protobuf.FieldMask.Builder,
            com.google.protobuf.FieldMaskOrBuilder>
        getUpdateMaskFieldBuilder() {
      // Lazily creates the single-field builder; ownership of the message
      // transfers to it, so the raw field reference is nulled afterwards.
      if (updateMaskBuilder_ == null) {
        updateMaskBuilder_ =
            new com.google.protobuf.SingleFieldBuilderV3<
                com.google.protobuf.FieldMask,
                com.google.protobuf.FieldMask.Builder,
                com.google.protobuf.FieldMaskOrBuilder>(
                getUpdateMask(), getParentForChildren(), isClean());
        updateMask_ = null;
      }
      return updateMaskBuilder_;
    }
    // Standard generated pass-throughs preserving unknown (unrecognized) fields.
    @java.lang.Override
    public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.setUnknownFields(unknownFields);
    }
    @java.lang.Override
    public final Builder mergeUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.mergeUnknownFields(unknownFields);
    }
// @@protoc_insertion_point(builder_scope:google.cloud.bigquery.biglake.v1.UpdateTableRequest)
}
// @@protoc_insertion_point(class_scope:google.cloud.bigquery.biglake.v1.UpdateTableRequest)
  // Shared immutable default instance for this message type.
  private static final com.google.cloud.bigquery.biglake.v1.UpdateTableRequest DEFAULT_INSTANCE;
  static {
    DEFAULT_INSTANCE = new com.google.cloud.bigquery.biglake.v1.UpdateTableRequest();
  }
  public static com.google.cloud.bigquery.biglake.v1.UpdateTableRequest getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }
  // Wire-format parser; on any parse failure the partially built message is
  // attached to the thrown InvalidProtocolBufferException.
  private static final com.google.protobuf.Parser<UpdateTableRequest> PARSER =
      new com.google.protobuf.AbstractParser<UpdateTableRequest>() {
        @java.lang.Override
        public UpdateTableRequest parsePartialFrom(
            com.google.protobuf.CodedInputStream input,
            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
            throws com.google.protobuf.InvalidProtocolBufferException {
          Builder builder = newBuilder();
          try {
            builder.mergeFrom(input, extensionRegistry);
          } catch (com.google.protobuf.InvalidProtocolBufferException e) {
            throw e.setUnfinishedMessage(builder.buildPartial());
          } catch (com.google.protobuf.UninitializedMessageException e) {
            throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
          } catch (java.io.IOException e) {
            // Wrap raw I/O failures in the protobuf exception type.
            throw new com.google.protobuf.InvalidProtocolBufferException(e)
                .setUnfinishedMessage(builder.buildPartial());
          }
          return builder.buildPartial();
        }
      };
  public static com.google.protobuf.Parser<UpdateTableRequest> parser() {
    return PARSER;
  }
  @java.lang.Override
  public com.google.protobuf.Parser<UpdateTableRequest> getParserForType() {
    return PARSER;
  }
  @java.lang.Override
  public com.google.cloud.bigquery.biglake.v1.UpdateTableRequest getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }
}
|
apache/httpcomponents-client | 36,567 | httpclient5-cache/src/test/java/org/apache/hc/client5/http/impl/cache/TestBasicHttpCache.java | /*
* ====================================================================
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
* ====================================================================
*
* This software consists of voluntary contributions made by many
* individuals on behalf of the Apache Software Foundation. For more
* information on the Apache Software Foundation, please see
* <http://www.apache.org/>.
*
*/
package org.apache.hc.client5.http.impl.cache;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertFalse;
import static org.junit.jupiter.api.Assertions.assertNotNull;
import static org.junit.jupiter.api.Assertions.assertNull;
import static org.junit.jupiter.api.Assertions.assertSame;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.verifyNoMoreInteractions;
import java.net.URI;
import java.time.Instant;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.stream.Collectors;
import org.apache.hc.client5.http.HeadersMatcher;
import org.apache.hc.client5.http.cache.HttpCacheEntry;
import org.apache.hc.client5.http.classic.methods.HttpGet;
import org.apache.hc.client5.http.utils.DateUtils;
import org.apache.hc.core5.http.HttpHeaders;
import org.apache.hc.core5.http.HttpHost;
import org.apache.hc.core5.http.HttpRequest;
import org.apache.hc.core5.http.HttpResponse;
import org.apache.hc.core5.http.HttpStatus;
import org.apache.hc.core5.http.message.BasicHeader;
import org.apache.hc.core5.http.message.BasicHttpRequest;
import org.apache.hc.core5.http.message.BasicHttpResponse;
import org.apache.hc.core5.net.URIBuilder;
import org.apache.hc.core5.util.ByteArrayBuffer;
import org.hamcrest.MatcherAssert;
import org.junit.jupiter.api.Assertions;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import org.mockito.Mockito;
class TestBasicHttpCache {
    // Origin host shared by all tests ("foo.example.com"); initialized in setUp().
    private HttpHost host;
    // Reference instants used for Date headers: 'now' and ten seconds earlier.
    private Instant now;
    private Instant tenSecondsAgo;
    // Spied in-memory storage backend so interactions can be verified with Mockito.
    private SimpleHttpCacheStorage backing;
    // Cache under test, wrapping 'backing'.
    private BasicHttpCache impl;
    @BeforeEach
    void setUp() {
        // Fresh fixtures per test: a spied in-memory storage backend wrapped by
        // the BasicHttpCache under test.
        host = new HttpHost("foo.example.com");
        now = Instant.now();
        tenSecondsAgo = now.minusSeconds(10);
        backing = Mockito.spy(new SimpleHttpCacheStorage());
        impl = new BasicHttpCache(new HeapResourceFactory(), backing);
    }
@Test
void testGetCacheEntryReturnsNullOnCacheMiss() {
final HttpHost host = new HttpHost("foo.example.com");
final HttpRequest request = new HttpGet("http://foo.example.com/bar");
final CacheMatch result = impl.match(host, request);
assertNull(result);
}
@Test
void testGetCacheEntryFetchesFromCacheOnCacheHitIfNoVariants() {
final HttpCacheEntry entry = HttpTestUtils.makeCacheEntry();
assertFalse(entry.hasVariants());
final HttpHost host = new HttpHost("foo.example.com");
final HttpRequest request = new HttpGet("http://foo.example.com/bar");
final String key = CacheKeyGenerator.INSTANCE.generateKey(host, request);
backing.map.put(key,entry);
final CacheMatch result = impl.match(host, request);
assertNotNull(result);
assertNotNull(result.hit);
assertSame(entry, result.hit.entry);
}
@Test
void testGetCacheEntryReturnsNullIfNoVariantInCache() {
final HttpRequest origRequest = new HttpGet("http://foo.example.com/bar");
origRequest.setHeader("Accept-Encoding","gzip");
final ByteArrayBuffer buf = HttpTestUtils.makeRandomBuffer(128);
final HttpResponse origResponse = new BasicHttpResponse(HttpStatus.SC_OK, "OK");
origResponse.setHeader("Date", DateUtils.formatStandardDate(now));
origResponse.setHeader("Cache-Control", "max-age=3600, public");
origResponse.setHeader("ETag", "\"etag\"");
origResponse.setHeader("Vary", "Accept-Encoding");
origResponse.setHeader("Content-Encoding","gzip");
impl.store(host, origRequest, origResponse, buf, now, now);
final HttpRequest request = new HttpGet("http://foo.example.com/bar");
final CacheMatch result = impl.match(host, request);
assertNotNull(result);
assertNull(result.hit);
}
@Test
void testGetCacheEntryReturnsVariantIfPresentInCache() {
final HttpRequest origRequest = new HttpGet("http://foo.example.com/bar");
origRequest.setHeader("Accept-Encoding","gzip");
final ByteArrayBuffer buf = HttpTestUtils.makeRandomBuffer(128);
final HttpResponse origResponse = new BasicHttpResponse(HttpStatus.SC_OK, "OK");
origResponse.setHeader("Date", DateUtils.formatStandardDate(now));
origResponse.setHeader("Cache-Control", "max-age=3600, public");
origResponse.setHeader("ETag", "\"etag\"");
origResponse.setHeader("Vary", "Accept-Encoding");
origResponse.setHeader("Content-Encoding","gzip");
impl.store(host, origRequest, origResponse, buf, now, now);
final HttpRequest request = new HttpGet("http://foo.example.com/bar");
request.setHeader("Accept-Encoding","gzip");
final CacheMatch result = impl.match(host, request);
assertNotNull(result);
assertNotNull(result.hit);
}
    @Test
    void testGetCacheEntryReturnsVariantWithMostRecentDateHeader() {
        // When multiple stored variants satisfy the request, the cache must select
        // the one with the most recent Date header (here origResponse2 / "etag2").
        final HttpRequest origRequest = new HttpGet("http://foo.example.com/bar");
        origRequest.setHeader("Accept-Encoding", "gzip");
        final ByteArrayBuffer buf = HttpTestUtils.makeRandomBuffer(128);
        // Create two response variants with different Date headers
        final HttpResponse origResponse1 = new BasicHttpResponse(HttpStatus.SC_OK, "OK");
        origResponse1.setHeader(HttpHeaders.DATE, DateUtils.formatStandardDate(now.minusSeconds(3600)));
        origResponse1.setHeader(HttpHeaders.CACHE_CONTROL, "max-age=3600, public");
        origResponse1.setHeader(HttpHeaders.ETAG, "\"etag1\"");
        origResponse1.setHeader(HttpHeaders.VARY, "Accept-Encoding");
        final HttpResponse origResponse2 = new BasicHttpResponse(HttpStatus.SC_OK, "OK");
        origResponse2.setHeader(HttpHeaders.DATE, DateUtils.formatStandardDate(now));
        origResponse2.setHeader(HttpHeaders.CACHE_CONTROL, "max-age=3600, public");
        origResponse2.setHeader(HttpHeaders.ETAG, "\"etag2\"");
        origResponse2.setHeader(HttpHeaders.VARY, "Accept-Encoding");
        // Store the two variants in cache
        impl.store(host, origRequest, origResponse1, buf, now, now);
        impl.store(host, origRequest, origResponse2, buf, now, now);
        final HttpRequest request = new HttpGet("http://foo.example.com/bar");
        request.setHeader("Accept-Encoding", "gzip");
        final CacheMatch result = impl.match(host, request);
        assertNotNull(result);
        assertNotNull(result.hit);
        final HttpCacheEntry entry = result.hit.entry;
        assertNotNull(entry);
        // Retrieve the ETag header value from the original response and assert that
        // the returned cache entry has the same ETag value
        final String expectedEtag = origResponse2.getFirstHeader(HttpHeaders.ETAG).getValue();
        final String actualEtag = entry.getFirstHeader(HttpHeaders.ETAG).getValue();
        assertEquals(expectedEtag, actualEtag);
    }
@Test
void testGetVariantsRootNoVariants() {
final HttpCacheEntry root = HttpTestUtils.makeCacheEntry();
final List<CacheHit> variants = impl.getVariants(new CacheHit("root-key", root));
assertNotNull(variants);
assertEquals(0, variants.size());
}
@Test
void testGetVariantsRootNonExistentVariants() {
final Set<String> varinats = new HashSet<>();
varinats.add("variant1");
varinats.add("variant2");
final HttpCacheEntry root = HttpTestUtils.makeCacheEntry(varinats);
final List<CacheHit> variants = impl.getVariants(new CacheHit("root-key", root));
assertNotNull(variants);
assertEquals(0, variants.size());
}
@Test
void testGetVariantCacheEntriesReturnsAllVariants() throws Exception {
final HttpHost host = new HttpHost("foo.example.com");
final URI uri = new URI("http://foo.example.com/bar");
final HttpRequest req1 = new HttpGet(uri);
req1.setHeader("Accept-Encoding", "gzip");
final String rootKey = CacheKeyGenerator.INSTANCE.generateKey(uri);
final HttpResponse resp1 = HttpTestUtils.make200Response();
resp1.setHeader("Date", DateUtils.formatStandardDate(now));
resp1.setHeader("Cache-Control", "max-age=3600, public");
resp1.setHeader("ETag", "\"etag1\"");
resp1.setHeader("Vary", "Accept-Encoding");
resp1.setHeader("Content-Encoding","gzip");
final HttpRequest req2 = new HttpGet(uri);
req2.setHeader("Accept-Encoding", "identity");
final HttpResponse resp2 = HttpTestUtils.make200Response();
resp2.setHeader("Date", DateUtils.formatStandardDate(now));
resp2.setHeader("Cache-Control", "max-age=3600, public");
resp2.setHeader("ETag", "\"etag2\"");
resp2.setHeader("Vary", "Accept-Encoding");
resp2.setHeader("Content-Encoding","gzip");
final CacheHit hit1 = impl.store(host, req1, resp1, null, now, now);
final CacheHit hit2 = impl.store(host, req2, resp2, null, now, now);
final Set<String> variants = new HashSet<>();
variants.add("{accept-encoding=gzip}");
variants.add("{accept-encoding=identity}");
final Map<String, HttpCacheEntry> variantMap = impl.getVariants(new CacheHit(hit1.rootKey,
HttpTestUtils.makeCacheEntry(variants))).stream()
.collect(Collectors.toMap(CacheHit::getEntryKey, e -> e.entry));
assertNotNull(variantMap);
assertEquals(2, variantMap.size());
MatcherAssert.assertThat(variantMap.get("{accept-encoding=gzip}" + rootKey),
HttpCacheEntryMatcher.equivalent(hit1.entry));
MatcherAssert.assertThat(variantMap.get("{accept-encoding=identity}" + rootKey),
HttpCacheEntryMatcher.equivalent(hit2.entry));
}
@Test
void testUpdateCacheEntry() throws Exception {
final HttpHost host = new HttpHost("foo.example.com");
final URI uri = new URI("http://foo.example.com/bar");
final HttpRequest req1 = new HttpGet(uri);
final HttpResponse resp1 = HttpTestUtils.make200Response();
resp1.setHeader("Date", DateUtils.formatStandardDate(tenSecondsAgo));
resp1.setHeader("Cache-Control", "max-age=3600, public");
resp1.setHeader("ETag", "\"etag1\"");
resp1.setHeader("Content-Encoding","gzip");
final HttpRequest revalidate = new HttpGet(uri);
revalidate.setHeader("If-None-Match","\"etag1\"");
final HttpResponse resp2 = HttpTestUtils.make304Response();
resp2.setHeader("Date", DateUtils.formatStandardDate(now));
resp2.setHeader("Cache-Control", "max-age=3600, public");
final CacheHit hit1 = impl.store(host, req1, resp1, null, now, now);
Assertions.assertNotNull(hit1);
Assertions.assertEquals(1, backing.map.size());
Assertions.assertSame(hit1.entry, backing.map.get(hit1.getEntryKey()));
final CacheHit updated = impl.update(hit1, host, req1, resp2, now, now);
Assertions.assertNotNull(updated);
Assertions.assertEquals(1, backing.map.size());
Assertions.assertSame(updated.entry, backing.map.get(hit1.getEntryKey()));
MatcherAssert.assertThat(
updated.entry.getHeaders(),
HeadersMatcher.same(
new BasicHeader("Server", "MockOrigin/1.0"),
new BasicHeader("ETag", "\"etag1\""),
new BasicHeader("Content-Encoding","gzip"),
new BasicHeader("Date", DateUtils.formatStandardDate(now)),
new BasicHeader("Cache-Control", "max-age=3600, public")
));
}
@Test
void testUpdateVariantCacheEntry() throws Exception {
final HttpHost host = new HttpHost("foo.example.com");
final URI uri = new URI("http://foo.example.com/bar");
final HttpRequest req1 = new HttpGet(uri);
req1.setHeader("User-Agent", "agent1");
final HttpResponse resp1 = HttpTestUtils.make200Response();
resp1.setHeader("Date", DateUtils.formatStandardDate(tenSecondsAgo));
resp1.setHeader("Cache-Control", "max-age=3600, public");
resp1.setHeader("ETag", "\"etag1\"");
resp1.setHeader("Content-Encoding","gzip");
resp1.setHeader("Vary", "User-Agent");
final HttpRequest revalidate = new HttpGet(uri);
revalidate.setHeader("If-None-Match","\"etag1\"");
final HttpResponse resp2 = HttpTestUtils.make304Response();
resp2.setHeader("Date", DateUtils.formatStandardDate(now));
resp2.setHeader("Cache-Control", "max-age=3600, public");
final CacheHit hit1 = impl.store(host, req1, resp1, null, now, now);
Assertions.assertNotNull(hit1);
Assertions.assertEquals(2, backing.map.size());
Assertions.assertSame(hit1.entry, backing.map.get(hit1.getEntryKey()));
final CacheHit updated = impl.update(hit1, host, req1, resp2, now, now);
Assertions.assertNotNull(updated);
Assertions.assertEquals(2, backing.map.size());
Assertions.assertSame(updated.entry, backing.map.get(hit1.getEntryKey()));
MatcherAssert.assertThat(
updated.entry.getHeaders(),
HeadersMatcher.same(
new BasicHeader("Server", "MockOrigin/1.0"),
new BasicHeader("ETag", "\"etag1\""),
new BasicHeader("Content-Encoding","gzip"),
new BasicHeader("Vary","User-Agent"),
new BasicHeader("Date", DateUtils.formatStandardDate(now)),
new BasicHeader("Cache-Control", "max-age=3600, public")
));
}
@Test
void testUpdateCacheEntryTurnsVariant() throws Exception {
final HttpHost host = new HttpHost("foo.example.com");
final URI uri = new URI("http://foo.example.com/bar");
final HttpRequest req1 = new HttpGet(uri);
req1.setHeader("User-Agent", "agent1");
final HttpResponse resp1 = HttpTestUtils.make200Response();
resp1.setHeader("Date", DateUtils.formatStandardDate(tenSecondsAgo));
resp1.setHeader("Cache-Control", "max-age=3600, public");
resp1.setHeader("ETag", "\"etag1\"");
resp1.setHeader("Content-Encoding","gzip");
final HttpRequest revalidate = new HttpGet(uri);
revalidate.setHeader("If-None-Match","\"etag1\"");
final HttpResponse resp2 = HttpTestUtils.make304Response();
resp2.setHeader("Date", DateUtils.formatStandardDate(now));
resp2.setHeader("Cache-Control", "max-age=3600, public");
resp2.setHeader("Vary", "User-Agent");
final CacheHit hit1 = impl.store(host, req1, resp1, null, now, now);
Assertions.assertNotNull(hit1);
Assertions.assertEquals(1, backing.map.size());
Assertions.assertSame(hit1.entry, backing.map.get(hit1.getEntryKey()));
final CacheHit updated = impl.update(hit1, host, req1, resp2, now, now);
Assertions.assertNotNull(updated);
Assertions.assertEquals(2, backing.map.size());
MatcherAssert.assertThat(
updated.entry.getHeaders(),
HeadersMatcher.same(
new BasicHeader("Server", "MockOrigin/1.0"),
new BasicHeader("ETag", "\"etag1\""),
new BasicHeader("Content-Encoding","gzip"),
new BasicHeader("Date", DateUtils.formatStandardDate(now)),
new BasicHeader("Cache-Control", "max-age=3600, public"),
new BasicHeader("Vary","User-Agent")));
}
@Test
void testStoreFromNegotiatedVariant() throws Exception {
final HttpHost host = new HttpHost("foo.example.com");
final URI uri = new URI("http://foo.example.com/bar");
final HttpRequest req1 = new HttpGet(uri);
req1.setHeader("User-Agent", "agent1");
final HttpResponse resp1 = HttpTestUtils.make200Response();
resp1.setHeader("Date", DateUtils.formatStandardDate(tenSecondsAgo));
resp1.setHeader("Cache-Control", "max-age=3600, public");
resp1.setHeader("ETag", "\"etag1\"");
resp1.setHeader("Content-Encoding","gzip");
resp1.setHeader("Vary", "User-Agent");
final CacheHit hit1 = impl.store(host, req1, resp1, null, now, now);
Assertions.assertNotNull(hit1);
Assertions.assertEquals(2, backing.map.size());
Assertions.assertSame(hit1.entry, backing.map.get(hit1.getEntryKey()));
final HttpRequest req2 = new HttpGet(uri);
req2.setHeader("User-Agent", "agent2");
final HttpResponse resp2 = HttpTestUtils.make304Response();
resp2.setHeader("Date", DateUtils.formatStandardDate(now));
resp2.setHeader("Cache-Control", "max-age=3600, public");
final CacheHit hit2 = impl.storeFromNegotiated(hit1, host, req2, resp2, now, now);
Assertions.assertNotNull(hit2);
Assertions.assertEquals(3, backing.map.size());
MatcherAssert.assertThat(
hit2.entry.getHeaders(),
HeadersMatcher.same(
new BasicHeader("Server", "MockOrigin/1.0"),
new BasicHeader("ETag", "\"etag1\""),
new BasicHeader("Content-Encoding","gzip"),
new BasicHeader("Vary","User-Agent"),
new BasicHeader("Date", DateUtils.formatStandardDate(now)),
new BasicHeader("Cache-Control", "max-age=3600, public")));
}
@Test
void testInvalidatesUnsafeRequests() throws Exception {
final HttpRequest request = new BasicHttpRequest("POST","/path");
final String key = CacheKeyGenerator.INSTANCE.generateKey(host, request);
final HttpResponse response = HttpTestUtils.make200Response();
backing.putEntry(key, HttpTestUtils.makeCacheEntry());
impl.evictInvalidatedEntries(host, request, response);
verify(backing).getEntry(key);
verify(backing).removeEntry(key);
Assertions.assertNull(backing.getEntry(key));
}
@Test
void testDoesNotInvalidateSafeRequests() {
final HttpRequest request1 = new BasicHttpRequest("GET","/");
final HttpResponse response1 = HttpTestUtils.make200Response();
impl.evictInvalidatedEntries(host, request1, response1);
verifyNoMoreInteractions(backing);
final HttpRequest request2 = new BasicHttpRequest("HEAD","/");
final HttpResponse response2 = HttpTestUtils.make200Response();
impl.evictInvalidatedEntries(host, request2, response2);
verifyNoMoreInteractions(backing);
}
    @Test
    void testInvalidatesUnsafeRequestsWithVariants() throws Exception {
        // Evicting a root entry that lists variants must also evict every
        // variant entry derived from it.
        final HttpRequest request = new BasicHttpRequest("POST","/path");
        final String rootKey = CacheKeyGenerator.INSTANCE.generateKey(host, request);
        final Set<String> variants = new HashSet<>();
        variants.add("{var1}");
        variants.add("{var2}");
        // Variant entry keys are the variant id prepended to the root key.
        final String variantKey1 = "{var1}" + rootKey;
        final String variantKey2 = "{var2}" + rootKey;
        final HttpResponse response = HttpTestUtils.make200Response();
        backing.putEntry(rootKey, HttpTestUtils.makeCacheEntry(variants));
        backing.putEntry(variantKey1, HttpTestUtils.makeCacheEntry());
        backing.putEntry(variantKey2, HttpTestUtils.makeCacheEntry());
        impl.evictInvalidatedEntries(host, request, response);
        verify(backing).getEntry(rootKey);
        verify(backing).removeEntry(rootKey);
        verify(backing).removeEntry(variantKey1);
        verify(backing).removeEntry(variantKey2);
        Assertions.assertNull(backing.getEntry(rootKey));
        Assertions.assertNull(backing.getEntry(variantKey1));
        Assertions.assertNull(backing.getEntry(variantKey2));
    }
    @Test
    void testInvalidateUriSpecifiedByContentLocationAndFresher() throws Exception {
        // A successful unsafe response whose Content-Location names another cached
        // URI evicts that entry too: the response is fresher (newer Date) and
        // carries a different ETag than the cached entry.
        final HttpRequest request = new BasicHttpRequest("PUT", "/foo");
        final String rootKey = CacheKeyGenerator.INSTANCE.generateKey(host, request);
        final URI contentUri = new URIBuilder()
                .setHttpHost(host)
                .setPath("/bar")
                .build();
        final String contentKey = CacheKeyGenerator.INSTANCE.generateKey(contentUri);
        final HttpResponse response = HttpTestUtils.make200Response();
        response.setHeader("ETag","\"new-etag\"");
        response.setHeader("Date", DateUtils.formatStandardDate(now));
        response.setHeader("Content-Location", contentUri.toASCIIString());
        backing.putEntry(rootKey, HttpTestUtils.makeCacheEntry());
        backing.putEntry(contentKey, HttpTestUtils.makeCacheEntry(
                new BasicHeader("Date", DateUtils.formatStandardDate(tenSecondsAgo)),
                new BasicHeader("ETag", "\"old-etag\"")
        ));
        impl.evictInvalidatedEntries(host, request, response);
        verify(backing).getEntry(rootKey);
        verify(backing).removeEntry(rootKey);
        verify(backing).getEntry(contentKey);
        verify(backing).removeEntry(contentKey);
    }
@Test
void testInvalidateUriSpecifiedByLocationAndFresher() throws Exception {
final HttpRequest request = new BasicHttpRequest("PUT", "/foo");
final String rootKey = CacheKeyGenerator.INSTANCE.generateKey(host, request);
final URI contentUri = new URIBuilder()
.setHttpHost(host)
.setPath("/bar")
.build();
final String contentKey = CacheKeyGenerator.INSTANCE.generateKey(contentUri);
final HttpResponse response = HttpTestUtils.make200Response();
response.setHeader("ETag","\"new-etag\"");
response.setHeader("Date", DateUtils.formatStandardDate(now));
response.setHeader("Location", contentUri.toASCIIString());
backing.putEntry(rootKey, HttpTestUtils.makeCacheEntry());
backing.putEntry(contentKey, HttpTestUtils.makeCacheEntry(
new BasicHeader("Date", DateUtils.formatStandardDate(tenSecondsAgo)),
new BasicHeader("ETag", "\"old-etag\"")
));
impl.evictInvalidatedEntries(host, request, response);
verify(backing).getEntry(rootKey);
verify(backing).removeEntry(rootKey);
verify(backing).getEntry(contentKey);
verify(backing).removeEntry(contentKey);
}
@Test
void testDoesNotInvalidateForUnsuccessfulResponse() throws Exception {
final HttpRequest request = new BasicHttpRequest("PUT", "/foo");
final URI contentUri = new URIBuilder()
.setHttpHost(host)
.setPath("/bar")
.build();
final HttpResponse response = HttpTestUtils.make500Response();
response.setHeader("ETag","\"new-etag\"");
response.setHeader("Date", DateUtils.formatStandardDate(now));
response.setHeader("Content-Location", contentUri.toASCIIString());
impl.evictInvalidatedEntries(host, request, response);
verifyNoMoreInteractions(backing);
}
    @Test
    void testInvalidateUriSpecifiedByContentLocationNonCanonical() throws Exception {
        // NOTE(review): the Content-Location URI built here looks identical to the
        // canonical form used in the "AndFresher" test above; confirm which
        // non-canonical aspect this test is intended to exercise.
        final HttpRequest request = new BasicHttpRequest("PUT", "/foo");
        final String rootKey = CacheKeyGenerator.INSTANCE.generateKey(host, request);
        final URI contentUri = new URIBuilder()
                .setHttpHost(host)
                .setPath("/bar")
                .build();
        final String contentKey = CacheKeyGenerator.INSTANCE.generateKey(contentUri);
        final HttpResponse response = HttpTestUtils.make200Response();
        response.setHeader("ETag","\"new-etag\"");
        response.setHeader("Date", DateUtils.formatStandardDate(now));
        response.setHeader("Content-Location", contentUri.toASCIIString());
        backing.putEntry(rootKey, HttpTestUtils.makeCacheEntry());
        backing.putEntry(contentKey, HttpTestUtils.makeCacheEntry(
                new BasicHeader("Date", DateUtils.formatStandardDate(tenSecondsAgo)),
                new BasicHeader("ETag", "\"old-etag\"")));
        impl.evictInvalidatedEntries(host, request, response);
        verify(backing).getEntry(rootKey);
        verify(backing).removeEntry(rootKey);
        verify(backing).getEntry(contentKey);
        verify(backing).removeEntry(contentKey);
        Assertions.assertNull(backing.getEntry(rootKey));
        Assertions.assertNull(backing.getEntry(contentKey));
    }
    @Test
    void testInvalidateUriSpecifiedByContentLocationRelative() throws Exception {
        // A relative Content-Location ("/bar") must be resolved against the request
        // origin and invalidate the corresponding absolute cache key.
        final HttpRequest request = new BasicHttpRequest("PUT", "/foo");
        final String rootKey = CacheKeyGenerator.INSTANCE.generateKey(host, request);
        final URI contentUri = new URIBuilder()
                .setHttpHost(host)
                .setPath("/bar")
                .build();
        final String contentKey = CacheKeyGenerator.INSTANCE.generateKey(contentUri);
        final HttpResponse response = HttpTestUtils.make200Response();
        response.setHeader("ETag","\"new-etag\"");
        response.setHeader("Date", DateUtils.formatStandardDate(now));
        response.setHeader("Content-Location", "/bar");
        backing.putEntry(rootKey, HttpTestUtils.makeCacheEntry());
        backing.putEntry(contentKey, HttpTestUtils.makeCacheEntry(
                new BasicHeader("Date", DateUtils.formatStandardDate(tenSecondsAgo)),
                new BasicHeader("ETag", "\"old-etag\"")));
        impl.evictInvalidatedEntries(host, request, response);
        verify(backing).getEntry(rootKey);
        verify(backing).removeEntry(rootKey);
        verify(backing).getEntry(contentKey);
        verify(backing).removeEntry(contentKey);
        Assertions.assertNull(backing.getEntry(rootKey));
        Assertions.assertNull(backing.getEntry(contentKey));
    }
@Test
void testDoesNotInvalidateUriSpecifiedByContentLocationOtherOrigin() throws Exception {
final HttpRequest request = new BasicHttpRequest("PUT", "/");
final URI contentUri = new URIBuilder()
.setHost("bar.example.com")
.setPath("/")
.build();
final String contentKey = CacheKeyGenerator.INSTANCE.generateKey(contentUri);
final HttpResponse response = HttpTestUtils.make200Response();
response.setHeader("ETag","\"new-etag\"");
response.setHeader("Date", DateUtils.formatStandardDate(now));
response.setHeader("Content-Location", contentUri.toASCIIString());
backing.putEntry(contentKey, HttpTestUtils.makeCacheEntry());
impl.evictInvalidatedEntries(host, request, response);
verify(backing, Mockito.never()).getEntry(contentKey);
verify(backing, Mockito.never()).removeEntry(contentKey);
}
    @Test
    void testDoesNotInvalidateUriSpecifiedByContentLocationIfEtagsMatch() throws Exception {
        // Matching ETags mean the cached entry already reflects the current state:
        // the Content-Location entry is looked up but kept.
        final HttpRequest request = new BasicHttpRequest("PUT", "/foo");
        final URI contentUri = new URIBuilder()
                .setHttpHost(host)
                .setPath("/bar")
                .build();
        final String contentKey = CacheKeyGenerator.INSTANCE.generateKey(contentUri);
        final HttpResponse response = HttpTestUtils.make200Response();
        response.setHeader("ETag","\"same-etag\"");
        response.setHeader("Date", DateUtils.formatStandardDate(now));
        response.setHeader("Content-Location", contentUri.toASCIIString());
        backing.putEntry(contentKey, HttpTestUtils.makeCacheEntry(
                new BasicHeader("Date", DateUtils.formatStandardDate(tenSecondsAgo)),
                new BasicHeader("ETag", "\"same-etag\"")));
        impl.evictInvalidatedEntries(host, request, response);
        verify(backing).getEntry(contentKey);
        verify(backing, Mockito.never()).removeEntry(contentKey);
    }
    @Test
    void testDoesNotInvalidateUriSpecifiedByContentLocationIfOlder() throws Exception {
        // The response's Date (ten seconds ago) is older than the cached entry's:
        // the entry is examined but not evicted.
        final HttpRequest request = new BasicHttpRequest("PUT", "/foo");
        final URI contentUri = new URIBuilder()
                .setHttpHost(host)
                .setPath("/bar")
                .build();
        final String contentKey = CacheKeyGenerator.INSTANCE.generateKey(contentUri);
        final HttpResponse response = HttpTestUtils.make200Response();
        response.setHeader("ETag","\"new-etag\"");
        response.setHeader("Date", DateUtils.formatStandardDate(tenSecondsAgo));
        response.setHeader("Content-Location", contentUri.toASCIIString());
        backing.putEntry(contentKey, HttpTestUtils.makeCacheEntry(
                new BasicHeader("Date", DateUtils.formatStandardDate(now)),
                new BasicHeader("ETag", "\"old-etag\"")));
        impl.evictInvalidatedEntries(host, request, response);
        verify(backing).getEntry(contentKey);
        verify(backing, Mockito.never()).removeEntry(contentKey);
    }
    @Test
    void testDoesNotInvalidateUriSpecifiedByContentLocationIfResponseHasNoEtag() throws Exception {
        // Without an ETag on the response there is nothing to compare against the
        // cached entry's validator, so the entry is kept.
        final HttpRequest request = new BasicHttpRequest("PUT", "/foo");
        final URI contentUri = new URIBuilder()
                .setHttpHost(host)
                .setPath("/bar")
                .build();
        final String contentKey = CacheKeyGenerator.INSTANCE.generateKey(contentUri);
        final HttpResponse response = HttpTestUtils.make200Response();
        response.removeHeaders("ETag");
        response.setHeader("Date", DateUtils.formatStandardDate(now));
        response.setHeader("Content-Location", contentUri.toASCIIString());
        backing.putEntry(contentKey, HttpTestUtils.makeCacheEntry(
                new BasicHeader("Date", DateUtils.formatStandardDate(tenSecondsAgo)),
                new BasicHeader("ETag", "\"old-etag\"")));
        impl.evictInvalidatedEntries(host, request, response);
        verify(backing).getEntry(contentKey);
        verify(backing, Mockito.never()).removeEntry(contentKey);
    }
    @Test
    void testDoesNotInvalidateUriSpecifiedByContentLocationIfEntryHasNoEtag() throws Exception {
        // The cached entry lacks an ETag validator, so the freshness comparison
        // cannot conclude the entry is stale; it is looked up but kept.
        final HttpRequest request = new BasicHttpRequest("PUT", "/foo");
        final URI contentUri = new URIBuilder()
                .setHttpHost(host)
                .setPath("/bar")
                .build();
        final String contentKey = CacheKeyGenerator.INSTANCE.generateKey(contentUri);
        final HttpResponse response = HttpTestUtils.make200Response();
        response.setHeader("ETag", "\"some-etag\"");
        response.setHeader("Date", DateUtils.formatStandardDate(now));
        response.setHeader("Content-Location", contentUri.toASCIIString());
        backing.putEntry(contentKey, HttpTestUtils.makeCacheEntry(
                new BasicHeader("Date", DateUtils.formatStandardDate(tenSecondsAgo))));
        impl.evictInvalidatedEntries(host, request, response);
        verify(backing).getEntry(contentKey);
        verify(backing, Mockito.never()).removeEntry(contentKey);
    }
    @Test
    void testInvalidatesUriSpecifiedByContentLocationIfResponseHasNoDate() throws Exception {
        // A missing Date on the response prevents the "response is older" guard
        // from applying, so the differing ETag drives eviction.
        final HttpRequest request = new BasicHttpRequest("PUT", "/foo");
        final URI contentUri = new URIBuilder()
                .setHttpHost(host)
                .setPath("/bar")
                .build();
        final String contentKey = CacheKeyGenerator.INSTANCE.generateKey(contentUri);
        final HttpResponse response = HttpTestUtils.make200Response();
        response.setHeader("ETag", "\"new-etag\"");
        response.removeHeaders("Date");
        response.setHeader("Content-Location", contentUri.toASCIIString());
        backing.putEntry(contentKey, HttpTestUtils.makeCacheEntry(
                new BasicHeader("ETag", "\"old-etag\""),
                new BasicHeader("Date", DateUtils.formatStandardDate(tenSecondsAgo))));
        impl.evictInvalidatedEntries(host, request, response);
        verify(backing).getEntry(contentKey);
        verify(backing).removeEntry(contentKey);
    }
    @Test
    void testInvalidatesUriSpecifiedByContentLocationIfEntryHasNoDate() throws Exception {
        // The cached entry has no Date, so the age comparison cannot protect it;
        // the differing ETag drives eviction.
        final HttpRequest request = new BasicHttpRequest("PUT", "/foo");
        final URI contentUri = new URIBuilder()
                .setHttpHost(host)
                .setPath("/bar")
                .build();
        final String contentKey = CacheKeyGenerator.INSTANCE.generateKey(contentUri);
        final HttpResponse response = HttpTestUtils.make200Response();
        response.setHeader("ETag","\"new-etag\"");
        response.setHeader("Date", DateUtils.formatStandardDate(now));
        response.setHeader("Content-Location", contentUri.toASCIIString());
        backing.putEntry(contentKey, HttpTestUtils.makeCacheEntry(
                new BasicHeader("ETag", "\"old-etag\"")));
        impl.evictInvalidatedEntries(host, request, response);
        verify(backing).getEntry(contentKey);
        verify(backing).removeEntry(contentKey);
    }
@Test
void testInvalidatesUriSpecifiedByContentLocationIfResponseHasMalformedDate() throws Exception {
final HttpRequest request = new BasicHttpRequest("PUT", "/foo");
final URI contentUri = new URIBuilder()
.setHttpHost(host)
.setPath("/bar")
.build();
final String contentKey = CacheKeyGenerator.INSTANCE.generateKey(contentUri);
final HttpResponse response = HttpTestUtils.make200Response();
response.setHeader("ETag","\"new-etag\"");
response.setHeader("Date", "huh?");
response.setHeader("Content-Location", contentUri.toASCIIString());
backing.putEntry(contentKey, HttpTestUtils.makeCacheEntry(
new BasicHeader("ETag", "\"old-etag\""),
new BasicHeader("Date", DateUtils.formatStandardDate(tenSecondsAgo))));
impl.evictInvalidatedEntries(host, request, response);
verify(backing).getEntry(contentKey);
verify(backing).removeEntry(contentKey);
}
@Test
void testInvalidatesUriSpecifiedByContentLocationIfEntryHasMalformedDate() throws Exception {
final HttpRequest request = new BasicHttpRequest("PUT", "/foo");
final URI contentUri = new URIBuilder()
.setHttpHost(host)
.setPath("/bar")
.build();
final String contentKey = CacheKeyGenerator.INSTANCE.generateKey(contentUri);
final HttpResponse response = HttpTestUtils.make200Response();
response.setHeader("ETag","\"new-etag\"");
response.setHeader("Date", DateUtils.formatStandardDate(now));
response.setHeader("Content-Location", contentUri.toASCIIString());
backing.putEntry(contentKey, HttpTestUtils.makeCacheEntry(
new BasicHeader("ETag", "\"old-etag\""),
new BasicHeader("Date", "huh?")));
impl.evictInvalidatedEntries(host, request, response);
verify(backing).getEntry(contentKey);
verify(backing).removeEntry(contentKey);
}
}
|
googleapis/google-cloud-java | 37,017 | java-discoveryengine/proto-google-cloud-discoveryengine-v1alpha/src/main/java/com/google/cloud/discoveryengine/v1alpha/ListEnginesResponse.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/discoveryengine/v1alpha/engine_service.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.discoveryengine.v1alpha;
/**
*
*
* <pre>
* Response message for
* [EngineService.ListEngines][google.cloud.discoveryengine.v1alpha.EngineService.ListEngines]
* method.
* </pre>
*
* Protobuf type {@code google.cloud.discoveryengine.v1alpha.ListEnginesResponse}
*/
public final class ListEnginesResponse extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.discoveryengine.v1alpha.ListEnginesResponse)
ListEnginesResponseOrBuilder {
private static final long serialVersionUID = 0L;
// Use ListEnginesResponse.newBuilder() to construct.
private ListEnginesResponse(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private ListEnginesResponse() {
engines_ = java.util.Collections.emptyList();
nextPageToken_ = "";
}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
return new ListEnginesResponse();
}
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.discoveryengine.v1alpha.EngineServiceProto
.internal_static_google_cloud_discoveryengine_v1alpha_ListEnginesResponse_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.discoveryengine.v1alpha.EngineServiceProto
.internal_static_google_cloud_discoveryengine_v1alpha_ListEnginesResponse_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.discoveryengine.v1alpha.ListEnginesResponse.class,
com.google.cloud.discoveryengine.v1alpha.ListEnginesResponse.Builder.class);
}
public static final int ENGINES_FIELD_NUMBER = 1;
@SuppressWarnings("serial")
private java.util.List<com.google.cloud.discoveryengine.v1alpha.Engine> engines_;
/**
*
*
* <pre>
* All the customer's [Engine][google.cloud.discoveryengine.v1alpha.Engine]s.
* </pre>
*
* <code>repeated .google.cloud.discoveryengine.v1alpha.Engine engines = 1;</code>
*/
@java.lang.Override
public java.util.List<com.google.cloud.discoveryengine.v1alpha.Engine> getEnginesList() {
return engines_;
}
/**
*
*
* <pre>
* All the customer's [Engine][google.cloud.discoveryengine.v1alpha.Engine]s.
* </pre>
*
* <code>repeated .google.cloud.discoveryengine.v1alpha.Engine engines = 1;</code>
*/
@java.lang.Override
public java.util.List<? extends com.google.cloud.discoveryengine.v1alpha.EngineOrBuilder>
getEnginesOrBuilderList() {
return engines_;
}
/**
*
*
* <pre>
* All the customer's [Engine][google.cloud.discoveryengine.v1alpha.Engine]s.
* </pre>
*
* <code>repeated .google.cloud.discoveryengine.v1alpha.Engine engines = 1;</code>
*/
@java.lang.Override
public int getEnginesCount() {
return engines_.size();
}
/**
*
*
* <pre>
* All the customer's [Engine][google.cloud.discoveryengine.v1alpha.Engine]s.
* </pre>
*
* <code>repeated .google.cloud.discoveryengine.v1alpha.Engine engines = 1;</code>
*/
@java.lang.Override
public com.google.cloud.discoveryengine.v1alpha.Engine getEngines(int index) {
return engines_.get(index);
}
/**
*
*
* <pre>
* All the customer's [Engine][google.cloud.discoveryengine.v1alpha.Engine]s.
* </pre>
*
* <code>repeated .google.cloud.discoveryengine.v1alpha.Engine engines = 1;</code>
*/
@java.lang.Override
public com.google.cloud.discoveryengine.v1alpha.EngineOrBuilder getEnginesOrBuilder(int index) {
return engines_.get(index);
}
public static final int NEXT_PAGE_TOKEN_FIELD_NUMBER = 2;
@SuppressWarnings("serial")
private volatile java.lang.Object nextPageToken_ = "";
/**
*
*
* <pre>
* Not supported.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @return The nextPageToken.
*/
  @java.lang.Override
  public java.lang.String getNextPageToken() {
    // The field is stored as Object so it can hold either a String or the raw
    // ByteString from the wire; once decoded, the String is cached back into
    // the field so later calls skip the UTF-8 conversion.
    java.lang.Object ref = nextPageToken_;
    if (ref instanceof java.lang.String) {
      return (java.lang.String) ref;
    } else {
      com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
      java.lang.String s = bs.toStringUtf8();
      nextPageToken_ = s;
      return s;
    }
  }
/**
*
*
* <pre>
* Not supported.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @return The bytes for nextPageToken.
*/
@java.lang.Override
public com.google.protobuf.ByteString getNextPageTokenBytes() {
java.lang.Object ref = nextPageToken_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
nextPageToken_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
  @java.lang.Override
  public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
    // Field 1: repeated Engine messages, one length-delimited record each.
    for (int i = 0; i < engines_.size(); i++) {
      output.writeMessage(1, engines_.get(i));
    }
    // Field 2: next_page_token, emitted only when non-empty (proto3 default elision).
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 2, nextPageToken_);
    }
    // Preserve any fields parsed from a newer schema version.
    getUnknownFields().writeTo(output);
  }
  @java.lang.Override
  public int getSerializedSize() {
    // Serialized size is memoized; -1 marks "not yet computed".
    int size = memoizedSize;
    if (size != -1) return size;

    size = 0;
    // Mirrors writeTo(): field 1 repeated messages, field 2 string when non-empty.
    for (int i = 0; i < engines_.size(); i++) {
      size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, engines_.get(i));
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, nextPageToken_);
    }
    size += getUnknownFields().getSerializedSize();
    memoizedSize = size;
    return size;
  }
  @java.lang.Override
  public boolean equals(final java.lang.Object obj) {
    if (obj == this) {
      return true;
    }
    // Non-ListEnginesResponse objects are delegated to the superclass comparison.
    if (!(obj instanceof com.google.cloud.discoveryengine.v1alpha.ListEnginesResponse)) {
      return super.equals(obj);
    }
    com.google.cloud.discoveryengine.v1alpha.ListEnginesResponse other =
        (com.google.cloud.discoveryengine.v1alpha.ListEnginesResponse) obj;
    // Field-by-field comparison: engines list, next_page_token, then unknown fields.
    if (!getEnginesList().equals(other.getEnginesList())) return false;
    if (!getNextPageToken().equals(other.getNextPageToken())) return false;
    if (!getUnknownFields().equals(other.getUnknownFields())) return false;
    return true;
  }
  @java.lang.Override
  public int hashCode() {
    // Hash is memoized; 0 marks "not yet computed" (recomputed if hash is ever 0).
    if (memoizedHashCode != 0) {
      return memoizedHashCode;
    }
    int hash = 41;
    hash = (19 * hash) + getDescriptor().hashCode();
    // The repeated field contributes only when non-empty, keeping equals/hashCode
    // consistent with the field-presence checks in equals().
    if (getEnginesCount() > 0) {
      hash = (37 * hash) + ENGINES_FIELD_NUMBER;
      hash = (53 * hash) + getEnginesList().hashCode();
    }
    hash = (37 * hash) + NEXT_PAGE_TOKEN_FIELD_NUMBER;
    hash = (53 * hash) + getNextPageToken().hashCode();
    hash = (29 * hash) + getUnknownFields().hashCode();
    memoizedHashCode = hash;
    return hash;
  }
public static com.google.cloud.discoveryengine.v1alpha.ListEnginesResponse parseFrom(
java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.discoveryengine.v1alpha.ListEnginesResponse parseFrom(
java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.discoveryengine.v1alpha.ListEnginesResponse parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.discoveryengine.v1alpha.ListEnginesResponse parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.discoveryengine.v1alpha.ListEnginesResponse parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.discoveryengine.v1alpha.ListEnginesResponse parseFrom(
byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.discoveryengine.v1alpha.ListEnginesResponse parseFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.discoveryengine.v1alpha.ListEnginesResponse parseFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.discoveryengine.v1alpha.ListEnginesResponse parseDelimitedFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
}
public static com.google.cloud.discoveryengine.v1alpha.ListEnginesResponse parseDelimitedFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.discoveryengine.v1alpha.ListEnginesResponse parseFrom(
com.google.protobuf.CodedInputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.discoveryengine.v1alpha.ListEnginesResponse parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() {
return newBuilder();
}
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(
com.google.cloud.discoveryengine.v1alpha.ListEnginesResponse prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
*
*
* <pre>
* Response message for
* [EngineService.ListEngines][google.cloud.discoveryengine.v1alpha.EngineService.ListEngines]
* method.
* </pre>
*
* Protobuf type {@code google.cloud.discoveryengine.v1alpha.ListEnginesResponse}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.cloud.discoveryengine.v1alpha.ListEnginesResponse)
com.google.cloud.discoveryengine.v1alpha.ListEnginesResponseOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.discoveryengine.v1alpha.EngineServiceProto
.internal_static_google_cloud_discoveryengine_v1alpha_ListEnginesResponse_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.discoveryengine.v1alpha.EngineServiceProto
.internal_static_google_cloud_discoveryengine_v1alpha_ListEnginesResponse_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.discoveryengine.v1alpha.ListEnginesResponse.class,
com.google.cloud.discoveryengine.v1alpha.ListEnginesResponse.Builder.class);
}
// Construct using com.google.cloud.discoveryengine.v1alpha.ListEnginesResponse.newBuilder()
private Builder() {}
private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
}
@java.lang.Override
public Builder clear() {
super.clear();
bitField0_ = 0;
if (enginesBuilder_ == null) {
engines_ = java.util.Collections.emptyList();
} else {
engines_ = null;
enginesBuilder_.clear();
}
bitField0_ = (bitField0_ & ~0x00000001);
nextPageToken_ = "";
return this;
}
@java.lang.Override
public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
return com.google.cloud.discoveryengine.v1alpha.EngineServiceProto
.internal_static_google_cloud_discoveryengine_v1alpha_ListEnginesResponse_descriptor;
}
@java.lang.Override
public com.google.cloud.discoveryengine.v1alpha.ListEnginesResponse
getDefaultInstanceForType() {
return com.google.cloud.discoveryengine.v1alpha.ListEnginesResponse.getDefaultInstance();
}
@java.lang.Override
public com.google.cloud.discoveryengine.v1alpha.ListEnginesResponse build() {
com.google.cloud.discoveryengine.v1alpha.ListEnginesResponse result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public com.google.cloud.discoveryengine.v1alpha.ListEnginesResponse buildPartial() {
com.google.cloud.discoveryengine.v1alpha.ListEnginesResponse result =
new com.google.cloud.discoveryengine.v1alpha.ListEnginesResponse(this);
buildPartialRepeatedFields(result);
if (bitField0_ != 0) {
buildPartial0(result);
}
onBuilt();
return result;
}
private void buildPartialRepeatedFields(
com.google.cloud.discoveryengine.v1alpha.ListEnginesResponse result) {
if (enginesBuilder_ == null) {
if (((bitField0_ & 0x00000001) != 0)) {
engines_ = java.util.Collections.unmodifiableList(engines_);
bitField0_ = (bitField0_ & ~0x00000001);
}
result.engines_ = engines_;
} else {
result.engines_ = enginesBuilder_.build();
}
}
private void buildPartial0(
com.google.cloud.discoveryengine.v1alpha.ListEnginesResponse result) {
int from_bitField0_ = bitField0_;
if (((from_bitField0_ & 0x00000002) != 0)) {
result.nextPageToken_ = nextPageToken_;
}
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof com.google.cloud.discoveryengine.v1alpha.ListEnginesResponse) {
return mergeFrom((com.google.cloud.discoveryengine.v1alpha.ListEnginesResponse) other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(com.google.cloud.discoveryengine.v1alpha.ListEnginesResponse other) {
if (other
== com.google.cloud.discoveryengine.v1alpha.ListEnginesResponse.getDefaultInstance())
return this;
if (enginesBuilder_ == null) {
if (!other.engines_.isEmpty()) {
if (engines_.isEmpty()) {
engines_ = other.engines_;
bitField0_ = (bitField0_ & ~0x00000001);
} else {
ensureEnginesIsMutable();
engines_.addAll(other.engines_);
}
onChanged();
}
} else {
if (!other.engines_.isEmpty()) {
if (enginesBuilder_.isEmpty()) {
enginesBuilder_.dispose();
enginesBuilder_ = null;
engines_ = other.engines_;
bitField0_ = (bitField0_ & ~0x00000001);
enginesBuilder_ =
com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders
? getEnginesFieldBuilder()
: null;
} else {
enginesBuilder_.addAllMessages(other.engines_);
}
}
}
if (!other.getNextPageToken().isEmpty()) {
nextPageToken_ = other.nextPageToken_;
bitField0_ |= 0x00000002;
onChanged();
}
this.mergeUnknownFields(other.getUnknownFields());
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
return true;
}
@java.lang.Override
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
case 10:
{
com.google.cloud.discoveryengine.v1alpha.Engine m =
input.readMessage(
com.google.cloud.discoveryengine.v1alpha.Engine.parser(),
extensionRegistry);
if (enginesBuilder_ == null) {
ensureEnginesIsMutable();
engines_.add(m);
} else {
enginesBuilder_.addMessage(m);
}
break;
} // case 10
case 18:
{
nextPageToken_ = input.readStringRequireUtf8();
bitField0_ |= 0x00000002;
break;
} // case 18
default:
{
if (!super.parseUnknownField(input, extensionRegistry, tag)) {
done = true; // was an endgroup tag
}
break;
} // default:
} // switch (tag)
} // while (!done)
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.unwrapIOException();
} finally {
onChanged();
} // finally
return this;
}
private int bitField0_;
private java.util.List<com.google.cloud.discoveryengine.v1alpha.Engine> engines_ =
java.util.Collections.emptyList();
private void ensureEnginesIsMutable() {
if (!((bitField0_ & 0x00000001) != 0)) {
engines_ =
new java.util.ArrayList<com.google.cloud.discoveryengine.v1alpha.Engine>(engines_);
bitField0_ |= 0x00000001;
}
}
private com.google.protobuf.RepeatedFieldBuilderV3<
com.google.cloud.discoveryengine.v1alpha.Engine,
com.google.cloud.discoveryengine.v1alpha.Engine.Builder,
com.google.cloud.discoveryengine.v1alpha.EngineOrBuilder>
enginesBuilder_;
/**
*
*
* <pre>
* All the customer's [Engine][google.cloud.discoveryengine.v1alpha.Engine]s.
* </pre>
*
* <code>repeated .google.cloud.discoveryengine.v1alpha.Engine engines = 1;</code>
*/
public java.util.List<com.google.cloud.discoveryengine.v1alpha.Engine> getEnginesList() {
if (enginesBuilder_ == null) {
return java.util.Collections.unmodifiableList(engines_);
} else {
return enginesBuilder_.getMessageList();
}
}
/**
*
*
* <pre>
* All the customer's [Engine][google.cloud.discoveryengine.v1alpha.Engine]s.
* </pre>
*
* <code>repeated .google.cloud.discoveryengine.v1alpha.Engine engines = 1;</code>
*/
public int getEnginesCount() {
if (enginesBuilder_ == null) {
return engines_.size();
} else {
return enginesBuilder_.getCount();
}
}
/**
*
*
* <pre>
* All the customer's [Engine][google.cloud.discoveryengine.v1alpha.Engine]s.
* </pre>
*
* <code>repeated .google.cloud.discoveryengine.v1alpha.Engine engines = 1;</code>
*/
public com.google.cloud.discoveryengine.v1alpha.Engine getEngines(int index) {
if (enginesBuilder_ == null) {
return engines_.get(index);
} else {
return enginesBuilder_.getMessage(index);
}
}
/**
*
*
* <pre>
* All the customer's [Engine][google.cloud.discoveryengine.v1alpha.Engine]s.
* </pre>
*
* <code>repeated .google.cloud.discoveryengine.v1alpha.Engine engines = 1;</code>
*/
public Builder setEngines(int index, com.google.cloud.discoveryengine.v1alpha.Engine value) {
if (enginesBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureEnginesIsMutable();
engines_.set(index, value);
onChanged();
} else {
enginesBuilder_.setMessage(index, value);
}
return this;
}
/**
*
*
* <pre>
* All the customer's [Engine][google.cloud.discoveryengine.v1alpha.Engine]s.
* </pre>
*
* <code>repeated .google.cloud.discoveryengine.v1alpha.Engine engines = 1;</code>
*/
public Builder setEngines(
int index, com.google.cloud.discoveryengine.v1alpha.Engine.Builder builderForValue) {
if (enginesBuilder_ == null) {
ensureEnginesIsMutable();
engines_.set(index, builderForValue.build());
onChanged();
} else {
enginesBuilder_.setMessage(index, builderForValue.build());
}
return this;
}
/**
*
*
* <pre>
* All the customer's [Engine][google.cloud.discoveryengine.v1alpha.Engine]s.
* </pre>
*
* <code>repeated .google.cloud.discoveryengine.v1alpha.Engine engines = 1;</code>
*/
public Builder addEngines(com.google.cloud.discoveryengine.v1alpha.Engine value) {
if (enginesBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureEnginesIsMutable();
engines_.add(value);
onChanged();
} else {
enginesBuilder_.addMessage(value);
}
return this;
}
/**
*
*
* <pre>
* All the customer's [Engine][google.cloud.discoveryengine.v1alpha.Engine]s.
* </pre>
*
* <code>repeated .google.cloud.discoveryengine.v1alpha.Engine engines = 1;</code>
*/
public Builder addEngines(int index, com.google.cloud.discoveryengine.v1alpha.Engine value) {
if (enginesBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureEnginesIsMutable();
engines_.add(index, value);
onChanged();
} else {
enginesBuilder_.addMessage(index, value);
}
return this;
}
/**
*
*
* <pre>
* All the customer's [Engine][google.cloud.discoveryengine.v1alpha.Engine]s.
* </pre>
*
* <code>repeated .google.cloud.discoveryengine.v1alpha.Engine engines = 1;</code>
*/
public Builder addEngines(
com.google.cloud.discoveryengine.v1alpha.Engine.Builder builderForValue) {
if (enginesBuilder_ == null) {
ensureEnginesIsMutable();
engines_.add(builderForValue.build());
onChanged();
} else {
enginesBuilder_.addMessage(builderForValue.build());
}
return this;
}
/**
*
*
* <pre>
* All the customer's [Engine][google.cloud.discoveryengine.v1alpha.Engine]s.
* </pre>
*
* <code>repeated .google.cloud.discoveryengine.v1alpha.Engine engines = 1;</code>
*/
public Builder addEngines(
int index, com.google.cloud.discoveryengine.v1alpha.Engine.Builder builderForValue) {
if (enginesBuilder_ == null) {
ensureEnginesIsMutable();
engines_.add(index, builderForValue.build());
onChanged();
} else {
enginesBuilder_.addMessage(index, builderForValue.build());
}
return this;
}
/**
*
*
* <pre>
* All the customer's [Engine][google.cloud.discoveryengine.v1alpha.Engine]s.
* </pre>
*
* <code>repeated .google.cloud.discoveryengine.v1alpha.Engine engines = 1;</code>
*/
public Builder addAllEngines(
java.lang.Iterable<? extends com.google.cloud.discoveryengine.v1alpha.Engine> values) {
if (enginesBuilder_ == null) {
ensureEnginesIsMutable();
com.google.protobuf.AbstractMessageLite.Builder.addAll(values, engines_);
onChanged();
} else {
enginesBuilder_.addAllMessages(values);
}
return this;
}
/**
*
*
* <pre>
* All the customer's [Engine][google.cloud.discoveryengine.v1alpha.Engine]s.
* </pre>
*
* <code>repeated .google.cloud.discoveryengine.v1alpha.Engine engines = 1;</code>
*/
public Builder clearEngines() {
if (enginesBuilder_ == null) {
engines_ = java.util.Collections.emptyList();
bitField0_ = (bitField0_ & ~0x00000001);
onChanged();
} else {
enginesBuilder_.clear();
}
return this;
}
/**
*
*
* <pre>
* All the customer's [Engine][google.cloud.discoveryengine.v1alpha.Engine]s.
* </pre>
*
* <code>repeated .google.cloud.discoveryengine.v1alpha.Engine engines = 1;</code>
*/
public Builder removeEngines(int index) {
if (enginesBuilder_ == null) {
ensureEnginesIsMutable();
engines_.remove(index);
onChanged();
} else {
enginesBuilder_.remove(index);
}
return this;
}
/**
*
*
* <pre>
* All the customer's [Engine][google.cloud.discoveryengine.v1alpha.Engine]s.
* </pre>
*
* <code>repeated .google.cloud.discoveryengine.v1alpha.Engine engines = 1;</code>
*/
public com.google.cloud.discoveryengine.v1alpha.Engine.Builder getEnginesBuilder(int index) {
return getEnginesFieldBuilder().getBuilder(index);
}
/**
*
*
* <pre>
* All the customer's [Engine][google.cloud.discoveryengine.v1alpha.Engine]s.
* </pre>
*
* <code>repeated .google.cloud.discoveryengine.v1alpha.Engine engines = 1;</code>
*/
public com.google.cloud.discoveryengine.v1alpha.EngineOrBuilder getEnginesOrBuilder(int index) {
if (enginesBuilder_ == null) {
return engines_.get(index);
} else {
return enginesBuilder_.getMessageOrBuilder(index);
}
}
/**
*
*
* <pre>
* All the customer's [Engine][google.cloud.discoveryengine.v1alpha.Engine]s.
* </pre>
*
* <code>repeated .google.cloud.discoveryengine.v1alpha.Engine engines = 1;</code>
*/
public java.util.List<? extends com.google.cloud.discoveryengine.v1alpha.EngineOrBuilder>
getEnginesOrBuilderList() {
if (enginesBuilder_ != null) {
return enginesBuilder_.getMessageOrBuilderList();
} else {
return java.util.Collections.unmodifiableList(engines_);
}
}
/**
*
*
* <pre>
* All the customer's [Engine][google.cloud.discoveryengine.v1alpha.Engine]s.
* </pre>
*
* <code>repeated .google.cloud.discoveryengine.v1alpha.Engine engines = 1;</code>
*/
public com.google.cloud.discoveryengine.v1alpha.Engine.Builder addEnginesBuilder() {
return getEnginesFieldBuilder()
.addBuilder(com.google.cloud.discoveryengine.v1alpha.Engine.getDefaultInstance());
}
/**
*
*
* <pre>
* All the customer's [Engine][google.cloud.discoveryengine.v1alpha.Engine]s.
* </pre>
*
* <code>repeated .google.cloud.discoveryengine.v1alpha.Engine engines = 1;</code>
*/
public com.google.cloud.discoveryengine.v1alpha.Engine.Builder addEnginesBuilder(int index) {
return getEnginesFieldBuilder()
.addBuilder(index, com.google.cloud.discoveryengine.v1alpha.Engine.getDefaultInstance());
}
/**
*
*
* <pre>
* All the customer's [Engine][google.cloud.discoveryengine.v1alpha.Engine]s.
* </pre>
*
* <code>repeated .google.cloud.discoveryengine.v1alpha.Engine engines = 1;</code>
*/
public java.util.List<com.google.cloud.discoveryengine.v1alpha.Engine.Builder>
getEnginesBuilderList() {
return getEnginesFieldBuilder().getBuilderList();
}
private com.google.protobuf.RepeatedFieldBuilderV3<
com.google.cloud.discoveryengine.v1alpha.Engine,
com.google.cloud.discoveryengine.v1alpha.Engine.Builder,
com.google.cloud.discoveryengine.v1alpha.EngineOrBuilder>
getEnginesFieldBuilder() {
if (enginesBuilder_ == null) {
enginesBuilder_ =
new com.google.protobuf.RepeatedFieldBuilderV3<
com.google.cloud.discoveryengine.v1alpha.Engine,
com.google.cloud.discoveryengine.v1alpha.Engine.Builder,
com.google.cloud.discoveryengine.v1alpha.EngineOrBuilder>(
engines_, ((bitField0_ & 0x00000001) != 0), getParentForChildren(), isClean());
engines_ = null;
}
return enginesBuilder_;
}
private java.lang.Object nextPageToken_ = "";
/**
*
*
* <pre>
* Not supported.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @return The nextPageToken.
*/
public java.lang.String getNextPageToken() {
java.lang.Object ref = nextPageToken_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
nextPageToken_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
*
*
* <pre>
* Not supported.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @return The bytes for nextPageToken.
*/
public com.google.protobuf.ByteString getNextPageTokenBytes() {
java.lang.Object ref = nextPageToken_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
nextPageToken_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
*
*
* <pre>
* Not supported.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @param value The nextPageToken to set.
* @return This builder for chaining.
*/
public Builder setNextPageToken(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
nextPageToken_ = value;
bitField0_ |= 0x00000002;
onChanged();
return this;
}
/**
*
*
* <pre>
* Not supported.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @return This builder for chaining.
*/
public Builder clearNextPageToken() {
nextPageToken_ = getDefaultInstance().getNextPageToken();
bitField0_ = (bitField0_ & ~0x00000002);
onChanged();
return this;
}
/**
*
*
* <pre>
* Not supported.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @param value The bytes for nextPageToken to set.
* @return This builder for chaining.
*/
public Builder setNextPageTokenBytes(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
checkByteStringIsUtf8(value);
nextPageToken_ = value;
bitField0_ |= 0x00000002;
onChanged();
return this;
}
@java.lang.Override
public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
    // @@protoc_insertion_point(builder_scope:google.cloud.discoveryengine.v1alpha.ListEnginesResponse)
  }

  // @@protoc_insertion_point(class_scope:google.cloud.discoveryengine.v1alpha.ListEnginesResponse)
  // Singleton default instance shared by all callers; protobuf messages are immutable.
  private static final com.google.cloud.discoveryengine.v1alpha.ListEnginesResponse
      DEFAULT_INSTANCE;

  static {
    DEFAULT_INSTANCE = new com.google.cloud.discoveryengine.v1alpha.ListEnginesResponse();
  }

  public static com.google.cloud.discoveryengine.v1alpha.ListEnginesResponse getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }

  // Stateless parser singleton. On parse failure the partially built message is
  // attached to the thrown InvalidProtocolBufferException so callers can inspect
  // what was decoded before the error.
  private static final com.google.protobuf.Parser<ListEnginesResponse> PARSER =
      new com.google.protobuf.AbstractParser<ListEnginesResponse>() {
        @java.lang.Override
        public ListEnginesResponse parsePartialFrom(
            com.google.protobuf.CodedInputStream input,
            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
            throws com.google.protobuf.InvalidProtocolBufferException {
          Builder builder = newBuilder();
          try {
            builder.mergeFrom(input, extensionRegistry);
          } catch (com.google.protobuf.InvalidProtocolBufferException e) {
            throw e.setUnfinishedMessage(builder.buildPartial());
          } catch (com.google.protobuf.UninitializedMessageException e) {
            throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
          } catch (java.io.IOException e) {
            // Wrap plain I/O failures in the protobuf exception type expected by callers.
            throw new com.google.protobuf.InvalidProtocolBufferException(e)
                .setUnfinishedMessage(builder.buildPartial());
          }
          return builder.buildPartial();
        }
      };

  public static com.google.protobuf.Parser<ListEnginesResponse> parser() {
    return PARSER;
  }

  @java.lang.Override
  public com.google.protobuf.Parser<ListEnginesResponse> getParserForType() {
    return PARSER;
  }

  @java.lang.Override
  public com.google.cloud.discoveryengine.v1alpha.ListEnginesResponse getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }
}
|
apache/zeppelin | 37,051 | zeppelin-server/src/main/java/org/apache/zeppelin/realm/kerberos/KerberosRealm.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.zeppelin.realm.kerberos;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.security.Groups;
import org.apache.hadoop.security.authentication.client.AuthenticatedURL;
import org.apache.hadoop.security.authentication.client.AuthenticationException;
import org.apache.hadoop.security.authentication.client.KerberosAuthenticator;
import org.apache.hadoop.security.authentication.server.AuthenticationHandler;
import org.apache.hadoop.security.authentication.server.AuthenticationToken;
import org.apache.commons.codec.binary.Base64;
import org.apache.hadoop.security.authentication.util.*;
import org.apache.shiro.SecurityUtils;
import org.apache.shiro.authc.AuthenticationInfo;
import org.apache.shiro.authc.SimpleAccount;
import org.apache.shiro.authz.AuthorizationException;
import org.apache.shiro.authz.AuthorizationInfo;
import org.apache.shiro.authz.SimpleAuthorizationInfo;
import org.apache.shiro.realm.AuthorizingRealm;
import org.apache.shiro.subject.PrincipalCollection;
import org.ietf.jgss.GSSException;
import org.ietf.jgss.GSSContext;
import org.ietf.jgss.GSSCredential;
import org.ietf.jgss.GSSManager;
import org.ietf.jgss.Oid;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import javax.security.auth.Subject;
import javax.security.auth.kerberos.KerberosPrincipal;
import javax.security.auth.kerberos.KeyTab;
import jakarta.servlet.FilterChain;
import jakarta.servlet.ServletException;
import jakarta.servlet.ServletRequest;
import jakarta.servlet.ServletResponse;
import jakarta.servlet.http.Cookie;
import jakarta.servlet.http.HttpServletRequest;
import jakarta.servlet.http.HttpServletRequestWrapper;
import jakarta.servlet.http.HttpServletResponse;
import java.io.File;
import java.io.IOException;
import java.security.Principal;
import java.security.PrivilegedActionException;
import java.security.PrivilegedExceptionAction;
import java.text.SimpleDateFormat;
import java.util.*;
import java.util.regex.Pattern;
/**
* The {@link KerberosRealm} implements the Kerberos SPNEGO
* authentication mechanism for HTTP via Shiro.
* <p>
* The Shiro configuration section should be configured as:
* [main]
* krbRealm = org.apache.zeppelin.realm.kerberos.KerberosRealm
* krbRealm.principal = HTTP/zeppelin.fqdn.domain.com@EXAMPLE.COM
* krbRealm.keytab = /etc/security/keytabs/spnego.service.keytab
* krbRealm.nameRules = DEFAULT
* krbRealm.signatureSecretFile = /etc/security/http_secret
* krbRealm.tokenValidity = 36000
* krbRealm.cookieDomain = domain.com
* krbRealm.cookiePath = /
* krbRealm.logout = logout
* krbRealm.logoutAPI = true
* krbRealm.providerUrl = https://domain.example.com/
* krbRealm.redirectParam = originalUrl
* authc = org.apache.zeppelin.realm.kerberos.KerberosAuthenticationFilter
*
*/
public class KerberosRealm extends AuthorizingRealm {
  private static final Logger LOGGER = LoggerFactory.getLogger(KerberosRealm.class);

  // Configs to set in shiro.ini
  private String principal = null;
  private String keytab = null;
  private String nameRules = "DEFAULT";

  // Both interval fields are stored internally in MILLISECONDS: the public
  // setters multiply the configured seconds by 1000 and the getters divide
  // by 1000. -1 is the "disabled" sentinel for the max-inactive interval.
  private long tokenMaxInactiveInterval = -1;
  // FIX: was "36000" (seconds), but this field is consumed as milliseconds,
  // so the unconfigured default expired tokens after 36 seconds instead of
  // the documented 10 hours. Initialize in the same unit the setter stores.
  private long tokenValidity = 36000 * 1000L; // 10 hours, in ms

  private String cookieDomain = null;
  private String cookiePath = "/";
  private boolean isCookiePersistent = false;
  private String signatureSecretFile = null;
  private String signatureSecretProvider = "file";
  private String logout = "logout";
  private Boolean logoutAPI = true;
  private String providerUrl = "https://domain.example.com/";
  private String redirectParam = "originalUrl";

  /**
   * Constant for the property that specifies the authentication handler to use.
   */
  private static final String AUTH_TYPE = "type";

  /**
   * Constant for the property that specifies the secret to use for signing the HTTP Cookies.
   */
  private static final String SIGNATURE_SECRET = "signature.secret";

  private static final String SIGNATURE_SECRET_FILE = SIGNATURE_SECRET + ".file";

  /**
   * Constant for the configuration property
   * that indicates the max inactive interval of the generated token.
   * Currently this is NOT being used
   * TODO(vr): Enable this when we move to Apache Hadoop 2.8+
   */
  private static final String AUTH_TOKEN_MAX_INACTIVE_INTERVAL = "token.max-inactive-interval";

  /**
   * Constant for the configuration property that indicates the tokenValidity of the generated
   * token.
   */
  private static final String AUTH_TOKEN_VALIDITY = "token.tokenValidity";

  /**
   * Constant for the configuration property that indicates the domain to use in the HTTP cookie.
   */
  private static final String COOKIE_DOMAIN = "cookie.domain";

  /**
   * Constant for the configuration property that indicates the path to use in the HTTP cookie.
   */
  private static final String COOKIE_PATH = "cookie.path";

  /**
   * Constant for the configuration property
   * that indicates the persistence of the HTTP cookie.
   */
  private static final String COOKIE_PERSISTENT = "cookie.persistent";

  /**
   * Constant that identifies the authentication mechanism.
   */
  public static final String TYPE = "kerberos";

  /**
   * Constant for the configuration property that indicates the kerberos
   * principal.
   */
  public static final String PRINCIPAL = TYPE + ".principal";

  /**
   * Constant for the configuration property that indicates the keytab
   * file path.
   */
  public static final String KEYTAB = TYPE + ".keytab";

  /**
   * Constant for the configuration property that indicates the Kerberos name
   * rules for the Kerberos principals.
   */
  public static final String NAME_RULES = TYPE + ".name.rules";

  /**
   * Constant for the configuration property that indicates the name of the
   * SignerSecretProvider class to use.
   * Possible values are: "file", "random"
   * We are NOT supporting "zookeeper", or a custom classname, at the moment.
   * If not specified, the "file" implementation will be used with
   * SIGNATURE_SECRET_FILE; and if that's not specified, the "random"
   * implementation will be used.
   */
  private static final String SIGNER_SECRET_PROVIDER = "signer.secret.provider";

  /**
   * Constant for the configuration property that indicates the path to use to logout
   */
  private static final String LOGOUT = "logout";

  // NOTE(review): static, so the signer (and its secret) is shared across all
  // KerberosRealm instances in the JVM — confirm this sharing is intended.
  private static Signer signer = null;
  private SignerSecretProvider secretProvider = null;
  private boolean destroySecretProvider = true;
  private GSSManager gssManager = null;
  // JAAS subject holding the server's keytab credentials and SPNEGO principals.
  private Subject serverSubject = null;
  private Properties config = null;

  /**
   * Hadoop Groups implementation.
   */
  private Groups hadoopGroups;
  /** This realm only authenticates {@link KerberosToken}s produced by the SPNEGO filter. */
  @Override
  public boolean supports(org.apache.shiro.authc.AuthenticationToken token) {
    return token instanceof KerberosToken;
  }
  /**
   * Initializes the KerberosRealm by 'kinit'ing using principal and keytab.
   * <p>
   * It creates a Kerberos context using the principal and keytab specified in
   * the Shiro configuration.
   * <p>
   * This method should be called only once.
   *
   * @throws RuntimeException thrown if the handler could not be initialized.
   */
  @Override
  protected void onInit() {
    super.onInit();
    config = getConfiguration();
    try {
      // Fail fast on the two mandatory settings.
      if (principal == null || principal.trim().length() == 0) {
        throw new RuntimeException("Principal not defined in configuration");
      }
      if (keytab == null || keytab.trim().length() == 0) {
        throw new RuntimeException("Keytab not defined in configuration");
      }
      File keytabFile = new File(keytab);
      if (!keytabFile.exists()) {
        throw new RuntimeException("Keytab file does not exist: " + keytab);
      }
      // use all SPNEGO principals in the keytab if a principal isn't
      // specifically configured ("*" wildcard).
      final String[] spnegoPrincipals;
      if (principal.equals("*")) {
        spnegoPrincipals = KerberosUtil.getPrincipalNames(
            keytab, Pattern.compile("HTTP/.*"));
        if (spnegoPrincipals.length == 0) {
          throw new RuntimeException("Principals do not exist in the keytab");
        }
      } else {
        spnegoPrincipals = new String[]{principal};
      }
      // Build the JAAS server subject carrying the keytab as a private
      // credential plus one KerberosPrincipal per SPNEGO principal; GSS calls
      // below run as this subject.
      KeyTab keytabInstance = KeyTab.getInstance(keytabFile);
      serverSubject = new Subject();
      serverSubject.getPrivateCredentials().add(keytabInstance);
      for (String spnegoPrincipal : spnegoPrincipals) {
        Principal krbPrincipal = new KerberosPrincipal(spnegoPrincipal);
        LOGGER.info("Using keytab {}, for principal {}",
            keytab, krbPrincipal);
        serverSubject.getPrincipals().add(krbPrincipal);
      }
      if (nameRules == null || nameRules.trim().length() == 0) {
        LOGGER.warn("No auth_to_local rules defined, DEFAULT will be used.");
        nameRules = "DEFAULT";
      }
      // auth_to_local mapping is process-global (static) in Hadoop.
      KerberosName.setRules(nameRules);
      if (null == gssManager) {
        try {
          // GSSManager must be obtained under the server subject so it picks
          // up the keytab credentials.
          gssManager = Subject.doAs(serverSubject,
              (PrivilegedExceptionAction<GSSManager>) GSSManager::getInstance);
          LOGGER.trace("SPNEGO gssManager initialized.");
        } catch (PrivilegedActionException ex) {
          throw ex.getException();
        }
      }
      // signer is static: only the first realm instance initializes it.
      if (null == signer) {
        initializeSecretProvider();
      }
      // Group lookups use the default Hadoop configuration on the classpath.
      Configuration hadoopConfig = new Configuration();
      hadoopGroups = new Groups(hadoopConfig);
    } catch (Exception ex) {
      // Shiro has no checked-exception channel for realm init; wrap everything.
      throw new RuntimeException(ex);
    }
  }
private void initializeSecretProvider() throws ServletException {
try {
secretProvider = constructSecretProvider();
destroySecretProvider = true;
signer = new Signer(secretProvider);
} catch (Exception ex) {
throw new ServletException(ex);
}
}
private SignerSecretProvider constructSecretProvider() throws Exception {
SignerSecretProvider provider;
String secretProvider = config.getProperty(SIGNER_SECRET_PROVIDER);
if (config.getProperty(SIGNATURE_SECRET_FILE) == null) {
secretProvider = "random";
}
if ("file".equals(secretProvider)) {
try {
provider = new FileSignerSecretProvider();
provider.init(config, null, tokenValidity);
LOGGER.info("File based secret signer initialized.");
} catch (Exception e) {
LOGGER.info("Unable to initialize FileSignerSecretProvider, " +
"falling back to use random secrets.");
provider = new RandomSignerSecretProvider();
provider.init(config, null, tokenValidity);
LOGGER.info("Random secret signer initialized.");
}
} else if ("random".equals(secretProvider)) {
provider = new RandomSignerSecretProvider();
provider.init(config, null, tokenValidity);
LOGGER.info("Random secret signer initialized.");
} else {
throw new RuntimeException(
"Custom secret signer not implemented yet. Use 'file' or 'random'.");
}
return provider;
}
  /**
   * This is an empty implementation, it always returns <code>TRUE</code>.
   * <p>
   * Hook point: subclasses may override to short-circuit the auth flow for
   * management endpoints; {@link #doKerberosAuth} skips authentication when
   * this returns {@code false}.
   *
   * @param token the authentication token if any, otherwise <code>NULL</code>.
   * @param request the HTTP client request.
   * @param response the HTTP client response.
   *
   * @return <code>TRUE</code>
   */
  public boolean managementOperation(AuthenticationToken token,
                                     HttpServletRequest request,
                                     HttpServletResponse response) {
    return true;
  }
/**
* Returns the group mapping for the provided user as per Hadoop {@link Groups} Mapping
*
* @param principals list of principals to file to find group for
* @return AuthorizationInfo
*/
@Override
public AuthorizationInfo doGetAuthorizationInfo(PrincipalCollection principals)
throws AuthorizationException {
Set<String> roles = mapGroupPrincipals(principals.getPrimaryPrincipal().toString());
return new SimpleAuthorizationInfo(roles);
}
/**
* Query the Hadoop implementation of {@link Groups} to retrieve groups for
* provided user.
*/
public Set<String> mapGroupPrincipals(final String mappedPrincipalName)
throws AuthorizationException {
/* return the groups as seen by Hadoop */
Set<String> groups;
try {
hadoopGroups.refresh();
final List<String> groupList = hadoopGroups.getGroups(mappedPrincipalName);
LOGGER.debug(String.format("group found %s, %s",
mappedPrincipalName, groupList.toString()));
groups = new HashSet<>(groupList);
} catch (final IOException e) {
if (e.toString().contains("No groups found for user")) {
/* no groups found move on */
LOGGER.info(String.format("No groups found for user %s", mappedPrincipalName));
} else {
/* Log the error and return empty group */
LOGGER.info(String.format("errorGettingUserGroups for %s", mappedPrincipalName));
throw new AuthorizationException(e);
}
groups = new HashSet<>();
}
return groups;
}
/**
* This is called when Kerberos authentication is done and a {@link KerberosToken} has
* been acquired.
* This function returns a Shiro {@link SimpleAccount} based on the {@link KerberosToken}
* provided. Null otherwise.
*/
@Override
protected AuthenticationInfo doGetAuthenticationInfo(
org.apache.shiro.authc.AuthenticationToken authenticationToken)
throws org.apache.shiro.authc.AuthenticationException {
if (null != authenticationToken) {
KerberosToken kerberosToken = (KerberosToken) authenticationToken;
SimpleAccount account = new SimpleAccount(kerberosToken.getPrincipal(),
kerberosToken.getCredentials(), kerberosToken.getClass().getName());
account.addRole(mapGroupPrincipals((String)kerberosToken.getPrincipal()));
return account;
}
return null;
}
  /**
   * If the request has a valid authentication token it allows the request to continue to
   * the target resource,
   * otherwise it triggers a GSS-API sequence for authentication
   *
   * @param request the request object.
   * @param response the response object.
   * @param filterChain the filter chain object.
   * @throws IOException thrown if an IO error occurred.
   * @throws ServletException thrown if a processing error occurred.
   */
  public void doKerberosAuth(ServletRequest request,
                             ServletResponse response,
                             FilterChain filterChain)
      throws IOException, ServletException {
    boolean unauthorizedResponse = true;
    int errCode = HttpServletResponse.SC_UNAUTHORIZED;
    AuthenticationException authenticationEx = null;
    HttpServletRequest httpRequest = (HttpServletRequest) request;
    HttpServletResponse httpResponse = (HttpServletResponse) response;
    boolean isHttps = "https".equals(httpRequest.getScheme());
    try {
      boolean newToken = false;
      AuthenticationToken token;
      try {
        // First try the signed auth cookie from a previous SPNEGO round-trip.
        token = getToken(httpRequest);
        if (LOGGER.isDebugEnabled()) {
          LOGGER.debug("Got token {} from httpRequest {}", token,
              getRequestURL(httpRequest));
          if (null != token) {
            LOGGER.debug("token.isExpired() = " + token.isExpired());
          }
        }
      } catch (AuthenticationException ex) {
        // A tampered/expired cookie is remembered so its message can be sent
        // back in the 401; a merely absent cookie ("Empty token") is not.
        LOGGER.warn("AuthenticationToken ignored: " + ex.getMessage());
        if (!ex.getMessage().equals("Empty token")) {
          // will be sent back in a 401 unless filter authenticates
          authenticationEx = ex;
        }
        token = null;
      }
      if (managementOperation(token, httpRequest, httpResponse)) {
        if (token == null || token.isExpired()) {
          if (LOGGER.isDebugEnabled()) {
            LOGGER.debug("Request [{}] triggering authentication. handler: {}",
                getRequestURL(httpRequest), this.getClass());
          }
          // Run the SPNEGO sequence; null means the handshake is still in
          // progress and authenticate() already wrote the 401/Negotiate reply.
          token = authenticate(httpRequest, httpResponse);
          if (token != null && token != AuthenticationToken.ANONYMOUS) {
            // TODO(vr): uncomment when we move to Hadoop 2.8+
            // if (token.getMaxInactives() > 0) {
            //   token.setMaxInactives(System.currentTimeMillis()
            //       + getTokenMaxInactiveInterval() * 1000);
            // }
            if (token.getExpires() != 0) {
              token.setExpires(System.currentTimeMillis()
                  + getTokenValidity() * 1000);
            }
          }
          newToken = true;
        }
        if (token != null) {
          unauthorizedResponse = false;
          if (LOGGER.isDebugEnabled()) {
            LOGGER.debug("Request [{}] user [{}] authenticated",
                getRequestURL(httpRequest), token.getUserName());
          }
          final AuthenticationToken authToken = token;
          // Expose the authenticated identity through the standard servlet API.
          httpRequest = new HttpServletRequestWrapper(httpRequest) {
            @Override
            public String getAuthType() {
              return authToken.getType();
            }

            @Override
            public String getRemoteUser() {
              return authToken.getUserName();
            }

            @Override
            public Principal getUserPrincipal() {
              return (authToken != AuthenticationToken.ANONYMOUS) ?
                  authToken : null;
            }
          };

          // If cookie persistence is configured to false,
          // it means the cookie will be a session cookie.
          // If the token is an old one, renew the its tokenMaxInactiveInterval.
          if (!newToken && !isCookiePersistent()
              && getTokenMaxInactiveInterval() > 0) {
            // TODO(vr): uncomment when we move to Hadoop 2.8+
            // token.setMaxInactives(System.currentTimeMillis()
            //     + getTokenMaxInactiveInterval() * 1000);
            // NOTE(review): setExpires(getExpires()) is currently a no-op
            // placeholder for the Hadoop 2.8+ renewal above.
            token.setExpires(token.getExpires());
            newToken = true;
          }
          if (newToken && !token.isExpired()
              && token != AuthenticationToken.ANONYMOUS) {
            // Sign the token and hand it back as a cookie for later requests.
            String signedToken = signer.sign(token.toString());
            createAuthCookie(httpResponse, signedToken, getCookieDomain(),
                getCookiePath(), token.getExpires(),
                isCookiePersistent(), isHttps);
          }
          // Log the user into Shiro's security context before continuing the chain.
          KerberosToken kerberosToken = new KerberosToken(token.getUserName(), token.toString());
          SecurityUtils.getSubject().login(kerberosToken);
          doFilter(filterChain, httpRequest, httpResponse);
        }
      } else {
        if (LOGGER.isDebugEnabled()) {
          LOGGER.debug("managementOperation returned false for request {}."
              + " token: {}", getRequestURL(httpRequest), token);
        }
        unauthorizedResponse = false;
      }
    } catch (AuthenticationException ex) {
      // exception from the filter itself is fatal
      errCode = HttpServletResponse.SC_FORBIDDEN;
      authenticationEx = ex;
      if (LOGGER.isDebugEnabled()) {
        LOGGER.debug("Authentication exception: " + ex.getMessage(), ex);
      } else {
        LOGGER.warn("Authentication exception: " + ex.getMessage());
      }
    }
    if (unauthorizedResponse) {
      if (!httpResponse.isCommitted()) {
        // Clear any stale auth cookie before sending the error.
        createAuthCookie(httpResponse, "", getCookieDomain(),
            getCookiePath(), 0, isCookiePersistent(), isHttps);
        // If response code is 401. Then WWW-Authenticate Header should be
        // present.. reset to 403 if not found..
        if ((errCode == HttpServletResponse.SC_UNAUTHORIZED)
            && (!httpResponse.containsHeader(KerberosAuthenticator.WWW_AUTHENTICATE))) {
          errCode = HttpServletResponse.SC_FORBIDDEN;
        }
        if (authenticationEx == null) {
          httpResponse.sendError(errCode, "Authentication required");
        } else {
          httpResponse.sendError(errCode, authenticationEx.getMessage());
        }
      }
    }
  }
  /**
   * It enforces the the Kerberos SPNEGO authentication sequence returning an
   * {@link AuthenticationToken} only after the Kerberos SPNEGO sequence has
   * completed successfully.
   *
   * @param request the HTTP client request.
   * @param response the HTTP client response.
   * @return an authentication token if the Kerberos SPNEGO sequence is complete
   * and valid, <code>null</code> if it is in progress (in this case the handler
   * handles the response to the client).
   * @throws IOException thrown if an IO error occurred.
   * @throws AuthenticationException thrown if Kerberos SPNEGO sequence failed.
   */
  public AuthenticationToken authenticate(HttpServletRequest request,
                                          final HttpServletResponse response)
      throws IOException, AuthenticationException {
    AuthenticationToken token = null;
    String authorization = request.getHeader(
        KerberosAuthenticator.AUTHORIZATION);
    if (authorization == null
        || !authorization.startsWith(KerberosAuthenticator.NEGOTIATE)) {
      // No (usable) Authorization header yet: challenge the client to start
      // SPNEGO by replying 401 with "WWW-Authenticate: Negotiate".
      response.setHeader(KerberosAuthenticator.WWW_AUTHENTICATE, KerberosAuthenticator.NEGOTIATE);
      response.setStatus(HttpServletResponse.SC_UNAUTHORIZED);
      if (authorization == null) {
        LOGGER.trace("SPNEGO starting for url: {}", request.getRequestURL());
      } else {
        LOGGER.warn("'" + KerberosAuthenticator.AUTHORIZATION +
            "' does not start with '" +
            KerberosAuthenticator.NEGOTIATE + "' : {}", authorization);
      }
    } else {
      // Strip the "Negotiate " prefix and base64-decode the client's GSS token.
      authorization = authorization.substring(
          KerberosAuthenticator.NEGOTIATE.length()).trim();
      final Base64 base64 = new Base64(0);
      final byte[] clientToken = base64.decode(authorization);
      try {
        // The client token names the server principal it targeted; reject
        // anything that is not an HTTP/ service principal.
        final String serverPrincipal =
            KerberosUtil.getTokenServerName(clientToken);
        if (!serverPrincipal.startsWith("HTTP/")) {
          throw new IllegalArgumentException(
              "Invalid server principal " + serverPrincipal +
                  "decoded from client request");
        }
        // Accept the GSS context under the server subject (holds the keytab).
        token = Subject.doAs(serverSubject,
            (PrivilegedExceptionAction<AuthenticationToken>) () -> runWithPrincipal(serverPrincipal, clientToken,
                base64, response));
      } catch (PrivilegedActionException ex) {
        // Unwrap: preserve IOExceptions, wrap everything else.
        if (ex.getException() instanceof IOException) {
          throw (IOException) ex.getException();
        } else {
          throw new AuthenticationException(ex.getException());
        }
      } catch (Exception ex) {
        throw new AuthenticationException(ex);
      }
    }
    return token;
  }
  /**
   * Accepts one step of the client's GSS-API handshake for the given server
   * principal. Runs under the server JAAS subject (see {@code authenticate}).
   *
   * @param serverPrincipal the HTTP/... service principal the client targeted.
   * @param clientToken the decoded GSS token from the Authorization header.
   * @param base64 encoder used for the reply token in WWW-Authenticate.
   * @param response response to which the reply token / status is written.
   * @return a completed {@link AuthenticationToken}, or {@code null} while the
   *     handshake is still in progress (a 401 with the next token is sent).
   * @throws IOException on I/O failure writing the response.
   * @throws GSSException if the GSS context cannot be established.
   */
  private AuthenticationToken runWithPrincipal(String serverPrincipal,
                                               byte[] clientToken, Base64 base64,
                                               HttpServletResponse response)
      throws IOException, GSSException {
    GSSContext gssContext = null;
    GSSCredential gssCreds = null;
    AuthenticationToken token = null;
    try {
      LOGGER.trace("SPNEGO initiated with server principal [{}]", serverPrincipal);
      // Acceptor-only credential for this service principal, valid for both
      // the SPNEGO and raw Kerberos 5 mechanisms.
      gssCreds = this.gssManager.createCredential(
          this.gssManager.createName(serverPrincipal,
              KerberosUtil.NT_GSS_KRB5_PRINCIPAL_OID),
          GSSCredential.INDEFINITE_LIFETIME,
          new Oid[]{KerberosUtil.GSS_SPNEGO_MECH_OID, KerberosUtil.GSS_KRB5_MECH_OID},
          GSSCredential.ACCEPT_ONLY);
      gssContext = this.gssManager.createContext(gssCreds);
      byte[] serverToken = gssContext.acceptSecContext(clientToken, 0,
          clientToken.length);
      if (serverToken != null && serverToken.length > 0) {
        // Echo our reply token back so the client can continue the handshake.
        String authenticate = base64.encodeToString(serverToken);
        response.setHeader(KerberosAuthenticator.WWW_AUTHENTICATE,
            KerberosAuthenticator.NEGOTIATE + " " +
                authenticate);
      }
      if (!gssContext.isEstablished()) {
        response.setStatus(HttpServletResponse.SC_UNAUTHORIZED);
        LOGGER.trace("SPNEGO in progress");
      } else {
        // Handshake complete: map the client principal to a short user name
        // via the configured auth_to_local rules.
        String clientPrincipal = gssContext.getSrcName().toString();
        KerberosName kerberosName = new KerberosName(clientPrincipal);
        String userName = kerberosName.getShortName();
        token = new AuthenticationToken(userName, clientPrincipal, TYPE);
        response.setStatus(HttpServletResponse.SC_OK);
        LOGGER.trace("SPNEGO completed for client principal [{}]",
            clientPrincipal);
      }
    } finally {
      // Always release native GSS resources.
      if (gssContext != null) {
        gssContext.dispose();
      }
      if (gssCreds != null) {
        gssCreds.dispose();
      }
    }
    return token;
  }
/**
* Returns the full URL of the request including the query string.
* <p>
* Used as a convenience method for logging purposes.
*
* @param request the request object.
* @return the full URL of the request including the query string.
*/
protected String getRequestURL(HttpServletRequest request) {
StringBuffer sb = request.getRequestURL();
if (request.getQueryString() != null) {
sb.append("?").append(request.getQueryString());
}
return sb.toString();
}
/**
* Returns the {@link AuthenticationToken} for the request.
* <p>
* It looks at the received HTTP cookies and extracts the value of the
* {@link AuthenticatedURL#AUTH_COOKIE}
* if present. It verifies the signature and if correct it creates the
* {@link AuthenticationToken} and returns
* it.
* <p>
* If this method returns <code>null</code> the filter will invoke the configured
* {@link AuthenticationHandler}
* to perform user authentication.
*
* @param request request object.
* @return the Authentication token if the request is authenticated, <code>null</code> otherwise.
* @throws AuthenticationException thrown if the token is invalid or if it has expired.
*/
private AuthenticationToken getToken(HttpServletRequest request)
throws AuthenticationException {
AuthenticationToken token;
Cookie[] cookies = request.getCookies();
token = getTokenFromCookies(cookies);
return token;
}
  /**
   * Extracts, signature-verifies and parses the auth token from the
   * {@link AuthenticatedURL#AUTH_COOKIE} cookie, if present.
   *
   * @param cookies the request cookies; may be null.
   * @return the parsed token, or null when no auth cookie is present.
   * @throws AuthenticationException "Empty token" for an empty cookie value,
   *     or on bad signature, wrong token type, or expiry.
   */
  private static AuthenticationToken getTokenFromCookies(Cookie[] cookies)
      throws AuthenticationException {
    AuthenticationToken token = null;
    String tokenStr = null;
    if (cookies != null) {
      for (Cookie cookie : cookies) {
        if (cookie.getName().equals(AuthenticatedURL.AUTH_COOKIE)) {
          tokenStr = cookie.getValue();
          if (tokenStr.isEmpty()) {
            throw new AuthenticationException("Empty token");
          }
          try {
            // Verify the HMAC appended by Signer and strip it, leaving the
            // raw serialized token.
            tokenStr = signer.verifyAndExtract(tokenStr);
          } catch (SignerException ex) {
            throw new AuthenticationException(ex);
          }
          break;
        }
      }
    }
    if (tokenStr != null) {
      token = AuthenticationToken.parse(tokenStr);
      boolean match = verifyTokenType(token);
      if (!match) {
        throw new AuthenticationException("Invalid AuthenticationToken type");
      }
      if (token.isExpired()) {
        throw new AuthenticationException("AuthenticationToken expired");
      }
    }
    return token;
  }
  /**
   * A parallel implementation to getTokenFromCookies, this handles
   * jakarta.ws.rs.core.HttpHeaders.Cookies kind.
   *
   * Used in {@link org.apache.zeppelin.rest.LoginRestApi}::getLogin()
   *
   * Note: unlike getTokenFromCookies this does NOT verify a Signer HMAC; it
   * strips the "Negotiate " prefix from the AUTHORIZATION cookie value instead.
   *
   * @param cookies - Cookie(s) map read from HttpHeaders
   * @return {@link KerberosToken} if available in AUTHORIZATION cookie
   *
   * @throws org.apache.shiro.authc.AuthenticationException
   */
  public static KerberosToken getKerberosTokenFromCookies(
      Map<String, jakarta.ws.rs.core.Cookie> cookies)
      throws org.apache.shiro.authc.AuthenticationException {
    KerberosToken kerberosToken = null;
    String tokenStr = null;
    if (cookies != null) {
      for (jakarta.ws.rs.core.Cookie cookie : cookies.values()) {
        if (cookie.getName().equals(KerberosAuthenticator.AUTHORIZATION)) {
          tokenStr = cookie.getValue();
          if (tokenStr.isEmpty()) {
            throw new org.apache.shiro.authc.AuthenticationException("Empty token");
          }
          try {
            // Drop the "Negotiate " scheme prefix to obtain the raw token.
            tokenStr = tokenStr.substring(KerberosAuthenticator.NEGOTIATE.length()).trim();
          } catch (Exception ex) {
            throw new org.apache.shiro.authc.AuthenticationException(ex);
          }
          break;
        }
      }
    }
    if (tokenStr != null) {
      try {
        // Same validity checks as getTokenFromCookies: type match + expiry.
        AuthenticationToken authToken = AuthenticationToken.parse(tokenStr);
        boolean match = verifyTokenType(authToken);
        if (!match) {
          throw new
              org.apache.shiro.authc.AuthenticationException("Invalid AuthenticationToken type");
        }
        if (authToken.isExpired()) {
          throw new org.apache.shiro.authc.AuthenticationException("AuthenticationToken expired");
        }
        kerberosToken = new KerberosToken(authToken.getUserName(), tokenStr);
      } catch (AuthenticationException ex) {
        // Re-wrap Hadoop's checked exception in Shiro's unchecked one.
        throw new org.apache.shiro.authc.AuthenticationException(ex);
      }
    }
    return kerberosToken;
  }
/**
* This method verifies if the specified token type matches one of the the
* token types supported by our Authentication provider : {@link KerberosRealm}
*
* @param token The token whose type needs to be verified.
* @return true If the token type matches one of the supported token types
* false Otherwise
*/
protected static boolean verifyTokenType(AuthenticationToken token) {
return TYPE.equals(token.getType());
}
  /**
   * Delegates call to the servlet filter chain. Sub-classes my override this
   * method to perform pre and post tasks.
   *
   * @param filterChain the filter chain object.
   * @param request the request object.
   * @param response the response object.
   * @throws IOException thrown if an IO error occurred.
   * @throws ServletException thrown if a processing error occurred.
   */
  protected void doFilter(FilterChain filterChain, HttpServletRequest request,
                          HttpServletResponse response) throws IOException, ServletException {
    filterChain.doFilter(request, response);
  }
  /**
   * Creates the Hadoop authentication HTTP cookie.
   *
   * @param resp the response object.
   * @param token authentication token for the cookie.
   * @param domain the cookie domain.
   * @param path the cookie path.
   * @param expires UNIX timestamp that indicates the expire date of the
   *                cookie. It has no effect if its value < 0, or if the
   *                cookie is not persistent.
   * @param isCookiePersistent whether the cookie is persistent or not
   *                           (a non-persistent cookie gets no Expires
   *                           attribute and is session-scoped).
   * @param isSecure is the cookie secure?
   * <p>
   * XXX the following code duplicate some logic in Jetty / Servlet API,
   * because of the fact that Hadoop is stuck at servlet 2.5 and jetty 6
   * right now.
   */
  public static void createAuthCookie(HttpServletResponse resp, String token,
                                      String domain, String path, long expires,
                                      boolean isCookiePersistent,
                                      boolean isSecure) {
    StringBuilder sb = new StringBuilder(AuthenticatedURL.AUTH_COOKIE)
        .append("=");
    if (token != null && token.length() > 0) {
      // Quote the value: signed tokens may contain characters that need quoting.
      sb.append("\"").append(token).append("\"");
    }
    if (path != null) {
      sb.append("; Path=").append(path);
    }
    if (domain != null) {
      sb.append("; Domain=").append(domain);
    }
    if (expires >= 0 && isCookiePersistent) {
      // RFC-1123 style GMT date as required by the Expires attribute.
      Date date = new Date(expires);
      SimpleDateFormat df = new SimpleDateFormat("EEE, " +
          "dd-MMM-yyyy HH:mm:ss zzz");
      df.setTimeZone(TimeZone.getTimeZone("GMT"));
      sb.append("; Expires=").append(df.format(date));
    }
    if (isSecure) {
      sb.append("; Secure");
    }
    // Always HttpOnly: the token must not be readable from page scripts.
    sb.append("; HttpOnly");
    resp.addHeader("Set-Cookie", sb.toString());
  }
/**
* Returns a {@link Properties} config object after dumping all {@link KerberosRealm} bean
* properties received from shiro.ini
*
*/
protected Properties getConfiguration() {
Properties props = new Properties();
props.put(COOKIE_DOMAIN, cookieDomain);
props.put(COOKIE_PATH, cookiePath);
props.put(COOKIE_PERSISTENT, isCookiePersistent);
props.put(SIGNER_SECRET_PROVIDER, signatureSecretProvider);
props.put(SIGNATURE_SECRET_FILE, signatureSecretFile);
props.put(AUTH_TYPE, TYPE);
props.put(AUTH_TOKEN_VALIDITY, tokenValidity);
props.put(AUTH_TOKEN_MAX_INACTIVE_INTERVAL, tokenMaxInactiveInterval);
props.put(PRINCIPAL, principal);
props.put(KEYTAB, keytab);
props.put(NAME_RULES, nameRules);
props.put(LOGOUT, logout);
return props;
}
  /**
   * Returns the max inactive interval time of the generated tokens.
   *
   * @return the max inactive interval time of the generated tokens in seconds.
   */
  protected long getTokenMaxInactiveInterval() {
    // Field is stored in milliseconds (see setTokenMaxInactiveInterval).
    // NOTE(review): with the default field value of -1, long division yields 0,
    // so "disabled" is reported as 0 rather than -1 — callers appear to only
    // test for "> 0"; confirm before relying on the exact value.
    return tokenMaxInactiveInterval / 1000;
  }

  /**
   * Returns the tokenValidity time of the generated tokens.
   *
   * @return the tokenValidity time of the generated tokens, in seconds.
   */
  protected long getTokenValidity() {
    // Field is stored in milliseconds (see setTokenValidity).
    return tokenValidity / 1000;
  }

  /**
   * Returns the cookie domain to use for the HTTP cookie.
   *
   * @return the cookie domain to use for the HTTP cookie.
   */
  protected String getCookieDomain() {
    return cookieDomain;
  }

  /**
   * Returns the cookie path to use for the HTTP cookie.
   *
   * @return the cookie path to use for the HTTP cookie.
   */
  protected String getCookiePath() {
    return cookiePath;
  }

  /**
   * Returns the cookie persistence to use for the HTTP cookie.
   *
   * @return the cookie persistence to use for the HTTP cookie.
   */
  public boolean isCookiePersistent() {
    return isCookiePersistent;
  }

  // -------------------------------------------------------------------------
  // Bean setters/getters populated by Shiro from shiro.ini (krbRealm.* keys).
  // The two interval setters convert configured seconds to milliseconds.
  // -------------------------------------------------------------------------

  public void setTokenMaxInactiveInterval(long tokenMaxInactiveInterval) {
    this.tokenMaxInactiveInterval = tokenMaxInactiveInterval * 1000;
  }

  public void setTokenValidity(long tokenValidity) {
    this.tokenValidity = tokenValidity * 1000;
  }

  public void setCookieDomain(String cookieDomain) {
    this.cookieDomain = cookieDomain;
  }

  public void setCookiePath(String cookiePath) {
    this.cookiePath = cookiePath;
  }

  public void setCookiePersistent(boolean cookiePersistent) {
    isCookiePersistent = cookiePersistent;
  }

  public String getPrincipal() {
    return principal;
  }

  public void setPrincipal(String principal) {
    this.principal = principal;
  }

  public void setKeytab(String keytab) {
    this.keytab = keytab;
  }

  public String getNameRules() {
    return nameRules;
  }

  public void setNameRules(String nameRules) {
    this.nameRules = nameRules;
  }

  public String getSignatureSecretFile() {
    return signatureSecretFile;
  }

  public void setSignatureSecretFile(String signatureSecretFile) {
    this.signatureSecretFile = signatureSecretFile;
  }

  public String getSignatureSecretProvider() {
    return signatureSecretProvider;
  }

  public void setSignatureSecretProvider(String signatureSecretProvider) {
    this.signatureSecretProvider = signatureSecretProvider;
  }

  public String getLogout() {
    return logout;
  }

  public void setLogout(String logout) {
    this.logout = logout;
  }

  public Boolean getLogoutAPI() {
    return logoutAPI;
  }

  public void setLogoutAPI(Boolean logoutAPI) {
    this.logoutAPI = logoutAPI;
  }

  public String getProviderUrl() {
    return providerUrl;
  }

  public void setProviderUrl(String providerUrl) {
    this.providerUrl = providerUrl;
  }

  public String getRedirectParam() {
    return redirectParam;
  }

  public void setRedirectParam(String redirectParam) {
    this.redirectParam = redirectParam;
  }
/**
* Releases any resources initialized by the authentication handler.
* <p>
* It destroys the Kerberos context.
*/
public void destroy() {
keytab = null;
serverSubject = null;
if (secretProvider != null && destroySecretProvider) {
secretProvider.destroy();
secretProvider = null;
}
}
}
|
googleapis/google-cloud-java | 36,984 | java-apigee-registry/proto-google-cloud-apigee-registry-v1/src/main/java/com/google/cloud/apigeeregistry/v1/ListApiDeploymentsRequest.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/apigeeregistry/v1/registry_service.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.apigeeregistry.v1;
/**
*
*
* <pre>
* Request message for ListApiDeployments.
* </pre>
*
* Protobuf type {@code google.cloud.apigeeregistry.v1.ListApiDeploymentsRequest}
*/
public final class ListApiDeploymentsRequest extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.apigeeregistry.v1.ListApiDeploymentsRequest)
ListApiDeploymentsRequestOrBuilder {
private static final long serialVersionUID = 0L;
// Use ListApiDeploymentsRequest.newBuilder() to construct.
private ListApiDeploymentsRequest(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private ListApiDeploymentsRequest() {
parent_ = "";
pageToken_ = "";
filter_ = "";
}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
return new ListApiDeploymentsRequest();
}
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.apigeeregistry.v1.RegistryServiceProto
.internal_static_google_cloud_apigeeregistry_v1_ListApiDeploymentsRequest_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.apigeeregistry.v1.RegistryServiceProto
.internal_static_google_cloud_apigeeregistry_v1_ListApiDeploymentsRequest_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.apigeeregistry.v1.ListApiDeploymentsRequest.class,
com.google.cloud.apigeeregistry.v1.ListApiDeploymentsRequest.Builder.class);
}
public static final int PARENT_FIELD_NUMBER = 1;
@SuppressWarnings("serial")
private volatile java.lang.Object parent_ = "";
/**
*
*
* <pre>
* Required. The parent, which owns this collection of deployments.
* Format: `projects/*/locations/*/apis/*`
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return The parent.
*/
@java.lang.Override
public java.lang.String getParent() {
java.lang.Object ref = parent_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
parent_ = s;
return s;
}
}
/**
*
*
* <pre>
* Required. The parent, which owns this collection of deployments.
* Format: `projects/*/locations/*/apis/*`
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return The bytes for parent.
*/
@java.lang.Override
public com.google.protobuf.ByteString getParentBytes() {
java.lang.Object ref = parent_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
parent_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
public static final int PAGE_SIZE_FIELD_NUMBER = 2;
private int pageSize_ = 0;
/**
*
*
* <pre>
* The maximum number of deployments to return.
* The service may return fewer than this value.
* If unspecified, at most 50 values will be returned.
* The maximum is 1000; values above 1000 will be coerced to 1000.
* </pre>
*
* <code>int32 page_size = 2;</code>
*
* @return The pageSize.
*/
@java.lang.Override
public int getPageSize() {
return pageSize_;
}
public static final int PAGE_TOKEN_FIELD_NUMBER = 3;
@SuppressWarnings("serial")
private volatile java.lang.Object pageToken_ = "";
/**
*
*
* <pre>
* A page token, received from a previous `ListApiDeployments` call.
* Provide this to retrieve the subsequent page.
*
* When paginating, all other parameters provided to `ListApiDeployments` must
* match the call that provided the page token.
* </pre>
*
* <code>string page_token = 3;</code>
*
* @return The pageToken.
*/
@java.lang.Override
public java.lang.String getPageToken() {
java.lang.Object ref = pageToken_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
pageToken_ = s;
return s;
}
}
/**
*
*
* <pre>
* A page token, received from a previous `ListApiDeployments` call.
* Provide this to retrieve the subsequent page.
*
* When paginating, all other parameters provided to `ListApiDeployments` must
* match the call that provided the page token.
* </pre>
*
* <code>string page_token = 3;</code>
*
* @return The bytes for pageToken.
*/
@java.lang.Override
public com.google.protobuf.ByteString getPageTokenBytes() {
java.lang.Object ref = pageToken_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
pageToken_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
public static final int FILTER_FIELD_NUMBER = 4;
@SuppressWarnings("serial")
private volatile java.lang.Object filter_ = "";
/**
*
*
* <pre>
* An expression that can be used to filter the list. Filters use the Common
* Expression Language and can refer to all message fields.
* </pre>
*
* <code>string filter = 4;</code>
*
* @return The filter.
*/
@java.lang.Override
public java.lang.String getFilter() {
java.lang.Object ref = filter_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
filter_ = s;
return s;
}
}
/**
*
*
* <pre>
* An expression that can be used to filter the list. Filters use the Common
* Expression Language and can refer to all message fields.
* </pre>
*
* <code>string filter = 4;</code>
*
* @return The bytes for filter.
*/
@java.lang.Override
public com.google.protobuf.ByteString getFilterBytes() {
java.lang.Object ref = filter_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
filter_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
@java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(parent_)) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 1, parent_);
}
if (pageSize_ != 0) {
output.writeInt32(2, pageSize_);
}
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(pageToken_)) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 3, pageToken_);
}
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(filter_)) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 4, filter_);
}
getUnknownFields().writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(parent_)) {
size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, parent_);
}
if (pageSize_ != 0) {
size += com.google.protobuf.CodedOutputStream.computeInt32Size(2, pageSize_);
}
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(pageToken_)) {
size += com.google.protobuf.GeneratedMessageV3.computeStringSize(3, pageToken_);
}
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(filter_)) {
size += com.google.protobuf.GeneratedMessageV3.computeStringSize(4, filter_);
}
size += getUnknownFields().getSerializedSize();
memoizedSize = size;
return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof com.google.cloud.apigeeregistry.v1.ListApiDeploymentsRequest)) {
return super.equals(obj);
}
com.google.cloud.apigeeregistry.v1.ListApiDeploymentsRequest other =
(com.google.cloud.apigeeregistry.v1.ListApiDeploymentsRequest) obj;
if (!getParent().equals(other.getParent())) return false;
if (getPageSize() != other.getPageSize()) return false;
if (!getPageToken().equals(other.getPageToken())) return false;
if (!getFilter().equals(other.getFilter())) return false;
if (!getUnknownFields().equals(other.getUnknownFields())) return false;
return true;
}
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
hash = (37 * hash) + PARENT_FIELD_NUMBER;
hash = (53 * hash) + getParent().hashCode();
hash = (37 * hash) + PAGE_SIZE_FIELD_NUMBER;
hash = (53 * hash) + getPageSize();
hash = (37 * hash) + PAGE_TOKEN_FIELD_NUMBER;
hash = (53 * hash) + getPageToken().hashCode();
hash = (37 * hash) + FILTER_FIELD_NUMBER;
hash = (53 * hash) + getFilter().hashCode();
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
public static com.google.cloud.apigeeregistry.v1.ListApiDeploymentsRequest parseFrom(
java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.apigeeregistry.v1.ListApiDeploymentsRequest parseFrom(
java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.apigeeregistry.v1.ListApiDeploymentsRequest parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.apigeeregistry.v1.ListApiDeploymentsRequest parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.apigeeregistry.v1.ListApiDeploymentsRequest parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.apigeeregistry.v1.ListApiDeploymentsRequest parseFrom(
byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.apigeeregistry.v1.ListApiDeploymentsRequest parseFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.apigeeregistry.v1.ListApiDeploymentsRequest parseFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.apigeeregistry.v1.ListApiDeploymentsRequest parseDelimitedFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
}
public static com.google.cloud.apigeeregistry.v1.ListApiDeploymentsRequest parseDelimitedFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.apigeeregistry.v1.ListApiDeploymentsRequest parseFrom(
com.google.protobuf.CodedInputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.apigeeregistry.v1.ListApiDeploymentsRequest parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() {
return newBuilder();
}
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(
com.google.cloud.apigeeregistry.v1.ListApiDeploymentsRequest prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
*
*
* <pre>
* Request message for ListApiDeployments.
* </pre>
*
* Protobuf type {@code google.cloud.apigeeregistry.v1.ListApiDeploymentsRequest}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.cloud.apigeeregistry.v1.ListApiDeploymentsRequest)
com.google.cloud.apigeeregistry.v1.ListApiDeploymentsRequestOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.apigeeregistry.v1.RegistryServiceProto
.internal_static_google_cloud_apigeeregistry_v1_ListApiDeploymentsRequest_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.apigeeregistry.v1.RegistryServiceProto
.internal_static_google_cloud_apigeeregistry_v1_ListApiDeploymentsRequest_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.apigeeregistry.v1.ListApiDeploymentsRequest.class,
com.google.cloud.apigeeregistry.v1.ListApiDeploymentsRequest.Builder.class);
}
// Construct using com.google.cloud.apigeeregistry.v1.ListApiDeploymentsRequest.newBuilder()
private Builder() {}
private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
}
@java.lang.Override
public Builder clear() {
super.clear();
bitField0_ = 0;
parent_ = "";
pageSize_ = 0;
pageToken_ = "";
filter_ = "";
return this;
}
@java.lang.Override
public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
return com.google.cloud.apigeeregistry.v1.RegistryServiceProto
.internal_static_google_cloud_apigeeregistry_v1_ListApiDeploymentsRequest_descriptor;
}
@java.lang.Override
public com.google.cloud.apigeeregistry.v1.ListApiDeploymentsRequest
getDefaultInstanceForType() {
return com.google.cloud.apigeeregistry.v1.ListApiDeploymentsRequest.getDefaultInstance();
}
@java.lang.Override
public com.google.cloud.apigeeregistry.v1.ListApiDeploymentsRequest build() {
com.google.cloud.apigeeregistry.v1.ListApiDeploymentsRequest result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public com.google.cloud.apigeeregistry.v1.ListApiDeploymentsRequest buildPartial() {
com.google.cloud.apigeeregistry.v1.ListApiDeploymentsRequest result =
new com.google.cloud.apigeeregistry.v1.ListApiDeploymentsRequest(this);
if (bitField0_ != 0) {
buildPartial0(result);
}
onBuilt();
return result;
}
private void buildPartial0(
com.google.cloud.apigeeregistry.v1.ListApiDeploymentsRequest result) {
int from_bitField0_ = bitField0_;
if (((from_bitField0_ & 0x00000001) != 0)) {
result.parent_ = parent_;
}
if (((from_bitField0_ & 0x00000002) != 0)) {
result.pageSize_ = pageSize_;
}
if (((from_bitField0_ & 0x00000004) != 0)) {
result.pageToken_ = pageToken_;
}
if (((from_bitField0_ & 0x00000008) != 0)) {
result.filter_ = filter_;
}
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof com.google.cloud.apigeeregistry.v1.ListApiDeploymentsRequest) {
return mergeFrom((com.google.cloud.apigeeregistry.v1.ListApiDeploymentsRequest) other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(com.google.cloud.apigeeregistry.v1.ListApiDeploymentsRequest other) {
if (other
== com.google.cloud.apigeeregistry.v1.ListApiDeploymentsRequest.getDefaultInstance())
return this;
if (!other.getParent().isEmpty()) {
parent_ = other.parent_;
bitField0_ |= 0x00000001;
onChanged();
}
if (other.getPageSize() != 0) {
setPageSize(other.getPageSize());
}
if (!other.getPageToken().isEmpty()) {
pageToken_ = other.pageToken_;
bitField0_ |= 0x00000004;
onChanged();
}
if (!other.getFilter().isEmpty()) {
filter_ = other.filter_;
bitField0_ |= 0x00000008;
onChanged();
}
this.mergeUnknownFields(other.getUnknownFields());
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
return true;
}
@java.lang.Override
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
case 10:
{
parent_ = input.readStringRequireUtf8();
bitField0_ |= 0x00000001;
break;
} // case 10
case 16:
{
pageSize_ = input.readInt32();
bitField0_ |= 0x00000002;
break;
} // case 16
case 26:
{
pageToken_ = input.readStringRequireUtf8();
bitField0_ |= 0x00000004;
break;
} // case 26
case 34:
{
filter_ = input.readStringRequireUtf8();
bitField0_ |= 0x00000008;
break;
} // case 34
default:
{
if (!super.parseUnknownField(input, extensionRegistry, tag)) {
done = true; // was an endgroup tag
}
break;
} // default:
} // switch (tag)
} // while (!done)
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.unwrapIOException();
} finally {
onChanged();
} // finally
return this;
}
private int bitField0_;
private java.lang.Object parent_ = "";
/**
*
*
* <pre>
* Required. The parent, which owns this collection of deployments.
* Format: `projects/*/locations/*/apis/*`
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return The parent.
*/
public java.lang.String getParent() {
java.lang.Object ref = parent_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
parent_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
*
*
* <pre>
* Required. The parent, which owns this collection of deployments.
* Format: `projects/*/locations/*/apis/*`
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return The bytes for parent.
*/
public com.google.protobuf.ByteString getParentBytes() {
java.lang.Object ref = parent_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
parent_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
*
*
* <pre>
* Required. The parent, which owns this collection of deployments.
* Format: `projects/*/locations/*/apis/*`
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @param value The parent to set.
* @return This builder for chaining.
*/
public Builder setParent(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
parent_ = value;
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. The parent, which owns this collection of deployments.
* Format: `projects/*/locations/*/apis/*`
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return This builder for chaining.
*/
public Builder clearParent() {
parent_ = getDefaultInstance().getParent();
bitField0_ = (bitField0_ & ~0x00000001);
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. The parent, which owns this collection of deployments.
* Format: `projects/*/locations/*/apis/*`
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @param value The bytes for parent to set.
* @return This builder for chaining.
*/
public Builder setParentBytes(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
checkByteStringIsUtf8(value);
parent_ = value;
bitField0_ |= 0x00000001;
onChanged();
return this;
}
private int pageSize_;
/**
*
*
* <pre>
* The maximum number of deployments to return.
* The service may return fewer than this value.
* If unspecified, at most 50 values will be returned.
* The maximum is 1000; values above 1000 will be coerced to 1000.
* </pre>
*
* <code>int32 page_size = 2;</code>
*
* @return The pageSize.
*/
@java.lang.Override
public int getPageSize() {
return pageSize_;
}
/**
*
*
* <pre>
* The maximum number of deployments to return.
* The service may return fewer than this value.
* If unspecified, at most 50 values will be returned.
* The maximum is 1000; values above 1000 will be coerced to 1000.
* </pre>
*
* <code>int32 page_size = 2;</code>
*
* @param value The pageSize to set.
* @return This builder for chaining.
*/
public Builder setPageSize(int value) {
pageSize_ = value;
bitField0_ |= 0x00000002;
onChanged();
return this;
}
/**
*
*
* <pre>
* The maximum number of deployments to return.
* The service may return fewer than this value.
* If unspecified, at most 50 values will be returned.
* The maximum is 1000; values above 1000 will be coerced to 1000.
* </pre>
*
* <code>int32 page_size = 2;</code>
*
* @return This builder for chaining.
*/
public Builder clearPageSize() {
bitField0_ = (bitField0_ & ~0x00000002);
pageSize_ = 0;
onChanged();
return this;
}
private java.lang.Object pageToken_ = "";
/**
*
*
* <pre>
* A page token, received from a previous `ListApiDeployments` call.
* Provide this to retrieve the subsequent page.
*
* When paginating, all other parameters provided to `ListApiDeployments` must
* match the call that provided the page token.
* </pre>
*
* <code>string page_token = 3;</code>
*
* @return The pageToken.
*/
public java.lang.String getPageToken() {
java.lang.Object ref = pageToken_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
pageToken_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
*
*
* <pre>
* A page token, received from a previous `ListApiDeployments` call.
* Provide this to retrieve the subsequent page.
*
* When paginating, all other parameters provided to `ListApiDeployments` must
* match the call that provided the page token.
* </pre>
*
* <code>string page_token = 3;</code>
*
* @return The bytes for pageToken.
*/
public com.google.protobuf.ByteString getPageTokenBytes() {
java.lang.Object ref = pageToken_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
pageToken_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
*
*
* <pre>
* A page token, received from a previous `ListApiDeployments` call.
* Provide this to retrieve the subsequent page.
*
* When paginating, all other parameters provided to `ListApiDeployments` must
* match the call that provided the page token.
* </pre>
*
* <code>string page_token = 3;</code>
*
* @param value The pageToken to set.
* @return This builder for chaining.
*/
public Builder setPageToken(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
pageToken_ = value;
bitField0_ |= 0x00000004;
onChanged();
return this;
}
/**
*
*
* <pre>
* A page token, received from a previous `ListApiDeployments` call.
* Provide this to retrieve the subsequent page.
*
* When paginating, all other parameters provided to `ListApiDeployments` must
* match the call that provided the page token.
* </pre>
*
* <code>string page_token = 3;</code>
*
* @return This builder for chaining.
*/
public Builder clearPageToken() {
pageToken_ = getDefaultInstance().getPageToken();
bitField0_ = (bitField0_ & ~0x00000004);
onChanged();
return this;
}
/**
*
*
* <pre>
* A page token, received from a previous `ListApiDeployments` call.
* Provide this to retrieve the subsequent page.
*
* When paginating, all other parameters provided to `ListApiDeployments` must
* match the call that provided the page token.
* </pre>
*
* <code>string page_token = 3;</code>
*
* @param value The bytes for pageToken to set.
* @return This builder for chaining.
*/
public Builder setPageTokenBytes(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
checkByteStringIsUtf8(value);
pageToken_ = value;
bitField0_ |= 0x00000004;
onChanged();
return this;
}
private java.lang.Object filter_ = "";
/**
*
*
* <pre>
* An expression that can be used to filter the list. Filters use the Common
* Expression Language and can refer to all message fields.
* </pre>
*
* <code>string filter = 4;</code>
*
* @return The filter.
*/
public java.lang.String getFilter() {
java.lang.Object ref = filter_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
filter_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
*
*
* <pre>
* An expression that can be used to filter the list. Filters use the Common
* Expression Language and can refer to all message fields.
* </pre>
*
* <code>string filter = 4;</code>
*
* @return The bytes for filter.
*/
public com.google.protobuf.ByteString getFilterBytes() {
java.lang.Object ref = filter_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
filter_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
*
*
* <pre>
* An expression that can be used to filter the list. Filters use the Common
* Expression Language and can refer to all message fields.
* </pre>
*
* <code>string filter = 4;</code>
*
* @param value The filter to set.
* @return This builder for chaining.
*/
public Builder setFilter(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
filter_ = value;
bitField0_ |= 0x00000008;
onChanged();
return this;
}
/**
*
*
* <pre>
* An expression that can be used to filter the list. Filters use the Common
* Expression Language and can refer to all message fields.
* </pre>
*
* <code>string filter = 4;</code>
*
* @return This builder for chaining.
*/
public Builder clearFilter() {
filter_ = getDefaultInstance().getFilter();
bitField0_ = (bitField0_ & ~0x00000008);
onChanged();
return this;
}
/**
*
*
* <pre>
* An expression that can be used to filter the list. Filters use the Common
* Expression Language and can refer to all message fields.
* </pre>
*
* <code>string filter = 4;</code>
*
* @param value The bytes for filter to set.
* @return This builder for chaining.
*/
public Builder setFilterBytes(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
checkByteStringIsUtf8(value);
filter_ = value;
bitField0_ |= 0x00000008;
onChanged();
return this;
}
@java.lang.Override
public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:google.cloud.apigeeregistry.v1.ListApiDeploymentsRequest)
}
// @@protoc_insertion_point(class_scope:google.cloud.apigeeregistry.v1.ListApiDeploymentsRequest)
private static final com.google.cloud.apigeeregistry.v1.ListApiDeploymentsRequest
DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new com.google.cloud.apigeeregistry.v1.ListApiDeploymentsRequest();
}
public static com.google.cloud.apigeeregistry.v1.ListApiDeploymentsRequest getDefaultInstance() {
return DEFAULT_INSTANCE;
}
private static final com.google.protobuf.Parser<ListApiDeploymentsRequest> PARSER =
new com.google.protobuf.AbstractParser<ListApiDeploymentsRequest>() {
@java.lang.Override
public ListApiDeploymentsRequest parsePartialFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
Builder builder = newBuilder();
try {
builder.mergeFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(builder.buildPartial());
} catch (com.google.protobuf.UninitializedMessageException e) {
throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
} catch (java.io.IOException e) {
throw new com.google.protobuf.InvalidProtocolBufferException(e)
.setUnfinishedMessage(builder.buildPartial());
}
return builder.buildPartial();
}
};
public static com.google.protobuf.Parser<ListApiDeploymentsRequest> parser() {
return PARSER;
}
@java.lang.Override
public com.google.protobuf.Parser<ListApiDeploymentsRequest> getParserForType() {
return PARSER;
}
@java.lang.Override
public com.google.cloud.apigeeregistry.v1.ListApiDeploymentsRequest getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
|
googleapis/google-cloud-java | 37,032 | java-recommender/proto-google-cloud-recommender-v1beta1/src/main/java/com/google/cloud/recommender/v1beta1/UpdateInsightTypeConfigRequest.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/recommender/v1beta1/recommender_service.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.recommender.v1beta1;
/**
*
*
* <pre>
* Request for the `UpdateInsightTypeConfig` method.
* </pre>
*
* Protobuf type {@code google.cloud.recommender.v1beta1.UpdateInsightTypeConfigRequest}
*/
public final class UpdateInsightTypeConfigRequest extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.recommender.v1beta1.UpdateInsightTypeConfigRequest)
UpdateInsightTypeConfigRequestOrBuilder {
private static final long serialVersionUID = 0L;
// Use UpdateInsightTypeConfigRequest.newBuilder() to construct.
private UpdateInsightTypeConfigRequest(
com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private UpdateInsightTypeConfigRequest() {}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
return new UpdateInsightTypeConfigRequest();
}
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.recommender.v1beta1.RecommenderProto
.internal_static_google_cloud_recommender_v1beta1_UpdateInsightTypeConfigRequest_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.recommender.v1beta1.RecommenderProto
.internal_static_google_cloud_recommender_v1beta1_UpdateInsightTypeConfigRequest_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.recommender.v1beta1.UpdateInsightTypeConfigRequest.class,
com.google.cloud.recommender.v1beta1.UpdateInsightTypeConfigRequest.Builder.class);
}
private int bitField0_;
public static final int INSIGHT_TYPE_CONFIG_FIELD_NUMBER = 1;
private com.google.cloud.recommender.v1beta1.InsightTypeConfig insightTypeConfig_;
/**
*
*
* <pre>
* Required. The InsightTypeConfig to update.
* </pre>
*
* <code>
* .google.cloud.recommender.v1beta1.InsightTypeConfig insight_type_config = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return Whether the insightTypeConfig field is set.
*/
@java.lang.Override
public boolean hasInsightTypeConfig() {
return ((bitField0_ & 0x00000001) != 0);
}
/**
*
*
* <pre>
* Required. The InsightTypeConfig to update.
* </pre>
*
* <code>
* .google.cloud.recommender.v1beta1.InsightTypeConfig insight_type_config = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return The insightTypeConfig.
*/
@java.lang.Override
public com.google.cloud.recommender.v1beta1.InsightTypeConfig getInsightTypeConfig() {
return insightTypeConfig_ == null
? com.google.cloud.recommender.v1beta1.InsightTypeConfig.getDefaultInstance()
: insightTypeConfig_;
}
/**
*
*
* <pre>
* Required. The InsightTypeConfig to update.
* </pre>
*
* <code>
* .google.cloud.recommender.v1beta1.InsightTypeConfig insight_type_config = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
@java.lang.Override
public com.google.cloud.recommender.v1beta1.InsightTypeConfigOrBuilder
getInsightTypeConfigOrBuilder() {
return insightTypeConfig_ == null
? com.google.cloud.recommender.v1beta1.InsightTypeConfig.getDefaultInstance()
: insightTypeConfig_;
}
public static final int UPDATE_MASK_FIELD_NUMBER = 2;
private com.google.protobuf.FieldMask updateMask_;
/**
*
*
* <pre>
* The list of fields to be updated.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2;</code>
*
* @return Whether the updateMask field is set.
*/
@java.lang.Override
public boolean hasUpdateMask() {
return ((bitField0_ & 0x00000002) != 0);
}
/**
*
*
* <pre>
* The list of fields to be updated.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2;</code>
*
* @return The updateMask.
*/
@java.lang.Override
public com.google.protobuf.FieldMask getUpdateMask() {
return updateMask_ == null ? com.google.protobuf.FieldMask.getDefaultInstance() : updateMask_;
}
/**
*
*
* <pre>
* The list of fields to be updated.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2;</code>
*/
@java.lang.Override
public com.google.protobuf.FieldMaskOrBuilder getUpdateMaskOrBuilder() {
return updateMask_ == null ? com.google.protobuf.FieldMask.getDefaultInstance() : updateMask_;
}
public static final int VALIDATE_ONLY_FIELD_NUMBER = 3;
private boolean validateOnly_ = false;
/**
*
*
* <pre>
* If true, validate the request and preview the change, but do not actually
* update it.
* </pre>
*
* <code>bool validate_only = 3;</code>
*
* @return The validateOnly.
*/
@java.lang.Override
public boolean getValidateOnly() {
return validateOnly_;
}
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
@java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
if (((bitField0_ & 0x00000001) != 0)) {
output.writeMessage(1, getInsightTypeConfig());
}
if (((bitField0_ & 0x00000002) != 0)) {
output.writeMessage(2, getUpdateMask());
}
if (validateOnly_ != false) {
output.writeBool(3, validateOnly_);
}
getUnknownFields().writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
if (((bitField0_ & 0x00000001) != 0)) {
size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, getInsightTypeConfig());
}
if (((bitField0_ & 0x00000002) != 0)) {
size += com.google.protobuf.CodedOutputStream.computeMessageSize(2, getUpdateMask());
}
if (validateOnly_ != false) {
size += com.google.protobuf.CodedOutputStream.computeBoolSize(3, validateOnly_);
}
size += getUnknownFields().getSerializedSize();
memoizedSize = size;
return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof com.google.cloud.recommender.v1beta1.UpdateInsightTypeConfigRequest)) {
return super.equals(obj);
}
com.google.cloud.recommender.v1beta1.UpdateInsightTypeConfigRequest other =
(com.google.cloud.recommender.v1beta1.UpdateInsightTypeConfigRequest) obj;
if (hasInsightTypeConfig() != other.hasInsightTypeConfig()) return false;
if (hasInsightTypeConfig()) {
if (!getInsightTypeConfig().equals(other.getInsightTypeConfig())) return false;
}
if (hasUpdateMask() != other.hasUpdateMask()) return false;
if (hasUpdateMask()) {
if (!getUpdateMask().equals(other.getUpdateMask())) return false;
}
if (getValidateOnly() != other.getValidateOnly()) return false;
if (!getUnknownFields().equals(other.getUnknownFields())) return false;
return true;
}
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
if (hasInsightTypeConfig()) {
hash = (37 * hash) + INSIGHT_TYPE_CONFIG_FIELD_NUMBER;
hash = (53 * hash) + getInsightTypeConfig().hashCode();
}
if (hasUpdateMask()) {
hash = (37 * hash) + UPDATE_MASK_FIELD_NUMBER;
hash = (53 * hash) + getUpdateMask().hashCode();
}
hash = (37 * hash) + VALIDATE_ONLY_FIELD_NUMBER;
hash = (53 * hash) + com.google.protobuf.Internal.hashBoolean(getValidateOnly());
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
public static com.google.cloud.recommender.v1beta1.UpdateInsightTypeConfigRequest parseFrom(
java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.recommender.v1beta1.UpdateInsightTypeConfigRequest parseFrom(
java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.recommender.v1beta1.UpdateInsightTypeConfigRequest parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.recommender.v1beta1.UpdateInsightTypeConfigRequest parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.recommender.v1beta1.UpdateInsightTypeConfigRequest parseFrom(
byte[] data) throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.recommender.v1beta1.UpdateInsightTypeConfigRequest parseFrom(
byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.recommender.v1beta1.UpdateInsightTypeConfigRequest parseFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.recommender.v1beta1.UpdateInsightTypeConfigRequest parseFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.recommender.v1beta1.UpdateInsightTypeConfigRequest
parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
}
public static com.google.cloud.recommender.v1beta1.UpdateInsightTypeConfigRequest
parseDelimitedFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.recommender.v1beta1.UpdateInsightTypeConfigRequest parseFrom(
com.google.protobuf.CodedInputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.recommender.v1beta1.UpdateInsightTypeConfigRequest parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() {
return newBuilder();
}
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(
com.google.cloud.recommender.v1beta1.UpdateInsightTypeConfigRequest prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
*
*
* <pre>
* Request for the `UpdateInsightTypeConfig` method.
* </pre>
*
* Protobuf type {@code google.cloud.recommender.v1beta1.UpdateInsightTypeConfigRequest}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.cloud.recommender.v1beta1.UpdateInsightTypeConfigRequest)
com.google.cloud.recommender.v1beta1.UpdateInsightTypeConfigRequestOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.recommender.v1beta1.RecommenderProto
.internal_static_google_cloud_recommender_v1beta1_UpdateInsightTypeConfigRequest_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.recommender.v1beta1.RecommenderProto
.internal_static_google_cloud_recommender_v1beta1_UpdateInsightTypeConfigRequest_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.recommender.v1beta1.UpdateInsightTypeConfigRequest.class,
com.google.cloud.recommender.v1beta1.UpdateInsightTypeConfigRequest.Builder.class);
}
// Construct using
// com.google.cloud.recommender.v1beta1.UpdateInsightTypeConfigRequest.newBuilder()
private Builder() {
maybeForceBuilderInitialization();
}
private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
private void maybeForceBuilderInitialization() {
if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) {
getInsightTypeConfigFieldBuilder();
getUpdateMaskFieldBuilder();
}
}
@java.lang.Override
public Builder clear() {
super.clear();
bitField0_ = 0;
insightTypeConfig_ = null;
if (insightTypeConfigBuilder_ != null) {
insightTypeConfigBuilder_.dispose();
insightTypeConfigBuilder_ = null;
}
updateMask_ = null;
if (updateMaskBuilder_ != null) {
updateMaskBuilder_.dispose();
updateMaskBuilder_ = null;
}
validateOnly_ = false;
return this;
}
@java.lang.Override
public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
return com.google.cloud.recommender.v1beta1.RecommenderProto
.internal_static_google_cloud_recommender_v1beta1_UpdateInsightTypeConfigRequest_descriptor;
}
@java.lang.Override
public com.google.cloud.recommender.v1beta1.UpdateInsightTypeConfigRequest
getDefaultInstanceForType() {
return com.google.cloud.recommender.v1beta1.UpdateInsightTypeConfigRequest
.getDefaultInstance();
}
@java.lang.Override
public com.google.cloud.recommender.v1beta1.UpdateInsightTypeConfigRequest build() {
com.google.cloud.recommender.v1beta1.UpdateInsightTypeConfigRequest result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public com.google.cloud.recommender.v1beta1.UpdateInsightTypeConfigRequest buildPartial() {
com.google.cloud.recommender.v1beta1.UpdateInsightTypeConfigRequest result =
new com.google.cloud.recommender.v1beta1.UpdateInsightTypeConfigRequest(this);
if (bitField0_ != 0) {
buildPartial0(result);
}
onBuilt();
return result;
}
private void buildPartial0(
com.google.cloud.recommender.v1beta1.UpdateInsightTypeConfigRequest result) {
int from_bitField0_ = bitField0_;
int to_bitField0_ = 0;
if (((from_bitField0_ & 0x00000001) != 0)) {
result.insightTypeConfig_ =
insightTypeConfigBuilder_ == null
? insightTypeConfig_
: insightTypeConfigBuilder_.build();
to_bitField0_ |= 0x00000001;
}
if (((from_bitField0_ & 0x00000002) != 0)) {
result.updateMask_ = updateMaskBuilder_ == null ? updateMask_ : updateMaskBuilder_.build();
to_bitField0_ |= 0x00000002;
}
if (((from_bitField0_ & 0x00000004) != 0)) {
result.validateOnly_ = validateOnly_;
}
result.bitField0_ |= to_bitField0_;
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof com.google.cloud.recommender.v1beta1.UpdateInsightTypeConfigRequest) {
return mergeFrom(
(com.google.cloud.recommender.v1beta1.UpdateInsightTypeConfigRequest) other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(
com.google.cloud.recommender.v1beta1.UpdateInsightTypeConfigRequest other) {
if (other
== com.google.cloud.recommender.v1beta1.UpdateInsightTypeConfigRequest
.getDefaultInstance()) return this;
if (other.hasInsightTypeConfig()) {
mergeInsightTypeConfig(other.getInsightTypeConfig());
}
if (other.hasUpdateMask()) {
mergeUpdateMask(other.getUpdateMask());
}
if (other.getValidateOnly() != false) {
setValidateOnly(other.getValidateOnly());
}
this.mergeUnknownFields(other.getUnknownFields());
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
return true;
}
@java.lang.Override
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
case 10:
{
input.readMessage(
getInsightTypeConfigFieldBuilder().getBuilder(), extensionRegistry);
bitField0_ |= 0x00000001;
break;
} // case 10
case 18:
{
input.readMessage(getUpdateMaskFieldBuilder().getBuilder(), extensionRegistry);
bitField0_ |= 0x00000002;
break;
} // case 18
case 24:
{
validateOnly_ = input.readBool();
bitField0_ |= 0x00000004;
break;
} // case 24
default:
{
if (!super.parseUnknownField(input, extensionRegistry, tag)) {
done = true; // was an endgroup tag
}
break;
} // default:
} // switch (tag)
} // while (!done)
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.unwrapIOException();
} finally {
onChanged();
} // finally
return this;
}
private int bitField0_;
private com.google.cloud.recommender.v1beta1.InsightTypeConfig insightTypeConfig_;
private com.google.protobuf.SingleFieldBuilderV3<
com.google.cloud.recommender.v1beta1.InsightTypeConfig,
com.google.cloud.recommender.v1beta1.InsightTypeConfig.Builder,
com.google.cloud.recommender.v1beta1.InsightTypeConfigOrBuilder>
insightTypeConfigBuilder_;
/**
*
*
* <pre>
* Required. The InsightTypeConfig to update.
* </pre>
*
* <code>
* .google.cloud.recommender.v1beta1.InsightTypeConfig insight_type_config = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return Whether the insightTypeConfig field is set.
*/
public boolean hasInsightTypeConfig() {
return ((bitField0_ & 0x00000001) != 0);
}
/**
*
*
* <pre>
* Required. The InsightTypeConfig to update.
* </pre>
*
* <code>
* .google.cloud.recommender.v1beta1.InsightTypeConfig insight_type_config = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return The insightTypeConfig.
*/
public com.google.cloud.recommender.v1beta1.InsightTypeConfig getInsightTypeConfig() {
if (insightTypeConfigBuilder_ == null) {
return insightTypeConfig_ == null
? com.google.cloud.recommender.v1beta1.InsightTypeConfig.getDefaultInstance()
: insightTypeConfig_;
} else {
return insightTypeConfigBuilder_.getMessage();
}
}
/**
*
*
* <pre>
* Required. The InsightTypeConfig to update.
* </pre>
*
* <code>
* .google.cloud.recommender.v1beta1.InsightTypeConfig insight_type_config = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public Builder setInsightTypeConfig(
com.google.cloud.recommender.v1beta1.InsightTypeConfig value) {
if (insightTypeConfigBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
insightTypeConfig_ = value;
} else {
insightTypeConfigBuilder_.setMessage(value);
}
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. The InsightTypeConfig to update.
* </pre>
*
* <code>
* .google.cloud.recommender.v1beta1.InsightTypeConfig insight_type_config = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public Builder setInsightTypeConfig(
com.google.cloud.recommender.v1beta1.InsightTypeConfig.Builder builderForValue) {
if (insightTypeConfigBuilder_ == null) {
insightTypeConfig_ = builderForValue.build();
} else {
insightTypeConfigBuilder_.setMessage(builderForValue.build());
}
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. The InsightTypeConfig to update.
* </pre>
*
* <code>
* .google.cloud.recommender.v1beta1.InsightTypeConfig insight_type_config = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public Builder mergeInsightTypeConfig(
com.google.cloud.recommender.v1beta1.InsightTypeConfig value) {
if (insightTypeConfigBuilder_ == null) {
if (((bitField0_ & 0x00000001) != 0)
&& insightTypeConfig_ != null
&& insightTypeConfig_
!= com.google.cloud.recommender.v1beta1.InsightTypeConfig.getDefaultInstance()) {
getInsightTypeConfigBuilder().mergeFrom(value);
} else {
insightTypeConfig_ = value;
}
} else {
insightTypeConfigBuilder_.mergeFrom(value);
}
if (insightTypeConfig_ != null) {
bitField0_ |= 0x00000001;
onChanged();
}
return this;
}
/**
*
*
* <pre>
* Required. The InsightTypeConfig to update.
* </pre>
*
* <code>
* .google.cloud.recommender.v1beta1.InsightTypeConfig insight_type_config = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public Builder clearInsightTypeConfig() {
bitField0_ = (bitField0_ & ~0x00000001);
insightTypeConfig_ = null;
if (insightTypeConfigBuilder_ != null) {
insightTypeConfigBuilder_.dispose();
insightTypeConfigBuilder_ = null;
}
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. The InsightTypeConfig to update.
* </pre>
*
* <code>
* .google.cloud.recommender.v1beta1.InsightTypeConfig insight_type_config = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public com.google.cloud.recommender.v1beta1.InsightTypeConfig.Builder
getInsightTypeConfigBuilder() {
bitField0_ |= 0x00000001;
onChanged();
return getInsightTypeConfigFieldBuilder().getBuilder();
}
/**
*
*
* <pre>
* Required. The InsightTypeConfig to update.
* </pre>
*
* <code>
* .google.cloud.recommender.v1beta1.InsightTypeConfig insight_type_config = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public com.google.cloud.recommender.v1beta1.InsightTypeConfigOrBuilder
getInsightTypeConfigOrBuilder() {
if (insightTypeConfigBuilder_ != null) {
return insightTypeConfigBuilder_.getMessageOrBuilder();
} else {
return insightTypeConfig_ == null
? com.google.cloud.recommender.v1beta1.InsightTypeConfig.getDefaultInstance()
: insightTypeConfig_;
}
}
/**
*
*
* <pre>
* Required. The InsightTypeConfig to update.
* </pre>
*
* <code>
* .google.cloud.recommender.v1beta1.InsightTypeConfig insight_type_config = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
private com.google.protobuf.SingleFieldBuilderV3<
com.google.cloud.recommender.v1beta1.InsightTypeConfig,
com.google.cloud.recommender.v1beta1.InsightTypeConfig.Builder,
com.google.cloud.recommender.v1beta1.InsightTypeConfigOrBuilder>
getInsightTypeConfigFieldBuilder() {
if (insightTypeConfigBuilder_ == null) {
insightTypeConfigBuilder_ =
new com.google.protobuf.SingleFieldBuilderV3<
com.google.cloud.recommender.v1beta1.InsightTypeConfig,
com.google.cloud.recommender.v1beta1.InsightTypeConfig.Builder,
com.google.cloud.recommender.v1beta1.InsightTypeConfigOrBuilder>(
getInsightTypeConfig(), getParentForChildren(), isClean());
insightTypeConfig_ = null;
}
return insightTypeConfigBuilder_;
}
private com.google.protobuf.FieldMask updateMask_;
private com.google.protobuf.SingleFieldBuilderV3<
com.google.protobuf.FieldMask,
com.google.protobuf.FieldMask.Builder,
com.google.protobuf.FieldMaskOrBuilder>
updateMaskBuilder_;
/**
*
*
* <pre>
* The list of fields to be updated.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2;</code>
*
* @return Whether the updateMask field is set.
*/
public boolean hasUpdateMask() {
return ((bitField0_ & 0x00000002) != 0);
}
/**
*
*
* <pre>
* The list of fields to be updated.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2;</code>
*
* @return The updateMask.
*/
public com.google.protobuf.FieldMask getUpdateMask() {
if (updateMaskBuilder_ == null) {
return updateMask_ == null
? com.google.protobuf.FieldMask.getDefaultInstance()
: updateMask_;
} else {
return updateMaskBuilder_.getMessage();
}
}
/**
*
*
* <pre>
* The list of fields to be updated.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2;</code>
*/
public Builder setUpdateMask(com.google.protobuf.FieldMask value) {
if (updateMaskBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
updateMask_ = value;
} else {
updateMaskBuilder_.setMessage(value);
}
bitField0_ |= 0x00000002;
onChanged();
return this;
}
/**
*
*
* <pre>
* The list of fields to be updated.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2;</code>
*/
public Builder setUpdateMask(com.google.protobuf.FieldMask.Builder builderForValue) {
if (updateMaskBuilder_ == null) {
updateMask_ = builderForValue.build();
} else {
updateMaskBuilder_.setMessage(builderForValue.build());
}
bitField0_ |= 0x00000002;
onChanged();
return this;
}
/**
*
*
* <pre>
* The list of fields to be updated.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2;</code>
*/
public Builder mergeUpdateMask(com.google.protobuf.FieldMask value) {
if (updateMaskBuilder_ == null) {
if (((bitField0_ & 0x00000002) != 0)
&& updateMask_ != null
&& updateMask_ != com.google.protobuf.FieldMask.getDefaultInstance()) {
getUpdateMaskBuilder().mergeFrom(value);
} else {
updateMask_ = value;
}
} else {
updateMaskBuilder_.mergeFrom(value);
}
if (updateMask_ != null) {
bitField0_ |= 0x00000002;
onChanged();
}
return this;
}
/**
*
*
* <pre>
* The list of fields to be updated.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2;</code>
*/
public Builder clearUpdateMask() {
bitField0_ = (bitField0_ & ~0x00000002);
updateMask_ = null;
if (updateMaskBuilder_ != null) {
updateMaskBuilder_.dispose();
updateMaskBuilder_ = null;
}
onChanged();
return this;
}
/**
*
*
* <pre>
* The list of fields to be updated.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2;</code>
*/
public com.google.protobuf.FieldMask.Builder getUpdateMaskBuilder() {
bitField0_ |= 0x00000002;
onChanged();
return getUpdateMaskFieldBuilder().getBuilder();
}
/**
*
*
* <pre>
* The list of fields to be updated.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2;</code>
*/
public com.google.protobuf.FieldMaskOrBuilder getUpdateMaskOrBuilder() {
if (updateMaskBuilder_ != null) {
return updateMaskBuilder_.getMessageOrBuilder();
} else {
return updateMask_ == null
? com.google.protobuf.FieldMask.getDefaultInstance()
: updateMask_;
}
}
/**
*
*
* <pre>
* The list of fields to be updated.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2;</code>
*/
private com.google.protobuf.SingleFieldBuilderV3<
com.google.protobuf.FieldMask,
com.google.protobuf.FieldMask.Builder,
com.google.protobuf.FieldMaskOrBuilder>
getUpdateMaskFieldBuilder() {
if (updateMaskBuilder_ == null) {
updateMaskBuilder_ =
new com.google.protobuf.SingleFieldBuilderV3<
com.google.protobuf.FieldMask,
com.google.protobuf.FieldMask.Builder,
com.google.protobuf.FieldMaskOrBuilder>(
getUpdateMask(), getParentForChildren(), isClean());
updateMask_ = null;
}
return updateMaskBuilder_;
}
private boolean validateOnly_;
/**
*
*
* <pre>
* If true, validate the request and preview the change, but do not actually
* update it.
* </pre>
*
* <code>bool validate_only = 3;</code>
*
* @return The validateOnly.
*/
@java.lang.Override
public boolean getValidateOnly() {
return validateOnly_;
}
/**
*
*
* <pre>
* If true, validate the request and preview the change, but do not actually
* update it.
* </pre>
*
* <code>bool validate_only = 3;</code>
*
* @param value The validateOnly to set.
* @return This builder for chaining.
*/
public Builder setValidateOnly(boolean value) {
validateOnly_ = value;
bitField0_ |= 0x00000004;
onChanged();
return this;
}
/**
*
*
* <pre>
* If true, validate the request and preview the change, but do not actually
* update it.
* </pre>
*
* <code>bool validate_only = 3;</code>
*
* @return This builder for chaining.
*/
public Builder clearValidateOnly() {
bitField0_ = (bitField0_ & ~0x00000004);
validateOnly_ = false;
onChanged();
return this;
}
@java.lang.Override
public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:google.cloud.recommender.v1beta1.UpdateInsightTypeConfigRequest)
}
// @@protoc_insertion_point(class_scope:google.cloud.recommender.v1beta1.UpdateInsightTypeConfigRequest)
private static final com.google.cloud.recommender.v1beta1.UpdateInsightTypeConfigRequest
DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new com.google.cloud.recommender.v1beta1.UpdateInsightTypeConfigRequest();
}
public static com.google.cloud.recommender.v1beta1.UpdateInsightTypeConfigRequest
getDefaultInstance() {
return DEFAULT_INSTANCE;
}
private static final com.google.protobuf.Parser<UpdateInsightTypeConfigRequest> PARSER =
new com.google.protobuf.AbstractParser<UpdateInsightTypeConfigRequest>() {
@java.lang.Override
public UpdateInsightTypeConfigRequest parsePartialFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
Builder builder = newBuilder();
try {
builder.mergeFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(builder.buildPartial());
} catch (com.google.protobuf.UninitializedMessageException e) {
throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
} catch (java.io.IOException e) {
throw new com.google.protobuf.InvalidProtocolBufferException(e)
.setUnfinishedMessage(builder.buildPartial());
}
return builder.buildPartial();
}
};
// Static accessor for the shared parser instance.
public static com.google.protobuf.Parser<UpdateInsightTypeConfigRequest> parser() {
  return PARSER;
}
@java.lang.Override
// Instance-level accessor required by the Message interface; returns the shared parser.
public com.google.protobuf.Parser<UpdateInsightTypeConfigRequest> getParserForType() {
  return PARSER;
}
@java.lang.Override
// Instance-level accessor required by the Message interface; returns the shared default.
public com.google.cloud.recommender.v1beta1.UpdateInsightTypeConfigRequest
    getDefaultInstanceForType() {
  return DEFAULT_INSTANCE;
}
}
|
googleapis/google-cloud-java | 37,144 | java-compute/proto-google-cloud-compute-v1/src/main/java/com/google/cloud/compute/v1/SecurityPolicyRuleHttpHeaderAction.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/compute/v1/compute.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.compute.v1;
/**
*
*
* <pre>
* </pre>
*
* Protobuf type {@code google.cloud.compute.v1.SecurityPolicyRuleHttpHeaderAction}
*/
// protoc-generated message with a single repeated message field,
// request_headers_to_adds (field number 87987661).  Generated code — logic must
// stay byte-identical to the generator's output; comments only below.
public final class SecurityPolicyRuleHttpHeaderAction extends com.google.protobuf.GeneratedMessageV3
    implements
    // @@protoc_insertion_point(message_implements:google.cloud.compute.v1.SecurityPolicyRuleHttpHeaderAction)
    SecurityPolicyRuleHttpHeaderActionOrBuilder {
  private static final long serialVersionUID = 0L;

  // Use SecurityPolicyRuleHttpHeaderAction.newBuilder() to construct.
  private SecurityPolicyRuleHttpHeaderAction(
      com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
    super(builder);
  }

  // Default instance: the repeated field starts as the shared immutable empty list.
  private SecurityPolicyRuleHttpHeaderAction() {
    requestHeadersToAdds_ = java.util.Collections.emptyList();
  }

  @java.lang.Override
  @SuppressWarnings({"unused"})
  protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
    return new SecurityPolicyRuleHttpHeaderAction();
  }

  public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
    return com.google.cloud.compute.v1.Compute
        .internal_static_google_cloud_compute_v1_SecurityPolicyRuleHttpHeaderAction_descriptor;
  }

  @java.lang.Override
  protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internalGetFieldAccessorTable() {
    return com.google.cloud.compute.v1.Compute
        .internal_static_google_cloud_compute_v1_SecurityPolicyRuleHttpHeaderAction_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            com.google.cloud.compute.v1.SecurityPolicyRuleHttpHeaderAction.class,
            com.google.cloud.compute.v1.SecurityPolicyRuleHttpHeaderAction.Builder.class);
  }

  // Proto field number for request_headers_to_adds (see compute.proto).
  public static final int REQUEST_HEADERS_TO_ADDS_FIELD_NUMBER = 87987661;

  @SuppressWarnings("serial")
  private java.util.List<
          com.google.cloud.compute.v1.SecurityPolicyRuleHttpHeaderActionHttpHeaderOption>
      requestHeadersToAdds_;

  /**
   *
   *
   * <pre>
   * The list of request headers to add or overwrite if they're already present.
   * </pre>
   *
   * <code>
   * repeated .google.cloud.compute.v1.SecurityPolicyRuleHttpHeaderActionHttpHeaderOption request_headers_to_adds = 87987661;
   * </code>
   */
  @java.lang.Override
  public java.util.List<
          com.google.cloud.compute.v1.SecurityPolicyRuleHttpHeaderActionHttpHeaderOption>
      getRequestHeadersToAddsList() {
    return requestHeadersToAdds_;
  }

  /**
   *
   *
   * <pre>
   * The list of request headers to add or overwrite if they're already present.
   * </pre>
   *
   * <code>
   * repeated .google.cloud.compute.v1.SecurityPolicyRuleHttpHeaderActionHttpHeaderOption request_headers_to_adds = 87987661;
   * </code>
   */
  @java.lang.Override
  public java.util.List<
          ? extends
              com.google.cloud.compute.v1
                  .SecurityPolicyRuleHttpHeaderActionHttpHeaderOptionOrBuilder>
      getRequestHeadersToAddsOrBuilderList() {
    return requestHeadersToAdds_;
  }

  /**
   *
   *
   * <pre>
   * The list of request headers to add or overwrite if they're already present.
   * </pre>
   *
   * <code>
   * repeated .google.cloud.compute.v1.SecurityPolicyRuleHttpHeaderActionHttpHeaderOption request_headers_to_adds = 87987661;
   * </code>
   */
  @java.lang.Override
  public int getRequestHeadersToAddsCount() {
    return requestHeadersToAdds_.size();
  }

  /**
   *
   *
   * <pre>
   * The list of request headers to add or overwrite if they're already present.
   * </pre>
   *
   * <code>
   * repeated .google.cloud.compute.v1.SecurityPolicyRuleHttpHeaderActionHttpHeaderOption request_headers_to_adds = 87987661;
   * </code>
   */
  @java.lang.Override
  public com.google.cloud.compute.v1.SecurityPolicyRuleHttpHeaderActionHttpHeaderOption
      getRequestHeadersToAdds(int index) {
    return requestHeadersToAdds_.get(index);
  }

  /**
   *
   *
   * <pre>
   * The list of request headers to add or overwrite if they're already present.
   * </pre>
   *
   * <code>
   * repeated .google.cloud.compute.v1.SecurityPolicyRuleHttpHeaderActionHttpHeaderOption request_headers_to_adds = 87987661;
   * </code>
   */
  @java.lang.Override
  public com.google.cloud.compute.v1.SecurityPolicyRuleHttpHeaderActionHttpHeaderOptionOrBuilder
      getRequestHeadersToAddsOrBuilder(int index) {
    return requestHeadersToAdds_.get(index);
  }

  // -1 = not computed, 0 = not initialized, 1 = initialized (memoization sentinel).
  private byte memoizedIsInitialized = -1;

  @java.lang.Override
  public final boolean isInitialized() {
    // This message has no required fields, so every instance is initialized.
    byte isInitialized = memoizedIsInitialized;
    if (isInitialized == 1) return true;
    if (isInitialized == 0) return false;
    memoizedIsInitialized = 1;
    return true;
  }

  @java.lang.Override
  public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
    // Serialize each repeated element under field number 87987661, then any unknown fields.
    for (int i = 0; i < requestHeadersToAdds_.size(); i++) {
      output.writeMessage(87987661, requestHeadersToAdds_.get(i));
    }
    getUnknownFields().writeTo(output);
  }

  @java.lang.Override
  public int getSerializedSize() {
    int size = memoizedSize;  // -1 means "not yet computed"
    if (size != -1) return size;
    size = 0;
    for (int i = 0; i < requestHeadersToAdds_.size(); i++) {
      size +=
          com.google.protobuf.CodedOutputStream.computeMessageSize(
              87987661, requestHeadersToAdds_.get(i));
    }
    size += getUnknownFields().getSerializedSize();
    memoizedSize = size;
    return size;
  }

  @java.lang.Override
  public boolean equals(final java.lang.Object obj) {
    if (obj == this) {
      return true;
    }
    if (!(obj instanceof com.google.cloud.compute.v1.SecurityPolicyRuleHttpHeaderAction)) {
      return super.equals(obj);
    }
    com.google.cloud.compute.v1.SecurityPolicyRuleHttpHeaderAction other =
        (com.google.cloud.compute.v1.SecurityPolicyRuleHttpHeaderAction) obj;
    if (!getRequestHeadersToAddsList().equals(other.getRequestHeadersToAddsList())) return false;
    if (!getUnknownFields().equals(other.getUnknownFields())) return false;
    return true;
  }

  @java.lang.Override
  public int hashCode() {
    // Memoized; 0 doubles as the "not yet computed" sentinel.
    if (memoizedHashCode != 0) {
      return memoizedHashCode;
    }
    int hash = 41;
    hash = (19 * hash) + getDescriptor().hashCode();
    if (getRequestHeadersToAddsCount() > 0) {
      hash = (37 * hash) + REQUEST_HEADERS_TO_ADDS_FIELD_NUMBER;
      hash = (53 * hash) + getRequestHeadersToAddsList().hashCode();
    }
    hash = (29 * hash) + getUnknownFields().hashCode();
    memoizedHashCode = hash;
    return hash;
  }

  // ---- Standard static parse entry points (all delegate to PARSER). ----

  public static com.google.cloud.compute.v1.SecurityPolicyRuleHttpHeaderAction parseFrom(
      java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.cloud.compute.v1.SecurityPolicyRuleHttpHeaderAction parseFrom(
      java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.cloud.compute.v1.SecurityPolicyRuleHttpHeaderAction parseFrom(
      com.google.protobuf.ByteString data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.cloud.compute.v1.SecurityPolicyRuleHttpHeaderAction parseFrom(
      com.google.protobuf.ByteString data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.cloud.compute.v1.SecurityPolicyRuleHttpHeaderAction parseFrom(
      byte[] data) throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.cloud.compute.v1.SecurityPolicyRuleHttpHeaderAction parseFrom(
      byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.cloud.compute.v1.SecurityPolicyRuleHttpHeaderAction parseFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }

  public static com.google.cloud.compute.v1.SecurityPolicyRuleHttpHeaderAction parseFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }

  public static com.google.cloud.compute.v1.SecurityPolicyRuleHttpHeaderAction parseDelimitedFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
  }

  public static com.google.cloud.compute.v1.SecurityPolicyRuleHttpHeaderAction parseDelimitedFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
        PARSER, input, extensionRegistry);
  }

  public static com.google.cloud.compute.v1.SecurityPolicyRuleHttpHeaderAction parseFrom(
      com.google.protobuf.CodedInputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }

  public static com.google.cloud.compute.v1.SecurityPolicyRuleHttpHeaderAction parseFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }

  @java.lang.Override
  public Builder newBuilderForType() {
    return newBuilder();
  }

  public static Builder newBuilder() {
    return DEFAULT_INSTANCE.toBuilder();
  }

  public static Builder newBuilder(
      com.google.cloud.compute.v1.SecurityPolicyRuleHttpHeaderAction prototype) {
    return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
  }

  @java.lang.Override
  public Builder toBuilder() {
    // The default instance yields a fresh builder; anything else seeds the builder via merge.
    return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
  }

  @java.lang.Override
  protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
    Builder builder = new Builder(parent);
    return builder;
  }

  /**
   *
   *
   * <pre>
   * </pre>
   *
   * Protobuf type {@code google.cloud.compute.v1.SecurityPolicyRuleHttpHeaderAction}
   */
  public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
      implements
      // @@protoc_insertion_point(builder_implements:google.cloud.compute.v1.SecurityPolicyRuleHttpHeaderAction)
      com.google.cloud.compute.v1.SecurityPolicyRuleHttpHeaderActionOrBuilder {
    public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
      return com.google.cloud.compute.v1.Compute
          .internal_static_google_cloud_compute_v1_SecurityPolicyRuleHttpHeaderAction_descriptor;
    }

    @java.lang.Override
    protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return com.google.cloud.compute.v1.Compute
          .internal_static_google_cloud_compute_v1_SecurityPolicyRuleHttpHeaderAction_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              com.google.cloud.compute.v1.SecurityPolicyRuleHttpHeaderAction.class,
              com.google.cloud.compute.v1.SecurityPolicyRuleHttpHeaderAction.Builder.class);
    }

    // Construct using com.google.cloud.compute.v1.SecurityPolicyRuleHttpHeaderAction.newBuilder()
    private Builder() {}

    private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
      super(parent);
    }

    @java.lang.Override
    public Builder clear() {
      super.clear();
      bitField0_ = 0;
      // Reset the repeated field either directly or through its nested-builder proxy.
      if (requestHeadersToAddsBuilder_ == null) {
        requestHeadersToAdds_ = java.util.Collections.emptyList();
      } else {
        requestHeadersToAdds_ = null;
        requestHeadersToAddsBuilder_.clear();
      }
      bitField0_ = (bitField0_ & ~0x00000001);
      return this;
    }

    @java.lang.Override
    public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
      return com.google.cloud.compute.v1.Compute
          .internal_static_google_cloud_compute_v1_SecurityPolicyRuleHttpHeaderAction_descriptor;
    }

    @java.lang.Override
    public com.google.cloud.compute.v1.SecurityPolicyRuleHttpHeaderAction
        getDefaultInstanceForType() {
      return com.google.cloud.compute.v1.SecurityPolicyRuleHttpHeaderAction.getDefaultInstance();
    }

    @java.lang.Override
    public com.google.cloud.compute.v1.SecurityPolicyRuleHttpHeaderAction build() {
      com.google.cloud.compute.v1.SecurityPolicyRuleHttpHeaderAction result = buildPartial();
      if (!result.isInitialized()) {
        throw newUninitializedMessageException(result);
      }
      return result;
    }

    @java.lang.Override
    public com.google.cloud.compute.v1.SecurityPolicyRuleHttpHeaderAction buildPartial() {
      com.google.cloud.compute.v1.SecurityPolicyRuleHttpHeaderAction result =
          new com.google.cloud.compute.v1.SecurityPolicyRuleHttpHeaderAction(this);
      buildPartialRepeatedFields(result);
      if (bitField0_ != 0) {
        buildPartial0(result);
      }
      onBuilt();
      return result;
    }

    // Freezes the repeated field into the message: the local list becomes unmodifiable
    // (and the mutable bit is cleared) so builder and message never share mutable state.
    private void buildPartialRepeatedFields(
        com.google.cloud.compute.v1.SecurityPolicyRuleHttpHeaderAction result) {
      if (requestHeadersToAddsBuilder_ == null) {
        if (((bitField0_ & 0x00000001) != 0)) {
          requestHeadersToAdds_ = java.util.Collections.unmodifiableList(requestHeadersToAdds_);
          bitField0_ = (bitField0_ & ~0x00000001);
        }
        result.requestHeadersToAdds_ = requestHeadersToAdds_;
      } else {
        result.requestHeadersToAdds_ = requestHeadersToAddsBuilder_.build();
      }
    }

    private void buildPartial0(
        com.google.cloud.compute.v1.SecurityPolicyRuleHttpHeaderAction result) {
      // (generated) no singular fields to copy for this message
      int from_bitField0_ = bitField0_;
    }

    @java.lang.Override
    public Builder clone() {
      return super.clone();
    }

    @java.lang.Override
    public Builder setField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.setField(field, value);
    }

    @java.lang.Override
    public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
      return super.clearField(field);
    }

    @java.lang.Override
    public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
      return super.clearOneof(oneof);
    }

    @java.lang.Override
    public Builder setRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
      return super.setRepeatedField(field, index, value);
    }

    @java.lang.Override
    public Builder addRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.addRepeatedField(field, value);
    }

    @java.lang.Override
    public Builder mergeFrom(com.google.protobuf.Message other) {
      if (other instanceof com.google.cloud.compute.v1.SecurityPolicyRuleHttpHeaderAction) {
        return mergeFrom((com.google.cloud.compute.v1.SecurityPolicyRuleHttpHeaderAction) other);
      } else {
        super.mergeFrom(other);
        return this;
      }
    }

    public Builder mergeFrom(com.google.cloud.compute.v1.SecurityPolicyRuleHttpHeaderAction other) {
      if (other
          == com.google.cloud.compute.v1.SecurityPolicyRuleHttpHeaderAction.getDefaultInstance())
        return this;
      if (requestHeadersToAddsBuilder_ == null) {
        if (!other.requestHeadersToAdds_.isEmpty()) {
          if (requestHeadersToAdds_.isEmpty()) {
            // Adopt the other message's (immutable) list directly; clear the mutable bit.
            requestHeadersToAdds_ = other.requestHeadersToAdds_;
            bitField0_ = (bitField0_ & ~0x00000001);
          } else {
            ensureRequestHeadersToAddsIsMutable();
            requestHeadersToAdds_.addAll(other.requestHeadersToAdds_);
          }
          onChanged();
        }
      } else {
        if (!other.requestHeadersToAdds_.isEmpty()) {
          if (requestHeadersToAddsBuilder_.isEmpty()) {
            // Swap the empty nested builder for the other list, then recreate the
            // builder lazily (or eagerly when alwaysUseFieldBuilders is set).
            requestHeadersToAddsBuilder_.dispose();
            requestHeadersToAddsBuilder_ = null;
            requestHeadersToAdds_ = other.requestHeadersToAdds_;
            bitField0_ = (bitField0_ & ~0x00000001);
            requestHeadersToAddsBuilder_ =
                com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders
                    ? getRequestHeadersToAddsFieldBuilder()
                    : null;
          } else {
            requestHeadersToAddsBuilder_.addAllMessages(other.requestHeadersToAdds_);
          }
        }
      }
      this.mergeUnknownFields(other.getUnknownFields());
      onChanged();
      return this;
    }

    @java.lang.Override
    public final boolean isInitialized() {
      return true;
    }

    @java.lang.Override
    public Builder mergeFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      if (extensionRegistry == null) {
        throw new java.lang.NullPointerException();
      }
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              done = true;
              break;
            // 703901290 == (87987661 << 3) | 2: field 87987661, length-delimited wire type.
            case 703901290:
              {
                com.google.cloud.compute.v1.SecurityPolicyRuleHttpHeaderActionHttpHeaderOption m =
                    input.readMessage(
                        com.google.cloud.compute.v1
                            .SecurityPolicyRuleHttpHeaderActionHttpHeaderOption.parser(),
                        extensionRegistry);
                if (requestHeadersToAddsBuilder_ == null) {
                  ensureRequestHeadersToAddsIsMutable();
                  requestHeadersToAdds_.add(m);
                } else {
                  requestHeadersToAddsBuilder_.addMessage(m);
                }
                break;
              } // case 703901290
            default:
              {
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
          } // switch (tag)
        } // while (!done)
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.unwrapIOException();
      } finally {
        onChanged();
      } // finally
      return this;
    }

    private int bitField0_;

    private java.util.List<
            com.google.cloud.compute.v1.SecurityPolicyRuleHttpHeaderActionHttpHeaderOption>
        requestHeadersToAdds_ = java.util.Collections.emptyList();

    // Copy-on-write: bit 0x1 of bitField0_ records whether requestHeadersToAdds_ is
    // already a private mutable ArrayList; if not, copy it before the first mutation.
    private void ensureRequestHeadersToAddsIsMutable() {
      if (!((bitField0_ & 0x00000001) != 0)) {
        requestHeadersToAdds_ =
            new java.util.ArrayList<
                com.google.cloud.compute.v1.SecurityPolicyRuleHttpHeaderActionHttpHeaderOption>(
                requestHeadersToAdds_);
        bitField0_ |= 0x00000001;
      }
    }

    private com.google.protobuf.RepeatedFieldBuilderV3<
            com.google.cloud.compute.v1.SecurityPolicyRuleHttpHeaderActionHttpHeaderOption,
            com.google.cloud.compute.v1.SecurityPolicyRuleHttpHeaderActionHttpHeaderOption.Builder,
            com.google.cloud.compute.v1.SecurityPolicyRuleHttpHeaderActionHttpHeaderOptionOrBuilder>
        requestHeadersToAddsBuilder_;

    /**
     *
     *
     * <pre>
     * The list of request headers to add or overwrite if they're already present.
     * </pre>
     *
     * <code>
     * repeated .google.cloud.compute.v1.SecurityPolicyRuleHttpHeaderActionHttpHeaderOption request_headers_to_adds = 87987661;
     * </code>
     */
    public java.util.List<
            com.google.cloud.compute.v1.SecurityPolicyRuleHttpHeaderActionHttpHeaderOption>
        getRequestHeadersToAddsList() {
      if (requestHeadersToAddsBuilder_ == null) {
        return java.util.Collections.unmodifiableList(requestHeadersToAdds_);
      } else {
        return requestHeadersToAddsBuilder_.getMessageList();
      }
    }

    /**
     *
     *
     * <pre>
     * The list of request headers to add or overwrite if they're already present.
     * </pre>
     *
     * <code>
     * repeated .google.cloud.compute.v1.SecurityPolicyRuleHttpHeaderActionHttpHeaderOption request_headers_to_adds = 87987661;
     * </code>
     */
    public int getRequestHeadersToAddsCount() {
      if (requestHeadersToAddsBuilder_ == null) {
        return requestHeadersToAdds_.size();
      } else {
        return requestHeadersToAddsBuilder_.getCount();
      }
    }

    /**
     *
     *
     * <pre>
     * The list of request headers to add or overwrite if they're already present.
     * </pre>
     *
     * <code>
     * repeated .google.cloud.compute.v1.SecurityPolicyRuleHttpHeaderActionHttpHeaderOption request_headers_to_adds = 87987661;
     * </code>
     */
    public com.google.cloud.compute.v1.SecurityPolicyRuleHttpHeaderActionHttpHeaderOption
        getRequestHeadersToAdds(int index) {
      if (requestHeadersToAddsBuilder_ == null) {
        return requestHeadersToAdds_.get(index);
      } else {
        return requestHeadersToAddsBuilder_.getMessage(index);
      }
    }

    /**
     *
     *
     * <pre>
     * The list of request headers to add or overwrite if they're already present.
     * </pre>
     *
     * <code>
     * repeated .google.cloud.compute.v1.SecurityPolicyRuleHttpHeaderActionHttpHeaderOption request_headers_to_adds = 87987661;
     * </code>
     */
    public Builder setRequestHeadersToAdds(
        int index,
        com.google.cloud.compute.v1.SecurityPolicyRuleHttpHeaderActionHttpHeaderOption value) {
      if (requestHeadersToAddsBuilder_ == null) {
        if (value == null) {
          throw new NullPointerException();
        }
        ensureRequestHeadersToAddsIsMutable();
        requestHeadersToAdds_.set(index, value);
        onChanged();
      } else {
        requestHeadersToAddsBuilder_.setMessage(index, value);
      }
      return this;
    }

    /**
     *
     *
     * <pre>
     * The list of request headers to add or overwrite if they're already present.
     * </pre>
     *
     * <code>
     * repeated .google.cloud.compute.v1.SecurityPolicyRuleHttpHeaderActionHttpHeaderOption request_headers_to_adds = 87987661;
     * </code>
     */
    public Builder setRequestHeadersToAdds(
        int index,
        com.google.cloud.compute.v1.SecurityPolicyRuleHttpHeaderActionHttpHeaderOption.Builder
            builderForValue) {
      if (requestHeadersToAddsBuilder_ == null) {
        ensureRequestHeadersToAddsIsMutable();
        requestHeadersToAdds_.set(index, builderForValue.build());
        onChanged();
      } else {
        requestHeadersToAddsBuilder_.setMessage(index, builderForValue.build());
      }
      return this;
    }

    /**
     *
     *
     * <pre>
     * The list of request headers to add or overwrite if they're already present.
     * </pre>
     *
     * <code>
     * repeated .google.cloud.compute.v1.SecurityPolicyRuleHttpHeaderActionHttpHeaderOption request_headers_to_adds = 87987661;
     * </code>
     */
    public Builder addRequestHeadersToAdds(
        com.google.cloud.compute.v1.SecurityPolicyRuleHttpHeaderActionHttpHeaderOption value) {
      if (requestHeadersToAddsBuilder_ == null) {
        if (value == null) {
          throw new NullPointerException();
        }
        ensureRequestHeadersToAddsIsMutable();
        requestHeadersToAdds_.add(value);
        onChanged();
      } else {
        requestHeadersToAddsBuilder_.addMessage(value);
      }
      return this;
    }

    /**
     *
     *
     * <pre>
     * The list of request headers to add or overwrite if they're already present.
     * </pre>
     *
     * <code>
     * repeated .google.cloud.compute.v1.SecurityPolicyRuleHttpHeaderActionHttpHeaderOption request_headers_to_adds = 87987661;
     * </code>
     */
    public Builder addRequestHeadersToAdds(
        int index,
        com.google.cloud.compute.v1.SecurityPolicyRuleHttpHeaderActionHttpHeaderOption value) {
      if (requestHeadersToAddsBuilder_ == null) {
        if (value == null) {
          throw new NullPointerException();
        }
        ensureRequestHeadersToAddsIsMutable();
        requestHeadersToAdds_.add(index, value);
        onChanged();
      } else {
        requestHeadersToAddsBuilder_.addMessage(index, value);
      }
      return this;
    }

    /**
     *
     *
     * <pre>
     * The list of request headers to add or overwrite if they're already present.
     * </pre>
     *
     * <code>
     * repeated .google.cloud.compute.v1.SecurityPolicyRuleHttpHeaderActionHttpHeaderOption request_headers_to_adds = 87987661;
     * </code>
     */
    public Builder addRequestHeadersToAdds(
        com.google.cloud.compute.v1.SecurityPolicyRuleHttpHeaderActionHttpHeaderOption.Builder
            builderForValue) {
      if (requestHeadersToAddsBuilder_ == null) {
        ensureRequestHeadersToAddsIsMutable();
        requestHeadersToAdds_.add(builderForValue.build());
        onChanged();
      } else {
        requestHeadersToAddsBuilder_.addMessage(builderForValue.build());
      }
      return this;
    }

    /**
     *
     *
     * <pre>
     * The list of request headers to add or overwrite if they're already present.
     * </pre>
     *
     * <code>
     * repeated .google.cloud.compute.v1.SecurityPolicyRuleHttpHeaderActionHttpHeaderOption request_headers_to_adds = 87987661;
     * </code>
     */
    public Builder addRequestHeadersToAdds(
        int index,
        com.google.cloud.compute.v1.SecurityPolicyRuleHttpHeaderActionHttpHeaderOption.Builder
            builderForValue) {
      if (requestHeadersToAddsBuilder_ == null) {
        ensureRequestHeadersToAddsIsMutable();
        requestHeadersToAdds_.add(index, builderForValue.build());
        onChanged();
      } else {
        requestHeadersToAddsBuilder_.addMessage(index, builderForValue.build());
      }
      return this;
    }

    /**
     *
     *
     * <pre>
     * The list of request headers to add or overwrite if they're already present.
     * </pre>
     *
     * <code>
     * repeated .google.cloud.compute.v1.SecurityPolicyRuleHttpHeaderActionHttpHeaderOption request_headers_to_adds = 87987661;
     * </code>
     */
    public Builder addAllRequestHeadersToAdds(
        java.lang.Iterable<
                ? extends
                    com.google.cloud.compute.v1.SecurityPolicyRuleHttpHeaderActionHttpHeaderOption>
            values) {
      if (requestHeadersToAddsBuilder_ == null) {
        ensureRequestHeadersToAddsIsMutable();
        com.google.protobuf.AbstractMessageLite.Builder.addAll(values, requestHeadersToAdds_);
        onChanged();
      } else {
        requestHeadersToAddsBuilder_.addAllMessages(values);
      }
      return this;
    }

    /**
     *
     *
     * <pre>
     * The list of request headers to add or overwrite if they're already present.
     * </pre>
     *
     * <code>
     * repeated .google.cloud.compute.v1.SecurityPolicyRuleHttpHeaderActionHttpHeaderOption request_headers_to_adds = 87987661;
     * </code>
     */
    public Builder clearRequestHeadersToAdds() {
      if (requestHeadersToAddsBuilder_ == null) {
        requestHeadersToAdds_ = java.util.Collections.emptyList();
        bitField0_ = (bitField0_ & ~0x00000001);
        onChanged();
      } else {
        requestHeadersToAddsBuilder_.clear();
      }
      return this;
    }

    /**
     *
     *
     * <pre>
     * The list of request headers to add or overwrite if they're already present.
     * </pre>
     *
     * <code>
     * repeated .google.cloud.compute.v1.SecurityPolicyRuleHttpHeaderActionHttpHeaderOption request_headers_to_adds = 87987661;
     * </code>
     */
    public Builder removeRequestHeadersToAdds(int index) {
      if (requestHeadersToAddsBuilder_ == null) {
        ensureRequestHeadersToAddsIsMutable();
        requestHeadersToAdds_.remove(index);
        onChanged();
      } else {
        requestHeadersToAddsBuilder_.remove(index);
      }
      return this;
    }

    /**
     *
     *
     * <pre>
     * The list of request headers to add or overwrite if they're already present.
     * </pre>
     *
     * <code>
     * repeated .google.cloud.compute.v1.SecurityPolicyRuleHttpHeaderActionHttpHeaderOption request_headers_to_adds = 87987661;
     * </code>
     */
    public com.google.cloud.compute.v1.SecurityPolicyRuleHttpHeaderActionHttpHeaderOption.Builder
        getRequestHeadersToAddsBuilder(int index) {
      return getRequestHeadersToAddsFieldBuilder().getBuilder(index);
    }

    /**
     *
     *
     * <pre>
     * The list of request headers to add or overwrite if they're already present.
     * </pre>
     *
     * <code>
     * repeated .google.cloud.compute.v1.SecurityPolicyRuleHttpHeaderActionHttpHeaderOption request_headers_to_adds = 87987661;
     * </code>
     */
    public com.google.cloud.compute.v1.SecurityPolicyRuleHttpHeaderActionHttpHeaderOptionOrBuilder
        getRequestHeadersToAddsOrBuilder(int index) {
      if (requestHeadersToAddsBuilder_ == null) {
        return requestHeadersToAdds_.get(index);
      } else {
        return requestHeadersToAddsBuilder_.getMessageOrBuilder(index);
      }
    }

    /**
     *
     *
     * <pre>
     * The list of request headers to add or overwrite if they're already present.
     * </pre>
     *
     * <code>
     * repeated .google.cloud.compute.v1.SecurityPolicyRuleHttpHeaderActionHttpHeaderOption request_headers_to_adds = 87987661;
     * </code>
     */
    public java.util.List<
            ? extends
                com.google.cloud.compute.v1
                    .SecurityPolicyRuleHttpHeaderActionHttpHeaderOptionOrBuilder>
        getRequestHeadersToAddsOrBuilderList() {
      if (requestHeadersToAddsBuilder_ != null) {
        return requestHeadersToAddsBuilder_.getMessageOrBuilderList();
      } else {
        return java.util.Collections.unmodifiableList(requestHeadersToAdds_);
      }
    }

    /**
     *
     *
     * <pre>
     * The list of request headers to add or overwrite if they're already present.
     * </pre>
     *
     * <code>
     * repeated .google.cloud.compute.v1.SecurityPolicyRuleHttpHeaderActionHttpHeaderOption request_headers_to_adds = 87987661;
     * </code>
     */
    public com.google.cloud.compute.v1.SecurityPolicyRuleHttpHeaderActionHttpHeaderOption.Builder
        addRequestHeadersToAddsBuilder() {
      return getRequestHeadersToAddsFieldBuilder()
          .addBuilder(
              com.google.cloud.compute.v1.SecurityPolicyRuleHttpHeaderActionHttpHeaderOption
                  .getDefaultInstance());
    }

    /**
     *
     *
     * <pre>
     * The list of request headers to add or overwrite if they're already present.
     * </pre>
     *
     * <code>
     * repeated .google.cloud.compute.v1.SecurityPolicyRuleHttpHeaderActionHttpHeaderOption request_headers_to_adds = 87987661;
     * </code>
     */
    public com.google.cloud.compute.v1.SecurityPolicyRuleHttpHeaderActionHttpHeaderOption.Builder
        addRequestHeadersToAddsBuilder(int index) {
      return getRequestHeadersToAddsFieldBuilder()
          .addBuilder(
              index,
              com.google.cloud.compute.v1.SecurityPolicyRuleHttpHeaderActionHttpHeaderOption
                  .getDefaultInstance());
    }

    /**
     *
     *
     * <pre>
     * The list of request headers to add or overwrite if they're already present.
     * </pre>
     *
     * <code>
     * repeated .google.cloud.compute.v1.SecurityPolicyRuleHttpHeaderActionHttpHeaderOption request_headers_to_adds = 87987661;
     * </code>
     */
    public java.util.List<
            com.google.cloud.compute.v1.SecurityPolicyRuleHttpHeaderActionHttpHeaderOption.Builder>
        getRequestHeadersToAddsBuilderList() {
      return getRequestHeadersToAddsFieldBuilder().getBuilderList();
    }

    // Lazily creates the nested RepeatedFieldBuilderV3; once created it owns the list
    // (the local requestHeadersToAdds_ reference is nulled out).
    private com.google.protobuf.RepeatedFieldBuilderV3<
            com.google.cloud.compute.v1.SecurityPolicyRuleHttpHeaderActionHttpHeaderOption,
            com.google.cloud.compute.v1.SecurityPolicyRuleHttpHeaderActionHttpHeaderOption.Builder,
            com.google.cloud.compute.v1.SecurityPolicyRuleHttpHeaderActionHttpHeaderOptionOrBuilder>
        getRequestHeadersToAddsFieldBuilder() {
      if (requestHeadersToAddsBuilder_ == null) {
        requestHeadersToAddsBuilder_ =
            new com.google.protobuf.RepeatedFieldBuilderV3<
                com.google.cloud.compute.v1.SecurityPolicyRuleHttpHeaderActionHttpHeaderOption,
                com.google.cloud.compute.v1.SecurityPolicyRuleHttpHeaderActionHttpHeaderOption
                    .Builder,
                com.google.cloud.compute.v1
                    .SecurityPolicyRuleHttpHeaderActionHttpHeaderOptionOrBuilder>(
                requestHeadersToAdds_,
                ((bitField0_ & 0x00000001) != 0),
                getParentForChildren(),
                isClean());
        requestHeadersToAdds_ = null;
      }
      return requestHeadersToAddsBuilder_;
    }

    @java.lang.Override
    public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.setUnknownFields(unknownFields);
    }

    @java.lang.Override
    public final Builder mergeUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.mergeUnknownFields(unknownFields);
    }

    // @@protoc_insertion_point(builder_scope:google.cloud.compute.v1.SecurityPolicyRuleHttpHeaderAction)
  }

  // @@protoc_insertion_point(class_scope:google.cloud.compute.v1.SecurityPolicyRuleHttpHeaderAction)
  // Singleton default (all-fields-unset) instance, created eagerly at class load.
  private static final com.google.cloud.compute.v1.SecurityPolicyRuleHttpHeaderAction
      DEFAULT_INSTANCE;

  static {
    DEFAULT_INSTANCE = new com.google.cloud.compute.v1.SecurityPolicyRuleHttpHeaderAction();
  }

  public static com.google.cloud.compute.v1.SecurityPolicyRuleHttpHeaderAction
      getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }

  // Wire-format parser: delegates to Builder.mergeFrom and attaches the partially-built
  // message to any thrown InvalidProtocolBufferException.
  private static final com.google.protobuf.Parser<SecurityPolicyRuleHttpHeaderAction> PARSER =
      new com.google.protobuf.AbstractParser<SecurityPolicyRuleHttpHeaderAction>() {
        @java.lang.Override
        public SecurityPolicyRuleHttpHeaderAction parsePartialFrom(
            com.google.protobuf.CodedInputStream input,
            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
            throws com.google.protobuf.InvalidProtocolBufferException {
          Builder builder = newBuilder();
          try {
            builder.mergeFrom(input, extensionRegistry);
          } catch (com.google.protobuf.InvalidProtocolBufferException e) {
            throw e.setUnfinishedMessage(builder.buildPartial());
          } catch (com.google.protobuf.UninitializedMessageException e) {
            throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
          } catch (java.io.IOException e) {
            throw new com.google.protobuf.InvalidProtocolBufferException(e)
                .setUnfinishedMessage(builder.buildPartial());
          }
          return builder.buildPartial();
        }
      };

  public static com.google.protobuf.Parser<SecurityPolicyRuleHttpHeaderAction> parser() {
    return PARSER;
  }

  @java.lang.Override
  public com.google.protobuf.Parser<SecurityPolicyRuleHttpHeaderAction> getParserForType() {
    return PARSER;
  }

  @java.lang.Override
  public com.google.cloud.compute.v1.SecurityPolicyRuleHttpHeaderAction
      getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }
}
|
apache/phoenix | 37,198 | phoenix-core/src/it/java/org/apache/phoenix/end2end/transform/TransformMonitorIT.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.phoenix.end2end.transform;
import static org.apache.phoenix.end2end.IndexRebuildTaskIT.waitForTaskState;
import static org.apache.phoenix.exception.SQLExceptionCode.CANNOT_CREATE_TENANT_SPECIFIC_TABLE;
import static org.apache.phoenix.util.TestUtil.TEST_PROPERTIES;
import static org.apache.phoenix.util.TestUtil.getRawRowCount;
import static org.apache.phoenix.util.TestUtil.getRowCount;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;
import java.io.IOException;
import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Timestamp;
import java.util.List;
import java.util.Properties;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Admin;
import org.apache.hadoop.hbase.client.Table;
import org.apache.hadoop.hbase.coprocessor.RegionCoprocessorEnvironment;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.phoenix.coprocessor.TaskRegionObserver;
import org.apache.phoenix.coprocessor.tasks.TransformMonitorTask;
import org.apache.phoenix.end2end.ParallelStatsDisabledIT;
import org.apache.phoenix.end2end.index.SingleCellIndexIT;
import org.apache.phoenix.jdbc.ConnectionInfo;
import org.apache.phoenix.jdbc.PhoenixConnection;
import org.apache.phoenix.jdbc.PhoenixDatabaseMetaData;
import org.apache.phoenix.query.ConnectionQueryServices;
import org.apache.phoenix.query.QueryServices;
import org.apache.phoenix.query.QueryServicesOptions;
import org.apache.phoenix.schema.PTable;
import org.apache.phoenix.schema.task.ServerTask;
import org.apache.phoenix.schema.task.SystemTaskParams;
import org.apache.phoenix.schema.task.Task;
import org.apache.phoenix.schema.transform.SystemTransformRecord;
import org.apache.phoenix.schema.transform.Transform;
import org.apache.phoenix.util.EnvironmentEdgeManager;
import org.apache.phoenix.util.MetaDataUtil;
import org.apache.phoenix.util.PhoenixRuntime;
import org.apache.phoenix.util.PropertiesUtil;
import org.apache.phoenix.util.SchemaUtil;
import org.junit.Before;
import org.junit.Test;
public class TransformMonitorIT extends ParallelStatsDisabledIT {
  // Region coprocessor environment of the region server hosting SYSTEM.TASK, used to drive
  // TaskRegionObserver.SelfHealingTask directly from tests.
  // NOTE(review): static but assigned from the instance constructor — the last-constructed
  // instance wins; confirm this is intentional for parallel test runs.
  private static RegionCoprocessorEnvironment TaskRegionEnvironment;
  // Per-test connection properties (default storage scheme, column encoding, ACLs).
  private Properties testProps = PropertiesUtil.deepCopy(TEST_PROPERTIES);
  public TransformMonitorIT() throws IOException, InterruptedException {
    // Default new tables to non-encoded, one-cell-per-column storage so each test starts in
    // the "old" format and the ALTER statements below trigger a real transform.
    testProps.put(QueryServices.DEFAULT_IMMUTABLE_STORAGE_SCHEME_ATTRIB, "ONE_CELL_PER_COLUMN");
    testProps.put(QueryServices.DEFAULT_COLUMN_ENCODED_BYTES_ATRRIB, "0");
    testProps.put(QueryServices.PHOENIX_ACLS_ENABLED, "true");
    // Capture the TaskRegionObserver environment from the first SYSTEM.TASK region.
    TaskRegionEnvironment = (RegionCoprocessorEnvironment) getUtility()
      .getRSForFirstRegionInTable(PhoenixDatabaseMetaData.SYSTEM_TASK_HBASE_TABLE_NAME)
      .getRegions(PhoenixDatabaseMetaData.SYSTEM_TASK_HBASE_TABLE_NAME).get(0).getCoprocessorHost()
      .findCoprocessorEnvironment(TaskRegionObserver.class.getName());
  }
@Before
public void setupTest() throws Exception {
try (Connection conn = DriverManager.getConnection(getUrl(), testProps)) {
conn.setAutoCommit(true);
conn.createStatement()
.execute("DELETE FROM " + PhoenixDatabaseMetaData.SYSTEM_TRANSFORM_NAME);
conn.createStatement().execute("DELETE FROM " + PhoenixDatabaseMetaData.SYSTEM_TASK_NAME);
}
}
  /**
   * End-to-end transform scenario: creates a table (optionally immutable), optionally an index
   * or a view with a view index, ALTERs the table to SINGLE_CELL_ARRAY_WITH_OFFSETS /
   * 2-byte encoding, then verifies the TRANSFORM_MONITOR task completes, the logical table is
   * repointed to the new physical table, and row counts/reads survive the cutover.
   */
  private void testTransformTable(boolean createIndex, boolean createView, boolean isImmutable)
      throws Exception {
    String schemaName = generateUniqueName();
    String dataTableName = "TBL_" + generateUniqueName();
    String dataTableFullName = SchemaUtil.getTableName(schemaName, dataTableName);
    // Transform writes into "<table>_1" as the new physical table.
    String newTableName = dataTableName + "_1";
    String newTableFullName = SchemaUtil.getTableName(schemaName, newTableName);
    String indexName = "IDX_" + generateUniqueName();
    String indexName2 = "IDX_" + generateUniqueName();
    String viewName = "VW_" + generateUniqueName();
    String viewName2 = "VW2_" + generateUniqueName();
    String viewIdxName = "VW_IDX_" + generateUniqueName();
    String viewIdxName2 = "VW_IDX_" + generateUniqueName();
    String view2IdxName1 = "VW2_IDX_" + generateUniqueName();
    String indexFullName = SchemaUtil.getTableName(schemaName, indexName);
    String createIndexStmt = "CREATE INDEX %s ON " + dataTableFullName + " (NAME) INCLUDE (ZIP) ";
    String createViewStmt =
      "CREATE VIEW %s ( VIEW_COL1 INTEGER, VIEW_COL2 VARCHAR ) AS SELECT * FROM "
        + dataTableFullName;
    String createViewIdxSql =
      "CREATE INDEX %s ON " + viewName + " (VIEW_COL1) include (VIEW_COL2) ";
    try (PhoenixConnection conn =
        (PhoenixConnection) DriverManager.getConnection(getUrl(), testProps)) {
      conn.setAutoCommit(true);
      int numOfRows = 10;
      TransformToolIT.createTableAndUpsertRows(conn, dataTableFullName, numOfRows,
        isImmutable ? " IMMUTABLE_ROWS=true" : "");
      if (createIndex) {
        conn.createStatement().execute(String.format(createIndexStmt, indexName));
      }
      if (createView) {
        conn.createStatement().execute(String.format(createViewStmt, viewName));
        conn.createStatement().execute(String.format(createViewIdxSql, viewIdxName));
        conn.createStatement().execute("UPSERT INTO " + viewName
          + "(ID, NAME, VIEW_COL1, VIEW_COL2) VALUES (1, 'uname11', 100, 'viewCol2')");
      }
      // Pre-transform metadata sanity check: still old storage scheme/encoding.
      assertMetadata(conn, PTable.ImmutableStorageScheme.ONE_CELL_PER_COLUMN,
        PTable.QualifierEncodingScheme.NON_ENCODED_QUALIFIERS, dataTableFullName);
      // This ALTER kicks off the transform and enqueues a TRANSFORM_MONITOR task.
      conn.createStatement().execute("ALTER TABLE " + dataTableFullName
        + " SET IMMUTABLE_STORAGE_SCHEME=SINGLE_CELL_ARRAY_WITH_OFFSETS, COLUMN_ENCODED_BYTES=2");
      SystemTransformRecord record = Transform.getTransformRecord(schemaName, dataTableName, null,
        null, conn.unwrap(PhoenixConnection.class));
      assertNotNull(record);
      List<Task.TaskRecord> taskRecordList = Task.queryTaskTable(conn, null);
      assertEquals(1, taskRecordList.size());
      assertEquals(PTable.TaskType.TRANSFORM_MONITOR, taskRecordList.get(0).getTaskType());
      assertEquals(schemaName, taskRecordList.get(0).getSchemaName());
      assertEquals(dataTableName, taskRecordList.get(0).getTableName());
      waitForTransformToGetToState(conn.unwrap(PhoenixConnection.class), record,
        PTable.TransformStatus.COMPLETED);
      // Test that the PhysicalTableName is updated.
      PTable oldTable = conn.getTableNoCache(dataTableFullName);
      assertEquals(newTableName, oldTable.getPhysicalName(true).getString());
      assertMetadata(conn, PTable.ImmutableStorageScheme.SINGLE_CELL_ARRAY_WITH_OFFSETS,
        PTable.QualifierEncodingScheme.TWO_BYTE_QUALIFIERS, record.getNewPhysicalTableName());
      assertMetadata(conn, PTable.ImmutableStorageScheme.SINGLE_CELL_ARRAY_WITH_OFFSETS,
        PTable.QualifierEncodingScheme.TWO_BYTE_QUALIFIERS, dataTableFullName);
      ConnectionQueryServices cqs = conn.unwrap(PhoenixConnection.class).getQueryServices();
      // Raw HBase count of the old physical table must match SQL count against the new one.
      long newRowCount = countRows(conn, newTableFullName);
      assertEquals(getRawRowCount(cqs.getTable(Bytes.toBytes(dataTableFullName))), newRowCount);
      if (createIndex) {
        assertEquals(newRowCount, countRows(conn, indexFullName));
        int additionalRows = 2;
        // Upsert new rows to new table. Note that after transform is complete, we are using the new
        // table
        TransformToolIT.upsertRows(conn, dataTableFullName, (int) newRowCount + 1, additionalRows);
        assertEquals(newRowCount + additionalRows, countRows(conn, indexFullName));
        assertEquals(newRowCount, getRawRowCount(cqs.getTable(Bytes.toBytes(dataTableFullName))));
        // Create another index on the new table and count
        // Deleting the old HBase table first proves nothing still reads/writes it.
        Admin admin = conn.unwrap(PhoenixConnection.class).getQueryServices().getAdmin();
        TableName hTableName = TableName.valueOf(dataTableFullName);
        admin.disableTable(hTableName);
        admin.deleteTable(hTableName);
        conn.createStatement().execute(String.format(createIndexStmt, indexName2));
        assertEquals(newRowCount + additionalRows, countRows(conn, dataTableFullName));
        assertEquals(newRowCount + additionalRows,
          countRows(conn, SchemaUtil.getTableName(schemaName, indexName2)));
      } else if (createView) {
        assertEquals(numOfRows, countRows(conn, viewName));
        assertEquals(numOfRows, countRowsForViewIndex(conn, dataTableFullName));
        assertMetadata(conn, PTable.ImmutableStorageScheme.SINGLE_CELL_ARRAY_WITH_OFFSETS,
          PTable.QualifierEncodingScheme.TWO_BYTE_QUALIFIERS, viewName);
        conn.unwrap(PhoenixConnection.class).getQueryServices().clearCache();
        ResultSet rs = conn.createStatement()
          .executeQuery("SELECT VIEW_COL2 FROM " + viewName + " WHERE VIEW_COL1=100");
        assertTrue(rs.next());
        assertEquals("viewCol2", rs.getString(1));
        assertFalse(rs.next());
        int additionalRows = 2;
        // Upsert new rows to new table. Note that after transform is complete, we are using the new
        // table
        TransformToolIT.upsertRows(conn, viewName, (int) newRowCount + 1, additionalRows);
        assertEquals(newRowCount + additionalRows, getRowCount(conn, viewName));
        assertEquals(newRowCount + additionalRows, countRowsForViewIndex(conn, dataTableFullName));
        // Drop view index and create another on the new table and count
        conn.createStatement().execute("DROP INDEX " + viewIdxName + " ON " + viewName);
        Admin admin = conn.unwrap(PhoenixConnection.class).getQueryServices().getAdmin();
        TableName hTableName = TableName.valueOf(dataTableFullName);
        admin.disableTable(hTableName);
        admin.deleteTable(hTableName);
        conn.createStatement().execute(String.format(createViewIdxSql, viewIdxName2));
        assertEquals(newRowCount + additionalRows, countRowsForViewIndex(conn, dataTableFullName));
        // Create another view and have a new index on top
        conn.createStatement().execute(String.format(createViewStmt, viewName2));
        conn.createStatement().execute(String.format(createViewIdxSql, view2IdxName1));
        // Both view indexes share the same physical view-index table, hence * 2.
        assertEquals((newRowCount + additionalRows) * 2,
          countRowsForViewIndex(conn, dataTableFullName));
        conn.createStatement().execute("UPSERT INTO " + viewName2
          + "(ID, NAME, VIEW_COL1, VIEW_COL2) VALUES (100, 'uname100', 1000, 'viewCol100')");
        rs = conn.createStatement()
          .executeQuery("SELECT VIEW_COL2, NAME FROM " + viewName2 + " WHERE VIEW_COL1=1000");
        assertTrue(rs.next());
        assertEquals("viewCol100", rs.getString(1));
        assertEquals("uname100", rs.getString(2));
        assertFalse(rs.next());
      }
    }
  }
public static int countRows(Connection conn, String tableFullName) throws SQLException {
ResultSet count =
conn.createStatement().executeQuery("select /*+ NO_INDEX*/ count(*) from " + tableFullName);
count.next();
int numRows = count.getInt(1);
return numRows;
}
protected int countRowsForViewIndex(Connection conn, String baseTable)
throws IOException, SQLException {
String viewIndexTableName = MetaDataUtil.getViewIndexPhysicalName(baseTable);
ConnectionQueryServices queryServices = conn.unwrap(PhoenixConnection.class).getQueryServices();
Table indexHTable = queryServices.getTable(Bytes.toBytes(viewIndexTableName));
// If there are multiple indexes on this view, this will return rows for others as well. For 1
// view index, it is fine.
return getUtility().countRows(indexHTable);
}
  // Mutable table, no index/view.
  @Test
  public void testTransformMonitor_mutableTableWithoutIndex() throws Exception {
    testTransformTable(false, false, false);
  }

  // Immutable table, no index/view.
  @Test
  public void testTransformMonitor_immutableTableWithoutIndex() throws Exception {
    testTransformTable(false, false, true);
  }

  // Immutable table with a covered index.
  @Test
  public void testTransformMonitor_immutableTableWithIndex() throws Exception {
    testTransformTable(true, false, true);
  }

  // A PAUSED transform should still let the monitor task complete (it is a no-op for it).
  @Test
  public void testTransformMonitor_pausedTransform() throws Exception {
    testTransformMonitor_checkStates(PTable.TransformStatus.PAUSED, PTable.TaskStatus.COMPLETED);
  }

  // An already-COMPLETED transform completes the monitor task.
  @Test
  public void testTransformMonitor_completedTransform() throws Exception {
    testTransformMonitor_checkStates(PTable.TransformStatus.COMPLETED, PTable.TaskStatus.COMPLETED);
  }

  // A FAILED transform fails the monitor task.
  @Test
  public void testTransformMonitor_failedTransform() throws Exception {
    testTransformMonitor_checkStates(PTable.TransformStatus.FAILED, PTable.TaskStatus.FAILED);
  }
  /**
   * Seeds a SYSTEM.TRANSFORM record in {@code transformStatus}, enqueues a TRANSFORM_MONITOR
   * task for it, runs the self-healing task loop once, and asserts the task ends in
   * {@code taskStatus}.
   */
  private void testTransformMonitor_checkStates(PTable.TransformStatus transformStatus,
      PTable.TaskStatus taskStatus) throws Exception {
    try (Connection conn = DriverManager.getConnection(getUrl(), testProps)) {
      conn.setAutoCommit(true);
      // Write the transform record directly instead of going through an ALTER.
      SystemTransformRecord.SystemTransformBuilder transformBuilder =
        new SystemTransformRecord.SystemTransformBuilder();
      String logicalTableName = generateUniqueName();
      transformBuilder.setLogicalTableName(logicalTableName);
      transformBuilder.setTransformStatus(transformStatus.name());
      transformBuilder.setNewPhysicalTableName(logicalTableName + "_1");
      Transform.upsertTransform(transformBuilder.build(), conn.unwrap(PhoenixConnection.class));
      // Drive the SYSTEM.TASK coprocessor loop manually via the captured region environment.
      TaskRegionObserver.SelfHealingTask task = new TaskRegionObserver.SelfHealingTask(
        TaskRegionEnvironment, QueryServicesOptions.DEFAULT_TASK_HANDLING_MAX_INTERVAL_MS);
      Timestamp startTs = new Timestamp(EnvironmentEdgeManager.currentTimeMillis());
      ServerTask.addTask(
        new SystemTaskParams.SystemTaskParamsBuilder().setConn(conn.unwrap(PhoenixConnection.class))
          .setTaskType(PTable.TaskType.TRANSFORM_MONITOR).setTenantId(null).setSchemaName(null)
          .setTableName(logicalTableName).setTaskStatus(PTable.TaskStatus.CREATED.toString())
          .setData(null).setPriority(null).setStartTs(startTs).setEndTs(null).build());
      task.run();
      waitForTaskState(conn, PTable.TaskType.TRANSFORM_MONITOR, logicalTableName, taskStatus);
    }
  }
  /**
   * Pauses a table transform, then resumes it via TransformTool and verifies the single
   * TRANSFORM_MONITOR task completes.
   */
  @Test
  public void testTransformMonitor_pauseAndResumeTransform() throws Exception {
    String schemaName = generateUniqueName();
    String dataTableName = generateUniqueName();
    try (Connection conn = DriverManager.getConnection(getUrl(), testProps)) {
      conn.setAutoCommit(true);
      TransformToolIT.pauseTableTransform(schemaName, dataTableName, conn, "");
      List<String> args = TransformToolIT.getArgList(schemaName, dataTableName, null, null, null,
        null, false, false, true, false, false);
      // This run resumes transform and TransformMonitor task runs and completes it
      TransformToolIT.runTransformTool(args.toArray(new String[0]), 0);
      SystemTransformRecord record = Transform.getTransformRecord(schemaName, dataTableName, null,
        null, conn.unwrap(PhoenixConnection.class));
      List<Task.TaskRecord> taskRecordList = Task.queryTaskTable(conn, null);
      assertEquals(1, taskRecordList.size());
      assertEquals(PTable.TaskType.TRANSFORM_MONITOR, taskRecordList.get(0).getTaskType());
      assertEquals(schemaName, taskRecordList.get(0).getSchemaName());
      assertEquals(dataTableName, taskRecordList.get(0).getTableName());
      waitForTaskState(conn, PTable.TaskType.TRANSFORM_MONITOR, dataTableName,
        PTable.TaskStatus.COMPLETED);
    }
  }
  // Mutable table with a covered index.
  @Test
  public void testTransformMonitor_mutableTableWithIndex() throws Exception {
    testTransformTable(true, false, false);
  }

  // Table with a view (plus view index) on top.
  @Test
  public void testTransformMonitor_tableWithViews() throws Exception {
    testTransformTable(false, true, false);
  }
  /**
   * Transforms a secondary index (via ALTER INDEX ... ACTIVE) and verifies the monitor
   * completes, the index is repointed to "&lt;index&gt;_1", and row counts match.
   */
  @Test
  public void testTransformMonitor_index() throws Exception {
    String schemaName = generateUniqueName();
    String dataTableName = "TBL_" + generateUniqueName();
    String dataTableFullName = SchemaUtil.getTableName(schemaName, dataTableName);
    String indexName = "IDX_" + generateUniqueName();
    String indexFullName = SchemaUtil.getTableName(schemaName, indexName);
    String newTableFullName = indexFullName + "_1";
    String createIndexStmt =
      "CREATE INDEX " + indexName + " ON " + dataTableFullName + " (ZIP) INCLUDE (NAME) ";
    try (PhoenixConnection conn =
        (PhoenixConnection) DriverManager.getConnection(getUrl(), testProps)) {
      conn.setAutoCommit(true);
      int numOfRows = 10;
      TransformToolIT.createTableAndUpsertRows(conn, dataTableFullName, numOfRows, "");
      conn.createStatement().execute(createIndexStmt);
      assertMetadata(conn, PTable.ImmutableStorageScheme.ONE_CELL_PER_COLUMN,
        PTable.QualifierEncodingScheme.NON_ENCODED_QUALIFIERS, indexFullName);
      // Index transform is triggered through ALTER INDEX ... ACTIVE with new storage props.
      conn.createStatement().execute("ALTER INDEX " + indexName + " ON " + dataTableFullName
        + " ACTIVE IMMUTABLE_STORAGE_SCHEME=SINGLE_CELL_ARRAY_WITH_OFFSETS, COLUMN_ENCODED_BYTES=2");
      SystemTransformRecord record = Transform.getTransformRecord(schemaName, indexName,
        dataTableFullName, null, conn.unwrap(PhoenixConnection.class));
      assertNotNull(record);
      List<Task.TaskRecord> taskRecordList = Task.queryTaskTable(conn, null);
      assertEquals(1, taskRecordList.size());
      assertEquals(PTable.TaskType.TRANSFORM_MONITOR, taskRecordList.get(0).getTaskType());
      assertEquals(schemaName, taskRecordList.get(0).getSchemaName());
      assertEquals(indexName, taskRecordList.get(0).getTableName());
      waitForTransformToGetToState(conn.unwrap(PhoenixConnection.class), record,
        PTable.TransformStatus.COMPLETED);
      // Test that the PhysicalTableName is updated.
      PTable oldTable = conn.getTableNoCache(indexFullName);
      assertEquals(indexName + "_1", oldTable.getPhysicalName(true).getString());
      assertMetadata(conn, PTable.ImmutableStorageScheme.SINGLE_CELL_ARRAY_WITH_OFFSETS,
        PTable.QualifierEncodingScheme.TWO_BYTE_QUALIFIERS, newTableFullName);
      ConnectionQueryServices cqs = conn.unwrap(PhoenixConnection.class).getQueryServices();
      long newRowCount = countRows(conn, newTableFullName);
      assertEquals(getRawRowCount(cqs.getTable(Bytes.toBytes(indexFullName))), newRowCount);
    }
  }
  /**
   * Multi-tenant scenario: a global table with a tenant view. Verifies a tenant connection
   * cannot trigger the transform, the global connection can, and tenant reads/writes work
   * after cutover.
   */
  @Test
  public void testTransformTableWithTenantViews() throws Exception {
    String tenantId = generateUniqueName();
    String dataTableName = generateUniqueName();
    String viewTenantName = "TENANTVW_" + generateUniqueName();
    String createTblStr = "CREATE TABLE %s (TENANT_ID VARCHAR(15) NOT NULL,ID INTEGER NOT NULL"
      + ", NAME VARCHAR, CONSTRAINT PK_1 PRIMARY KEY (TENANT_ID, ID)) MULTI_TENANT=true";
    String createViewStr = "CREATE VIEW %s (VIEW_COL1 VARCHAR) AS SELECT * FROM %s";
    String upsertQueryStr =
      "UPSERT INTO %s (TENANT_ID, ID, NAME, VIEW_COL1) VALUES('%s' , %d, '%s', '%s')";
    Properties props = PropertiesUtil.deepCopy(testProps);
    Connection connGlobal = null;
    Connection connTenant = null;
    try {
      connGlobal = DriverManager.getConnection(getUrl(), props);
      // Tenant-scoped connection: same URL, TENANT_ID property set.
      props.setProperty(PhoenixRuntime.TENANT_ID_ATTRIB, tenantId);
      connTenant = DriverManager.getConnection(getUrl(), props);
      connTenant.setAutoCommit(true);
      String tableStmtGlobal = String.format(createTblStr, dataTableName);
      connGlobal.createStatement().execute(tableStmtGlobal);
      assertMetadata(connGlobal, PTable.ImmutableStorageScheme.ONE_CELL_PER_COLUMN,
        PTable.QualifierEncodingScheme.NON_ENCODED_QUALIFIERS, dataTableName);
      String viewStmtTenant = String.format(createViewStr, viewTenantName, dataTableName);
      connTenant.createStatement().execute(viewStmtTenant);
      // TODO: Fix this as part of implementing TransformTool so that the tenant view rows could be
      // read from the tool
      // connTenant.createStatement()
      // .execute(String.format(upsertQueryStr, viewTenantName, tenantId, 1, "x", "xx"));
      try {
        // Tenant connections may not alter the global table.
        connTenant.createStatement().execute("ALTER TABLE " + dataTableName
          + " SET IMMUTABLE_STORAGE_SCHEME=SINGLE_CELL_ARRAY_WITH_OFFSETS, COLUMN_ENCODED_BYTES=2");
        fail("Tenant connection cannot do alter");
      } catch (SQLException e) {
        assertEquals(CANNOT_CREATE_TENANT_SPECIFIC_TABLE.getErrorCode(), e.getErrorCode());
      }
      connGlobal.createStatement().execute("ALTER TABLE " + dataTableName
        + " SET IMMUTABLE_STORAGE_SCHEME=SINGLE_CELL_ARRAY_WITH_OFFSETS, COLUMN_ENCODED_BYTES=2");
      SystemTransformRecord tableRecord = Transform.getTransformRecord(null, dataTableName, null,
        null, connGlobal.unwrap(PhoenixConnection.class));
      assertNotNull(tableRecord);
      waitForTransformToGetToState(connGlobal.unwrap(PhoenixConnection.class), tableRecord,
        PTable.TransformStatus.COMPLETED);
      // Tenant writes and reads must hit the new physical table after cutover.
      connTenant.createStatement()
        .execute(String.format(upsertQueryStr, viewTenantName, tenantId, 2, "y", "yy"));
      ResultSet rs = connTenant.createStatement()
        .executeQuery("SELECT /*+ NO_INDEX */ VIEW_COL1 FROM " + viewTenantName);
      assertTrue(rs.next());
      // assertEquals("xx", rs.getString(1));
      // assertTrue(rs.next());
      assertEquals("yy", rs.getString(1));
      assertFalse(rs.next());
    } finally {
      if (connGlobal != null) {
        connGlobal.close();
      }
      if (connTenant != null) {
        connTenant.close();
      }
    }
  }
  /**
   * Transforms an index twice (to SINGLE_CELL and then back to ONE_CELL), removing the first
   * transform record in between, and verifies the second transform operates on the already
   * transformed index and reads still return all rows.
   */
  @Test
  public void testTransformAlreadyTransformedIndex() throws Exception {
    String dataTableName = "TBL_" + generateUniqueName();
    String indexName = "IDX_" + generateUniqueName();
    String createIndexStmt = "CREATE INDEX %s ON " + dataTableName + " (NAME) INCLUDE (ZIP) ";
    try (Connection conn = DriverManager.getConnection(getUrl(), testProps)) {
      conn.setAutoCommit(true);
      int numOfRows = 1;
      TransformToolIT.createTableAndUpsertRows(conn, dataTableName, numOfRows, "");
      conn.createStatement().execute(String.format(createIndexStmt, indexName));
      assertEquals(numOfRows, countRows(conn, indexName));
      assertMetadata(conn, PTable.ImmutableStorageScheme.ONE_CELL_PER_COLUMN,
        PTable.QualifierEncodingScheme.NON_ENCODED_QUALIFIERS, indexName);
      // First transform: to SINGLE_CELL_ARRAY_WITH_OFFSETS / 2-byte encoding.
      conn.createStatement().execute("ALTER INDEX " + indexName + " ON " + dataTableName
        + " ACTIVE IMMUTABLE_STORAGE_SCHEME=SINGLE_CELL_ARRAY_WITH_OFFSETS, COLUMN_ENCODED_BYTES=2");
      SystemTransformRecord record = Transform.getTransformRecord(null, indexName, dataTableName,
        null, conn.unwrap(PhoenixConnection.class));
      assertNotNull(record);
      waitForTransformToGetToState(conn.unwrap(PhoenixConnection.class), record,
        PTable.TransformStatus.COMPLETED);
      assertMetadata(conn, PTable.ImmutableStorageScheme.SINGLE_CELL_ARRAY_WITH_OFFSETS,
        PTable.QualifierEncodingScheme.TWO_BYTE_QUALIFIERS, record.getNewPhysicalTableName());
      TransformToolIT.upsertRows(conn, dataTableName, 2, 1);
      // Removing this so that we are sure that we are not picking up the old transform record.
      Transform.removeTransformRecord(record, conn.unwrap(PhoenixConnection.class));
      // Second transform: back to ONE_CELL_PER_COLUMN / non-encoded.
      conn.createStatement().execute("ALTER INDEX " + indexName + " ON " + dataTableName
        + " ACTIVE SET IMMUTABLE_STORAGE_SCHEME=ONE_CELL_PER_COLUMN, COLUMN_ENCODED_BYTES=0");
      record = Transform.getTransformRecord(null, indexName, dataTableName, null,
        conn.unwrap(PhoenixConnection.class));
      assertNotNull(record);
      waitForTransformToGetToState(conn.unwrap(PhoenixConnection.class), record,
        PTable.TransformStatus.COMPLETED);
      TransformToolIT.upsertRows(conn, dataTableName, 3, 1);
      assertEquals(numOfRows + 2, countRows(conn, indexName));
      assertMetadata(conn, PTable.ImmutableStorageScheme.ONE_CELL_PER_COLUMN,
        PTable.QualifierEncodingScheme.NON_ENCODED_QUALIFIERS, record.getNewPhysicalTableName());
      // Query the raw index columns (":ID" row key ref, "0:ZIP" covered column).
      ResultSet rs =
        conn.createStatement().executeQuery("SELECT \":ID\", \"0:ZIP\" FROM " + indexName);
      assertTrue(rs.next());
      assertEquals("1", rs.getString(1));
      assertEquals(95051, rs.getInt(2));
      assertTrue(rs.next());
      assertEquals("2", rs.getString(1));
      assertEquals(95052, rs.getInt(2));
      assertTrue(rs.next());
      assertEquals("3", rs.getString(1));
      assertEquals(95053, rs.getInt(2));
      assertFalse(rs.next());
    }
  }
  /**
   * Transforms a table twice (forward then back), deleting the original HBase table and the
   * first transform record in between, proving reads/writes go to the newest physical table.
   */
  @Test
  public void testTransformAlreadyTransformedTable() throws Exception {
    String dataTableName = "TBL_" + generateUniqueName();
    try (Connection conn = DriverManager.getConnection(getUrl(), testProps)) {
      conn.setAutoCommit(true);
      int numOfRows = 1;
      String stmString1 = "CREATE TABLE IF NOT EXISTS " + dataTableName
        + " (ID INTEGER NOT NULL, CITY_PK VARCHAR NOT NULL, NAME_PK VARCHAR NOT NULL,NAME VARCHAR, ZIP INTEGER CONSTRAINT PK PRIMARY KEY(ID, CITY_PK, NAME_PK)) ";
      conn.createStatement().execute(stmString1);
      String upsertQuery = "UPSERT INTO %s VALUES(%d, '%s', '%s', '%s', %d)";
      // insert rows
      conn.createStatement()
        .execute(String.format(upsertQuery, dataTableName, 1, "city1", "name1", "uname1", 95051));
      assertMetadata(conn, PTable.ImmutableStorageScheme.ONE_CELL_PER_COLUMN,
        PTable.QualifierEncodingScheme.NON_ENCODED_QUALIFIERS, dataTableName);
      // First transform: forward to SINGLE_CELL_ARRAY_WITH_OFFSETS.
      conn.createStatement().execute("ALTER TABLE " + dataTableName
        + " SET IMMUTABLE_STORAGE_SCHEME=SINGLE_CELL_ARRAY_WITH_OFFSETS, COLUMN_ENCODED_BYTES=2");
      SystemTransformRecord record = Transform.getTransformRecord(null, dataTableName, null, null,
        conn.unwrap(PhoenixConnection.class));
      assertNotNull(record);
      waitForTransformToGetToState(conn.unwrap(PhoenixConnection.class), record,
        PTable.TransformStatus.COMPLETED);
      assertMetadata(conn, PTable.ImmutableStorageScheme.SINGLE_CELL_ARRAY_WITH_OFFSETS,
        PTable.QualifierEncodingScheme.TWO_BYTE_QUALIFIERS, record.getNewPhysicalTableName());
      conn.createStatement()
        .execute(String.format(upsertQuery, dataTableName, 2, "city2", "name2", "uname2", 95052));
      assertEquals(numOfRows + 1, countRows(conn, dataTableName));
      // Make sure that we are not accessing the original table. We are supposed to read from the
      // new table above
      Admin admin = conn.unwrap(PhoenixConnection.class).getQueryServices().getAdmin();
      TableName hTableName = TableName.valueOf(dataTableName);
      admin.disableTable(hTableName);
      admin.deleteTable(hTableName);
      // Removing this so that we are sure that we are not picking up the old transform record.
      Transform.removeTransformRecord(record, conn.unwrap(PhoenixConnection.class));
      // Second transform: back to ONE_CELL_PER_COLUMN / non-encoded.
      conn.createStatement().execute("ALTER TABLE " + dataTableName
        + " SET IMMUTABLE_STORAGE_SCHEME=ONE_CELL_PER_COLUMN, COLUMN_ENCODED_BYTES=0");
      record = Transform.getTransformRecord(null, dataTableName, null, null,
        conn.unwrap(PhoenixConnection.class));
      assertNotNull(record);
      waitForTransformToGetToState(conn.unwrap(PhoenixConnection.class), record,
        PTable.TransformStatus.COMPLETED);
      assertMetadata(conn, PTable.ImmutableStorageScheme.ONE_CELL_PER_COLUMN,
        PTable.QualifierEncodingScheme.NON_ENCODED_QUALIFIERS, record.getNewPhysicalTableName());
      conn.createStatement()
        .execute(String.format(upsertQuery, dataTableName, 3, "city3", "name3", "uname3", 95053));
      assertEquals(numOfRows + 2, countRows(conn, dataTableName));
      // Full scan must see all three rows with intact PK and non-PK columns.
      ResultSet rs = conn.createStatement()
        .executeQuery("SELECT ID, ZIP, NAME, NAME_PK, CITY_PK FROM " + dataTableName);
      assertTrue(rs.next());
      assertEquals("1", rs.getString(1));
      assertEquals(95051, rs.getInt(2));
      assertEquals("uname1", rs.getString(3));
      assertEquals("name1", rs.getString(4));
      assertEquals("city1", rs.getString(5));
      assertTrue(rs.next());
      assertEquals("2", rs.getString(1));
      assertEquals(95052, rs.getInt(2));
      assertEquals("uname2", rs.getString(3));
      assertEquals("name2", rs.getString(4));
      assertEquals("city2", rs.getString(5));
      assertTrue(rs.next());
      assertEquals("3", rs.getString(1));
      assertEquals(95053, rs.getInt(2));
      assertEquals("uname3", rs.getString(3));
      assertEquals("name3", rs.getString(4));
      assertEquals("city3", rs.getString(5));
      assertFalse(rs.next());
    }
  }
  /**
   * Helper (invoked by the two @Test wrappers below): one client transforms the table while a
   * second client — using a different connection principal so it does not share cached
   * metadata — keeps upserting; verifies the second client follows the cutover.
   */
  public void testDifferentClientAccessTransformedTable(boolean isImmutable) throws Exception {
    String dataTableName = "TBL_" + generateUniqueName();
    try (Connection conn1 = DriverManager.getConnection(getUrl(), testProps)) {
      conn1.setAutoCommit(true);
      int numOfRows = 1;
      TransformToolIT.createTableAndUpsertRows(conn1, dataTableName, numOfRows,
        isImmutable ? " IMMUTABLE_ROWS=true" : "");
      // Distinct principal => distinct ConnectionQueryServices (separate metadata cache).
      String url2 =
        ConnectionInfo.create(url, null, null).withPrincipal("LongRunningQueries").toUrl();
      try (Connection conn2 =
          DriverManager.getConnection(url2, PropertiesUtil.deepCopy(TEST_PROPERTIES))) {
        conn2.setAutoCommit(true);
        TransformToolIT.upsertRows(conn2, dataTableName, 2, 1);
        conn1.createStatement().execute("ALTER TABLE " + dataTableName
          + " SET IMMUTABLE_STORAGE_SCHEME=SINGLE_CELL_ARRAY_WITH_OFFSETS, COLUMN_ENCODED_BYTES=2");
        SystemTransformRecord record = Transform.getTransformRecord(null, dataTableName, null, null,
          conn1.unwrap(PhoenixConnection.class));
        assertNotNull(record);
        waitForTransformToGetToState(conn1.unwrap(PhoenixConnection.class), record,
          PTable.TransformStatus.COMPLETED);
        assertMetadata(conn1, PTable.ImmutableStorageScheme.SINGLE_CELL_ARRAY_WITH_OFFSETS,
          PTable.QualifierEncodingScheme.TWO_BYTE_QUALIFIERS, record.getNewPhysicalTableName());
        // A connection does transform and another connection doesn't try to upsert into old table
        TransformToolIT.upsertRows(conn2, dataTableName, 3, 1);
        ResultSet rs =
          conn2.createStatement().executeQuery("SELECT ID, NAME, ZIP FROM " + dataTableName);
        assertTrue(rs.next());
        assertEquals("1", rs.getString(1));
        assertEquals("uname1", rs.getString(2));
        assertEquals(95051, rs.getInt(3));
        assertTrue(rs.next());
        assertEquals("2", rs.getString(1));
        assertEquals("uname2", rs.getString(2));
        assertEquals(95052, rs.getInt(3));
        assertTrue(rs.next());
        assertEquals("3", rs.getString(1));
        assertEquals("uname3", rs.getString(2));
        assertEquals(95053, rs.getInt(3));
        assertFalse(rs.next());
      }
    }
  }
  @Test
  public void testDifferentClientAccessTransformedTable_mutable() throws Exception {
    // A connection does transform and another connection doesn't try to upsert into old table
    testDifferentClientAccessTransformedTable(false);
  }

  @Test
  public void testDifferentClientAccessTransformedTable_immutable() throws Exception {
    // A connection does transform and another connection doesn't try to upsert into old table
    testDifferentClientAccessTransformedTable(true);
  }
  /**
   * With TransformMonitorTask disabled, the transform must not cut over automatically and the
   * TRANSFORM_MONITOR task is expected to FAIL. The monitor is re-enabled in the finally block
   * so other tests are unaffected.
   */
  @Test
  public void testTransformTable_cutoverNotAuto() throws Exception {
    // Transform index and see it is not auto cutover
    String schemaName = generateUniqueName();
    String dataTableName = "TBL_" + generateUniqueName();
    String dataTableFullName = SchemaUtil.getTableName(schemaName, dataTableName);
    try (Connection conn = DriverManager.getConnection(getUrl(), testProps)) {
      TransformMonitorTask.disableTransformMonitorTask(true);
      conn.setAutoCommit(true);
      int numOfRows = 1;
      TransformToolIT.createTableAndUpsertRows(conn, dataTableFullName, numOfRows, "");
      assertMetadata(conn, PTable.ImmutableStorageScheme.ONE_CELL_PER_COLUMN,
        PTable.QualifierEncodingScheme.NON_ENCODED_QUALIFIERS, dataTableFullName);
      conn.createStatement().execute("ALTER TABLE " + dataTableFullName
        + " SET IMMUTABLE_STORAGE_SCHEME=SINGLE_CELL_ARRAY_WITH_OFFSETS, COLUMN_ENCODED_BYTES=2");
      SystemTransformRecord record = Transform.getTransformRecord(schemaName, dataTableName, null,
        null, conn.unwrap(PhoenixConnection.class));
      assertNotNull(record);
      // Wait for task to fail
      waitForTaskState(conn, PTable.TaskType.TRANSFORM_MONITOR, dataTableName,
        PTable.TaskStatus.FAILED);
    } finally {
      TransformMonitorTask.disableTransformMonitorTask(false);
    }
  }
  /**
   * Creates one view before the transform and a second view (with a different WHERE clause)
   * after it, and verifies both resolve and return the expected rows against the new table.
   */
  @Test
  public void testTransformMonitor_tableWithViews_OnOldAndNew() throws Exception {
    // Create view before and after transform with different select statements and check
    String schemaName = "S_" + generateUniqueName();
    String dataTableName = "TBL_" + generateUniqueName();
    String fullDataTableName = SchemaUtil.getTableName(schemaName, dataTableName);
    String view1 = "VW1_" + generateUniqueName();
    String view2 = "VW2_" + generateUniqueName();
    String createTblStr = "CREATE TABLE %s (ID INTEGER NOT NULL, PK1 VARCHAR NOT NULL"
      + ", NAME VARCHAR CONSTRAINT PK_1 PRIMARY KEY (ID, PK1)) ";
    String createViewStr =
      "CREATE VIEW %s (VIEW_COL1 VARCHAR) AS SELECT * FROM %s WHERE NAME='%s'";
    try (Connection conn = DriverManager.getConnection(getUrl(), testProps)) {
      conn.setAutoCommit(true);
      conn.createStatement().execute(String.format(createTblStr, fullDataTableName));
      int numOfRows = 2;
      String upsertQuery = String.format("UPSERT INTO %s VALUES(?, ?, ?)", fullDataTableName);
      PreparedStatement stmt1 = conn.prepareStatement(upsertQuery);
      // Rows (i, "pk<i>", "name<i>") for i in 1..numOfRows.
      for (int i = 1; i <= numOfRows; i++) {
        stmt1.setInt(1, i);
        stmt1.setString(2, "pk" + i);
        stmt1.setString(3, "name" + i);
        stmt1.execute();
      }
      // View created BEFORE the transform (filters NAME='name1').
      conn.createStatement()
        .execute(String.format(createViewStr, view1, fullDataTableName, "name1"));
      assertMetadata(conn, PTable.ImmutableStorageScheme.ONE_CELL_PER_COLUMN,
        PTable.QualifierEncodingScheme.NON_ENCODED_QUALIFIERS, fullDataTableName);
      conn.createStatement().execute("ALTER TABLE " + fullDataTableName
        + " SET IMMUTABLE_STORAGE_SCHEME=SINGLE_CELL_ARRAY_WITH_OFFSETS, COLUMN_ENCODED_BYTES=2");
      SystemTransformRecord record = Transform.getTransformRecord(schemaName, dataTableName, null,
        null, conn.unwrap(PhoenixConnection.class));
      assertNotNull(record);
      waitForTransformToGetToState(conn.unwrap(PhoenixConnection.class), record,
        PTable.TransformStatus.COMPLETED);
      assertMetadata(conn, PTable.ImmutableStorageScheme.SINGLE_CELL_ARRAY_WITH_OFFSETS,
        PTable.QualifierEncodingScheme.TWO_BYTE_QUALIFIERS, record.getNewPhysicalTableName());
      // View created AFTER the transform (filters NAME='name2').
      conn.createStatement()
        .execute(String.format(createViewStr, view2, fullDataTableName, "name2"));
      ResultSet rs = conn.createStatement().executeQuery("SELECT * FROM " + view2);
      assertTrue(rs.next());
      assertEquals(2, rs.getInt(1));
      assertEquals("pk2", rs.getString(2));
      assertFalse(rs.next());
      rs = conn.createStatement().executeQuery("SELECT * FROM " + view1);
      assertTrue(rs.next());
      assertEquals(1, rs.getInt(1));
      assertEquals("pk1", rs.getString(2));
      assertFalse(rs.next());
    }
  }
public static void waitForTransformToGetToState(PhoenixConnection conn,
SystemTransformRecord record, PTable.TransformStatus status)
throws InterruptedException, SQLException {
int maxTries = 250, nTries = 0;
String lastStatus = "";
do {
if (status.name().equals(record.getTransformStatus())) {
return;
}
Thread.sleep(500);
record = Transform.getTransformRecord(record.getSchemaName(), record.getLogicalTableName(),
record.getLogicalParentName(), record.getTenantId(), conn);
lastStatus = record.getTransformStatus();
} while (++nTries < maxTries);
try {
SingleCellIndexIT.dumpTable("SYSTEM.TASK");
} catch (Exception e) {
}
fail("Ran out of time waiting for transform state to become " + status + " but it was "
+ lastStatus);
}
}
|
googleapis/google-cloud-java | 36,930 | java-aiplatform/proto-google-cloud-aiplatform-v1/src/main/java/com/google/cloud/aiplatform/v1/TunedModelRef.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/aiplatform/v1/tuning_job.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.aiplatform.v1;
/**
*
*
* <pre>
* TunedModel Reference for legacy model migration.
* </pre>
*
* Protobuf type {@code google.cloud.aiplatform.v1.TunedModelRef}
*/
public final class TunedModelRef extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.aiplatform.v1.TunedModelRef)
TunedModelRefOrBuilder {
private static final long serialVersionUID = 0L;
// Use TunedModelRef.newBuilder() to construct.
private TunedModelRef(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private TunedModelRef() {}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
return new TunedModelRef();
}
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.aiplatform.v1.TuningJobProto
.internal_static_google_cloud_aiplatform_v1_TunedModelRef_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.aiplatform.v1.TuningJobProto
.internal_static_google_cloud_aiplatform_v1_TunedModelRef_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.aiplatform.v1.TunedModelRef.class,
com.google.cloud.aiplatform.v1.TunedModelRef.Builder.class);
}
private int tunedModelRefCase_ = 0;
@SuppressWarnings("serial")
private java.lang.Object tunedModelRef_;
public enum TunedModelRefCase
implements
com.google.protobuf.Internal.EnumLite,
com.google.protobuf.AbstractMessage.InternalOneOfEnum {
TUNED_MODEL(1),
TUNING_JOB(2),
PIPELINE_JOB(3),
TUNEDMODELREF_NOT_SET(0);
private final int value;
private TunedModelRefCase(int value) {
this.value = value;
}
/**
* @param value The number of the enum to look for.
* @return The enum associated with the given number.
* @deprecated Use {@link #forNumber(int)} instead.
*/
@java.lang.Deprecated
public static TunedModelRefCase valueOf(int value) {
return forNumber(value);
}
public static TunedModelRefCase forNumber(int value) {
switch (value) {
case 1:
return TUNED_MODEL;
case 2:
return TUNING_JOB;
case 3:
return PIPELINE_JOB;
case 0:
return TUNEDMODELREF_NOT_SET;
default:
return null;
}
}
public int getNumber() {
return this.value;
}
};
public TunedModelRefCase getTunedModelRefCase() {
return TunedModelRefCase.forNumber(tunedModelRefCase_);
}
public static final int TUNED_MODEL_FIELD_NUMBER = 1;
/**
*
*
* <pre>
* Support migration from model registry.
* </pre>
*
* <code>string tuned_model = 1 [(.google.api.resource_reference) = { ... }</code>
*
* @return Whether the tunedModel field is set.
*/
public boolean hasTunedModel() {
return tunedModelRefCase_ == 1;
}
/**
*
*
* <pre>
* Support migration from model registry.
* </pre>
*
* <code>string tuned_model = 1 [(.google.api.resource_reference) = { ... }</code>
*
* @return The tunedModel.
*/
public java.lang.String getTunedModel() {
java.lang.Object ref = "";
if (tunedModelRefCase_ == 1) {
ref = tunedModelRef_;
}
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
if (tunedModelRefCase_ == 1) {
tunedModelRef_ = s;
}
return s;
}
}
/**
*
*
* <pre>
* Support migration from model registry.
* </pre>
*
* <code>string tuned_model = 1 [(.google.api.resource_reference) = { ... }</code>
*
* @return The bytes for tunedModel.
*/
public com.google.protobuf.ByteString getTunedModelBytes() {
java.lang.Object ref = "";
if (tunedModelRefCase_ == 1) {
ref = tunedModelRef_;
}
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
if (tunedModelRefCase_ == 1) {
tunedModelRef_ = b;
}
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
public static final int TUNING_JOB_FIELD_NUMBER = 2;
/**
*
*
* <pre>
* Support migration from tuning job list page, from gemini-1.0-pro-002
* to 1.5 and above.
* </pre>
*
* <code>string tuning_job = 2 [(.google.api.resource_reference) = { ... }</code>
*
* @return Whether the tuningJob field is set.
*/
public boolean hasTuningJob() {
return tunedModelRefCase_ == 2;
}
/**
*
*
* <pre>
* Support migration from tuning job list page, from gemini-1.0-pro-002
* to 1.5 and above.
* </pre>
*
* <code>string tuning_job = 2 [(.google.api.resource_reference) = { ... }</code>
*
* @return The tuningJob.
*/
public java.lang.String getTuningJob() {
java.lang.Object ref = "";
if (tunedModelRefCase_ == 2) {
ref = tunedModelRef_;
}
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
if (tunedModelRefCase_ == 2) {
tunedModelRef_ = s;
}
return s;
}
}
/**
*
*
* <pre>
* Support migration from tuning job list page, from gemini-1.0-pro-002
* to 1.5 and above.
* </pre>
*
* <code>string tuning_job = 2 [(.google.api.resource_reference) = { ... }</code>
*
* @return The bytes for tuningJob.
*/
public com.google.protobuf.ByteString getTuningJobBytes() {
java.lang.Object ref = "";
if (tunedModelRefCase_ == 2) {
ref = tunedModelRef_;
}
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
if (tunedModelRefCase_ == 2) {
tunedModelRef_ = b;
}
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
public static final int PIPELINE_JOB_FIELD_NUMBER = 3;
/**
*
*
* <pre>
* Support migration from tuning job list page, from bison model to gemini
* model.
* </pre>
*
* <code>string pipeline_job = 3 [(.google.api.resource_reference) = { ... }</code>
*
* @return Whether the pipelineJob field is set.
*/
public boolean hasPipelineJob() {
return tunedModelRefCase_ == 3;
}
/**
*
*
* <pre>
* Support migration from tuning job list page, from bison model to gemini
* model.
* </pre>
*
* <code>string pipeline_job = 3 [(.google.api.resource_reference) = { ... }</code>
*
* @return The pipelineJob.
*/
public java.lang.String getPipelineJob() {
java.lang.Object ref = "";
if (tunedModelRefCase_ == 3) {
ref = tunedModelRef_;
}
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
if (tunedModelRefCase_ == 3) {
tunedModelRef_ = s;
}
return s;
}
}
/**
*
*
* <pre>
* Support migration from tuning job list page, from bison model to gemini
* model.
* </pre>
*
* <code>string pipeline_job = 3 [(.google.api.resource_reference) = { ... }</code>
*
* @return The bytes for pipelineJob.
*/
public com.google.protobuf.ByteString getPipelineJobBytes() {
java.lang.Object ref = "";
if (tunedModelRefCase_ == 3) {
ref = tunedModelRef_;
}
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
if (tunedModelRefCase_ == 3) {
tunedModelRef_ = b;
}
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
@java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
if (tunedModelRefCase_ == 1) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 1, tunedModelRef_);
}
if (tunedModelRefCase_ == 2) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 2, tunedModelRef_);
}
if (tunedModelRefCase_ == 3) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 3, tunedModelRef_);
}
getUnknownFields().writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
if (tunedModelRefCase_ == 1) {
size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, tunedModelRef_);
}
if (tunedModelRefCase_ == 2) {
size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, tunedModelRef_);
}
if (tunedModelRefCase_ == 3) {
size += com.google.protobuf.GeneratedMessageV3.computeStringSize(3, tunedModelRef_);
}
size += getUnknownFields().getSerializedSize();
memoizedSize = size;
return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof com.google.cloud.aiplatform.v1.TunedModelRef)) {
return super.equals(obj);
}
com.google.cloud.aiplatform.v1.TunedModelRef other =
(com.google.cloud.aiplatform.v1.TunedModelRef) obj;
if (!getTunedModelRefCase().equals(other.getTunedModelRefCase())) return false;
switch (tunedModelRefCase_) {
case 1:
if (!getTunedModel().equals(other.getTunedModel())) return false;
break;
case 2:
if (!getTuningJob().equals(other.getTuningJob())) return false;
break;
case 3:
if (!getPipelineJob().equals(other.getPipelineJob())) return false;
break;
case 0:
default:
}
if (!getUnknownFields().equals(other.getUnknownFields())) return false;
return true;
}
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
switch (tunedModelRefCase_) {
case 1:
hash = (37 * hash) + TUNED_MODEL_FIELD_NUMBER;
hash = (53 * hash) + getTunedModel().hashCode();
break;
case 2:
hash = (37 * hash) + TUNING_JOB_FIELD_NUMBER;
hash = (53 * hash) + getTuningJob().hashCode();
break;
case 3:
hash = (37 * hash) + PIPELINE_JOB_FIELD_NUMBER;
hash = (53 * hash) + getPipelineJob().hashCode();
break;
case 0:
default:
}
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
public static com.google.cloud.aiplatform.v1.TunedModelRef parseFrom(java.nio.ByteBuffer data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.aiplatform.v1.TunedModelRef parseFrom(
java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.aiplatform.v1.TunedModelRef parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.aiplatform.v1.TunedModelRef parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.aiplatform.v1.TunedModelRef parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.aiplatform.v1.TunedModelRef parseFrom(
byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.aiplatform.v1.TunedModelRef parseFrom(java.io.InputStream input)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.aiplatform.v1.TunedModelRef parseFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.aiplatform.v1.TunedModelRef parseDelimitedFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
}
public static com.google.cloud.aiplatform.v1.TunedModelRef parseDelimitedFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.aiplatform.v1.TunedModelRef parseFrom(
com.google.protobuf.CodedInputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.aiplatform.v1.TunedModelRef parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() {
return newBuilder();
}
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(com.google.cloud.aiplatform.v1.TunedModelRef prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
*
*
* <pre>
* TunedModel Reference for legacy model migration.
* </pre>
*
* Protobuf type {@code google.cloud.aiplatform.v1.TunedModelRef}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.cloud.aiplatform.v1.TunedModelRef)
com.google.cloud.aiplatform.v1.TunedModelRefOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.aiplatform.v1.TuningJobProto
.internal_static_google_cloud_aiplatform_v1_TunedModelRef_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.aiplatform.v1.TuningJobProto
.internal_static_google_cloud_aiplatform_v1_TunedModelRef_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.aiplatform.v1.TunedModelRef.class,
com.google.cloud.aiplatform.v1.TunedModelRef.Builder.class);
}
// Construct using com.google.cloud.aiplatform.v1.TunedModelRef.newBuilder()
private Builder() {}
private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
}
@java.lang.Override
public Builder clear() {
super.clear();
bitField0_ = 0;
tunedModelRefCase_ = 0;
tunedModelRef_ = null;
return this;
}
@java.lang.Override
public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
return com.google.cloud.aiplatform.v1.TuningJobProto
.internal_static_google_cloud_aiplatform_v1_TunedModelRef_descriptor;
}
@java.lang.Override
public com.google.cloud.aiplatform.v1.TunedModelRef getDefaultInstanceForType() {
return com.google.cloud.aiplatform.v1.TunedModelRef.getDefaultInstance();
}
@java.lang.Override
public com.google.cloud.aiplatform.v1.TunedModelRef build() {
com.google.cloud.aiplatform.v1.TunedModelRef result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public com.google.cloud.aiplatform.v1.TunedModelRef buildPartial() {
com.google.cloud.aiplatform.v1.TunedModelRef result =
new com.google.cloud.aiplatform.v1.TunedModelRef(this);
if (bitField0_ != 0) {
buildPartial0(result);
}
buildPartialOneofs(result);
onBuilt();
return result;
}
private void buildPartial0(com.google.cloud.aiplatform.v1.TunedModelRef result) {
int from_bitField0_ = bitField0_;
}
private void buildPartialOneofs(com.google.cloud.aiplatform.v1.TunedModelRef result) {
result.tunedModelRefCase_ = tunedModelRefCase_;
result.tunedModelRef_ = this.tunedModelRef_;
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof com.google.cloud.aiplatform.v1.TunedModelRef) {
return mergeFrom((com.google.cloud.aiplatform.v1.TunedModelRef) other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(com.google.cloud.aiplatform.v1.TunedModelRef other) {
if (other == com.google.cloud.aiplatform.v1.TunedModelRef.getDefaultInstance()) return this;
switch (other.getTunedModelRefCase()) {
case TUNED_MODEL:
{
tunedModelRefCase_ = 1;
tunedModelRef_ = other.tunedModelRef_;
onChanged();
break;
}
case TUNING_JOB:
{
tunedModelRefCase_ = 2;
tunedModelRef_ = other.tunedModelRef_;
onChanged();
break;
}
case PIPELINE_JOB:
{
tunedModelRefCase_ = 3;
tunedModelRef_ = other.tunedModelRef_;
onChanged();
break;
}
case TUNEDMODELREF_NOT_SET:
{
break;
}
}
this.mergeUnknownFields(other.getUnknownFields());
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
return true;
}
@java.lang.Override
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
case 10:
{
java.lang.String s = input.readStringRequireUtf8();
tunedModelRefCase_ = 1;
tunedModelRef_ = s;
break;
} // case 10
case 18:
{
java.lang.String s = input.readStringRequireUtf8();
tunedModelRefCase_ = 2;
tunedModelRef_ = s;
break;
} // case 18
case 26:
{
java.lang.String s = input.readStringRequireUtf8();
tunedModelRefCase_ = 3;
tunedModelRef_ = s;
break;
} // case 26
default:
{
if (!super.parseUnknownField(input, extensionRegistry, tag)) {
done = true; // was an endgroup tag
}
break;
} // default:
} // switch (tag)
} // while (!done)
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.unwrapIOException();
} finally {
onChanged();
} // finally
return this;
}
private int tunedModelRefCase_ = 0;
private java.lang.Object tunedModelRef_;
public TunedModelRefCase getTunedModelRefCase() {
return TunedModelRefCase.forNumber(tunedModelRefCase_);
}
public Builder clearTunedModelRef() {
tunedModelRefCase_ = 0;
tunedModelRef_ = null;
onChanged();
return this;
}
private int bitField0_;
/**
*
*
* <pre>
* Support migration from model registry.
* </pre>
*
* <code>string tuned_model = 1 [(.google.api.resource_reference) = { ... }</code>
*
* @return Whether the tunedModel field is set.
*/
@java.lang.Override
public boolean hasTunedModel() {
return tunedModelRefCase_ == 1;
}
/**
*
*
* <pre>
* Support migration from model registry.
* </pre>
*
* <code>string tuned_model = 1 [(.google.api.resource_reference) = { ... }</code>
*
* @return The tunedModel.
*/
@java.lang.Override
public java.lang.String getTunedModel() {
java.lang.Object ref = "";
if (tunedModelRefCase_ == 1) {
ref = tunedModelRef_;
}
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
if (tunedModelRefCase_ == 1) {
tunedModelRef_ = s;
}
return s;
} else {
return (java.lang.String) ref;
}
}
/**
*
*
* <pre>
* Support migration from model registry.
* </pre>
*
* <code>string tuned_model = 1 [(.google.api.resource_reference) = { ... }</code>
*
* @return The bytes for tunedModel.
*/
@java.lang.Override
public com.google.protobuf.ByteString getTunedModelBytes() {
java.lang.Object ref = "";
if (tunedModelRefCase_ == 1) {
ref = tunedModelRef_;
}
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
if (tunedModelRefCase_ == 1) {
tunedModelRef_ = b;
}
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
*
*
* <pre>
* Support migration from model registry.
* </pre>
*
* <code>string tuned_model = 1 [(.google.api.resource_reference) = { ... }</code>
*
* @param value The tunedModel to set.
* @return This builder for chaining.
*/
public Builder setTunedModel(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
tunedModelRefCase_ = 1;
tunedModelRef_ = value;
onChanged();
return this;
}
/**
*
*
* <pre>
* Support migration from model registry.
* </pre>
*
* <code>string tuned_model = 1 [(.google.api.resource_reference) = { ... }</code>
*
* @return This builder for chaining.
*/
public Builder clearTunedModel() {
if (tunedModelRefCase_ == 1) {
tunedModelRefCase_ = 0;
tunedModelRef_ = null;
onChanged();
}
return this;
}
/**
*
*
* <pre>
* Support migration from model registry.
* </pre>
*
* <code>string tuned_model = 1 [(.google.api.resource_reference) = { ... }</code>
*
* @param value The bytes for tunedModel to set.
* @return This builder for chaining.
*/
public Builder setTunedModelBytes(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
checkByteStringIsUtf8(value);
tunedModelRefCase_ = 1;
tunedModelRef_ = value;
onChanged();
return this;
}
/**
*
*
* <pre>
* Support migration from tuning job list page, from gemini-1.0-pro-002
* to 1.5 and above.
* </pre>
*
* <code>string tuning_job = 2 [(.google.api.resource_reference) = { ... }</code>
*
* @return Whether the tuningJob field is set.
*/
@java.lang.Override
public boolean hasTuningJob() {
return tunedModelRefCase_ == 2;
}
/**
*
*
* <pre>
* Support migration from tuning job list page, from gemini-1.0-pro-002
* to 1.5 and above.
* </pre>
*
* <code>string tuning_job = 2 [(.google.api.resource_reference) = { ... }</code>
*
* @return The tuningJob.
*/
@java.lang.Override
public java.lang.String getTuningJob() {
java.lang.Object ref = "";
if (tunedModelRefCase_ == 2) {
ref = tunedModelRef_;
}
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
if (tunedModelRefCase_ == 2) {
tunedModelRef_ = s;
}
return s;
} else {
return (java.lang.String) ref;
}
}
/**
*
*
* <pre>
* Support migration from tuning job list page, from gemini-1.0-pro-002
* to 1.5 and above.
* </pre>
*
* <code>string tuning_job = 2 [(.google.api.resource_reference) = { ... }</code>
*
* @return The bytes for tuningJob.
*/
@java.lang.Override
public com.google.protobuf.ByteString getTuningJobBytes() {
java.lang.Object ref = "";
if (tunedModelRefCase_ == 2) {
ref = tunedModelRef_;
}
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
if (tunedModelRefCase_ == 2) {
tunedModelRef_ = b;
}
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
*
*
* <pre>
* Support migration from tuning job list page, from gemini-1.0-pro-002
* to 1.5 and above.
* </pre>
*
* <code>string tuning_job = 2 [(.google.api.resource_reference) = { ... }</code>
*
* @param value The tuningJob to set.
* @return This builder for chaining.
*/
public Builder setTuningJob(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
tunedModelRefCase_ = 2;
tunedModelRef_ = value;
onChanged();
return this;
}
/**
*
*
* <pre>
* Support migration from tuning job list page, from gemini-1.0-pro-002
* to 1.5 and above.
* </pre>
*
* <code>string tuning_job = 2 [(.google.api.resource_reference) = { ... }</code>
*
* @return This builder for chaining.
*/
public Builder clearTuningJob() {
if (tunedModelRefCase_ == 2) {
tunedModelRefCase_ = 0;
tunedModelRef_ = null;
onChanged();
}
return this;
}
/**
*
*
* <pre>
* Support migration from tuning job list page, from gemini-1.0-pro-002
* to 1.5 and above.
* </pre>
*
* <code>string tuning_job = 2 [(.google.api.resource_reference) = { ... }</code>
*
* @param value The bytes for tuningJob to set.
* @return This builder for chaining.
*/
public Builder setTuningJobBytes(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
checkByteStringIsUtf8(value);
tunedModelRefCase_ = 2;
tunedModelRef_ = value;
onChanged();
return this;
}
/**
*
*
* <pre>
* Support migration from tuning job list page, from bison model to gemini
* model.
* </pre>
*
* <code>string pipeline_job = 3 [(.google.api.resource_reference) = { ... }</code>
*
* @return Whether the pipelineJob field is set.
*/
@java.lang.Override
public boolean hasPipelineJob() {
return tunedModelRefCase_ == 3;
}
/**
*
*
* <pre>
* Support migration from tuning job list page, from bison model to gemini
* model.
* </pre>
*
* <code>string pipeline_job = 3 [(.google.api.resource_reference) = { ... }</code>
*
* @return The pipelineJob.
*/
@java.lang.Override
public java.lang.String getPipelineJob() {
java.lang.Object ref = "";
if (tunedModelRefCase_ == 3) {
ref = tunedModelRef_;
}
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
if (tunedModelRefCase_ == 3) {
tunedModelRef_ = s;
}
return s;
} else {
return (java.lang.String) ref;
}
}
/**
*
*
* <pre>
* Support migration from tuning job list page, from bison model to gemini
* model.
* </pre>
*
* <code>string pipeline_job = 3 [(.google.api.resource_reference) = { ... }</code>
*
* @return The bytes for pipelineJob.
*/
@java.lang.Override
public com.google.protobuf.ByteString getPipelineJobBytes() {
java.lang.Object ref = "";
if (tunedModelRefCase_ == 3) {
ref = tunedModelRef_;
}
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
if (tunedModelRefCase_ == 3) {
tunedModelRef_ = b;
}
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
*
*
* <pre>
* Support migration from tuning job list page, from bison model to gemini
* model.
* </pre>
*
* <code>string pipeline_job = 3 [(.google.api.resource_reference) = { ... }</code>
*
* @param value The pipelineJob to set.
* @return This builder for chaining.
*/
public Builder setPipelineJob(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
tunedModelRefCase_ = 3;
tunedModelRef_ = value;
onChanged();
return this;
}
/**
*
*
* <pre>
* Support migration from tuning job list page, from bison model to gemini
* model.
* </pre>
*
* <code>string pipeline_job = 3 [(.google.api.resource_reference) = { ... }</code>
*
* @return This builder for chaining.
*/
public Builder clearPipelineJob() {
if (tunedModelRefCase_ == 3) {
tunedModelRefCase_ = 0;
tunedModelRef_ = null;
onChanged();
}
return this;
}
/**
*
*
* <pre>
* Support migration from tuning job list page, from bison model to gemini
* model.
* </pre>
*
* <code>string pipeline_job = 3 [(.google.api.resource_reference) = { ... }</code>
*
* @param value The bytes for pipelineJob to set.
* @return This builder for chaining.
*/
public Builder setPipelineJobBytes(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
checkByteStringIsUtf8(value);
tunedModelRefCase_ = 3;
tunedModelRef_ = value;
onChanged();
return this;
}
@java.lang.Override
public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:google.cloud.aiplatform.v1.TunedModelRef)
}
// @@protoc_insertion_point(class_scope:google.cloud.aiplatform.v1.TunedModelRef)
private static final com.google.cloud.aiplatform.v1.TunedModelRef DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new com.google.cloud.aiplatform.v1.TunedModelRef();
}
public static com.google.cloud.aiplatform.v1.TunedModelRef getDefaultInstance() {
return DEFAULT_INSTANCE;
}
  // Parser that delegates to Builder.mergeFrom; on any failure it attaches the
  // partially-built message to the thrown InvalidProtocolBufferException so callers
  // can inspect what was decoded before the error.
  private static final com.google.protobuf.Parser<TunedModelRef> PARSER =
      new com.google.protobuf.AbstractParser<TunedModelRef>() {
        @java.lang.Override
        public TunedModelRef parsePartialFrom(
            com.google.protobuf.CodedInputStream input,
            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
            throws com.google.protobuf.InvalidProtocolBufferException {
          Builder builder = newBuilder();
          try {
            builder.mergeFrom(input, extensionRegistry);
          } catch (com.google.protobuf.InvalidProtocolBufferException e) {
            throw e.setUnfinishedMessage(builder.buildPartial());
          } catch (com.google.protobuf.UninitializedMessageException e) {
            throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
          } catch (java.io.IOException e) {
            // Wrap plain I/O errors in the protobuf exception type expected by callers.
            throw new com.google.protobuf.InvalidProtocolBufferException(e)
                .setUnfinishedMessage(builder.buildPartial());
          }
          return builder.buildPartial();
        }
      };

  // Static accessor for the message parser.
  public static com.google.protobuf.Parser<TunedModelRef> parser() {
    return PARSER;
  }
  // Instance-level accessors required by the Message interface; both return the
  // class-wide singletons.
  @java.lang.Override
  public com.google.protobuf.Parser<TunedModelRef> getParserForType() {
    return PARSER;
  }

  @java.lang.Override
  public com.google.cloud.aiplatform.v1.TunedModelRef getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }
}
|
googleapis/google-cloud-java | 37,007 | java-datalineage/proto-google-cloud-datalineage-v1/src/main/java/com/google/cloud/datacatalog/lineage/v1/UpdateRunRequest.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/datacatalog/lineage/v1/lineage.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.datacatalog.lineage.v1;
/**
*
*
* <pre>
* Request message for
* [UpdateRun][google.cloud.datacatalog.lineage.v1.UpdateRun].
* </pre>
*
* Protobuf type {@code google.cloud.datacatalog.lineage.v1.UpdateRunRequest}
*/
public final class UpdateRunRequest extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.datacatalog.lineage.v1.UpdateRunRequest)
UpdateRunRequestOrBuilder {
private static final long serialVersionUID = 0L;
// Use UpdateRunRequest.newBuilder() to construct.
private UpdateRunRequest(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private UpdateRunRequest() {}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
return new UpdateRunRequest();
}
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.datacatalog.lineage.v1.LineageProto
.internal_static_google_cloud_datacatalog_lineage_v1_UpdateRunRequest_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.datacatalog.lineage.v1.LineageProto
.internal_static_google_cloud_datacatalog_lineage_v1_UpdateRunRequest_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.datacatalog.lineage.v1.UpdateRunRequest.class,
com.google.cloud.datacatalog.lineage.v1.UpdateRunRequest.Builder.class);
}
private int bitField0_;
public static final int RUN_FIELD_NUMBER = 1;
private com.google.cloud.datacatalog.lineage.v1.Run run_;
/**
*
*
* <pre>
* Required. The lineage run to update.
*
* The run's `name` field is used to identify the run to update.
*
* Format:
* `projects/{project}/locations/{location}/processes/{process}/runs/{run}`.
* </pre>
*
* <code>
* .google.cloud.datacatalog.lineage.v1.Run run = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return Whether the run field is set.
*/
@java.lang.Override
public boolean hasRun() {
return ((bitField0_ & 0x00000001) != 0);
}
/**
*
*
* <pre>
* Required. The lineage run to update.
*
* The run's `name` field is used to identify the run to update.
*
* Format:
* `projects/{project}/locations/{location}/processes/{process}/runs/{run}`.
* </pre>
*
* <code>
* .google.cloud.datacatalog.lineage.v1.Run run = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return The run.
*/
@java.lang.Override
public com.google.cloud.datacatalog.lineage.v1.Run getRun() {
return run_ == null ? com.google.cloud.datacatalog.lineage.v1.Run.getDefaultInstance() : run_;
}
/**
*
*
* <pre>
* Required. The lineage run to update.
*
* The run's `name` field is used to identify the run to update.
*
* Format:
* `projects/{project}/locations/{location}/processes/{process}/runs/{run}`.
* </pre>
*
* <code>
* .google.cloud.datacatalog.lineage.v1.Run run = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
@java.lang.Override
public com.google.cloud.datacatalog.lineage.v1.RunOrBuilder getRunOrBuilder() {
return run_ == null ? com.google.cloud.datacatalog.lineage.v1.Run.getDefaultInstance() : run_;
}
public static final int UPDATE_MASK_FIELD_NUMBER = 2;
private com.google.protobuf.FieldMask updateMask_;
/**
*
*
* <pre>
* The list of fields to update. Currently not used. The whole message is
* updated.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2;</code>
*
* @return Whether the updateMask field is set.
*/
@java.lang.Override
public boolean hasUpdateMask() {
return ((bitField0_ & 0x00000002) != 0);
}
/**
*
*
* <pre>
* The list of fields to update. Currently not used. The whole message is
* updated.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2;</code>
*
* @return The updateMask.
*/
@java.lang.Override
public com.google.protobuf.FieldMask getUpdateMask() {
return updateMask_ == null ? com.google.protobuf.FieldMask.getDefaultInstance() : updateMask_;
}
/**
*
*
* <pre>
* The list of fields to update. Currently not used. The whole message is
* updated.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2;</code>
*/
@java.lang.Override
public com.google.protobuf.FieldMaskOrBuilder getUpdateMaskOrBuilder() {
return updateMask_ == null ? com.google.protobuf.FieldMask.getDefaultInstance() : updateMask_;
}
public static final int ALLOW_MISSING_FIELD_NUMBER = 3;
private boolean allowMissing_ = false;
/**
*
*
* <pre>
* If set to true and the run is not found, the request creates it.
* </pre>
*
* <code>bool allow_missing = 3;</code>
*
* @return The allowMissing.
*/
@java.lang.Override
public boolean getAllowMissing() {
return allowMissing_;
}
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
@java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
if (((bitField0_ & 0x00000001) != 0)) {
output.writeMessage(1, getRun());
}
if (((bitField0_ & 0x00000002) != 0)) {
output.writeMessage(2, getUpdateMask());
}
if (allowMissing_ != false) {
output.writeBool(3, allowMissing_);
}
getUnknownFields().writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
if (((bitField0_ & 0x00000001) != 0)) {
size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, getRun());
}
if (((bitField0_ & 0x00000002) != 0)) {
size += com.google.protobuf.CodedOutputStream.computeMessageSize(2, getUpdateMask());
}
if (allowMissing_ != false) {
size += com.google.protobuf.CodedOutputStream.computeBoolSize(3, allowMissing_);
}
size += getUnknownFields().getSerializedSize();
memoizedSize = size;
return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof com.google.cloud.datacatalog.lineage.v1.UpdateRunRequest)) {
return super.equals(obj);
}
com.google.cloud.datacatalog.lineage.v1.UpdateRunRequest other =
(com.google.cloud.datacatalog.lineage.v1.UpdateRunRequest) obj;
if (hasRun() != other.hasRun()) return false;
if (hasRun()) {
if (!getRun().equals(other.getRun())) return false;
}
if (hasUpdateMask() != other.hasUpdateMask()) return false;
if (hasUpdateMask()) {
if (!getUpdateMask().equals(other.getUpdateMask())) return false;
}
if (getAllowMissing() != other.getAllowMissing()) return false;
if (!getUnknownFields().equals(other.getUnknownFields())) return false;
return true;
}
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
if (hasRun()) {
hash = (37 * hash) + RUN_FIELD_NUMBER;
hash = (53 * hash) + getRun().hashCode();
}
if (hasUpdateMask()) {
hash = (37 * hash) + UPDATE_MASK_FIELD_NUMBER;
hash = (53 * hash) + getUpdateMask().hashCode();
}
hash = (37 * hash) + ALLOW_MISSING_FIELD_NUMBER;
hash = (53 * hash) + com.google.protobuf.Internal.hashBoolean(getAllowMissing());
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
public static com.google.cloud.datacatalog.lineage.v1.UpdateRunRequest parseFrom(
java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.datacatalog.lineage.v1.UpdateRunRequest parseFrom(
java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.datacatalog.lineage.v1.UpdateRunRequest parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.datacatalog.lineage.v1.UpdateRunRequest parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.datacatalog.lineage.v1.UpdateRunRequest parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.datacatalog.lineage.v1.UpdateRunRequest parseFrom(
byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.datacatalog.lineage.v1.UpdateRunRequest parseFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.datacatalog.lineage.v1.UpdateRunRequest parseFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.datacatalog.lineage.v1.UpdateRunRequest parseDelimitedFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
}
public static com.google.cloud.datacatalog.lineage.v1.UpdateRunRequest parseDelimitedFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.datacatalog.lineage.v1.UpdateRunRequest parseFrom(
com.google.protobuf.CodedInputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.datacatalog.lineage.v1.UpdateRunRequest parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() {
return newBuilder();
}
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(
com.google.cloud.datacatalog.lineage.v1.UpdateRunRequest prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
*
*
* <pre>
* Request message for
* [UpdateRun][google.cloud.datacatalog.lineage.v1.UpdateRun].
* </pre>
*
* Protobuf type {@code google.cloud.datacatalog.lineage.v1.UpdateRunRequest}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.cloud.datacatalog.lineage.v1.UpdateRunRequest)
com.google.cloud.datacatalog.lineage.v1.UpdateRunRequestOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.datacatalog.lineage.v1.LineageProto
.internal_static_google_cloud_datacatalog_lineage_v1_UpdateRunRequest_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.datacatalog.lineage.v1.LineageProto
.internal_static_google_cloud_datacatalog_lineage_v1_UpdateRunRequest_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.datacatalog.lineage.v1.UpdateRunRequest.class,
com.google.cloud.datacatalog.lineage.v1.UpdateRunRequest.Builder.class);
}
// Construct using com.google.cloud.datacatalog.lineage.v1.UpdateRunRequest.newBuilder()
private Builder() {
maybeForceBuilderInitialization();
}
private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
private void maybeForceBuilderInitialization() {
if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) {
getRunFieldBuilder();
getUpdateMaskFieldBuilder();
}
}
@java.lang.Override
public Builder clear() {
super.clear();
bitField0_ = 0;
run_ = null;
if (runBuilder_ != null) {
runBuilder_.dispose();
runBuilder_ = null;
}
updateMask_ = null;
if (updateMaskBuilder_ != null) {
updateMaskBuilder_.dispose();
updateMaskBuilder_ = null;
}
allowMissing_ = false;
return this;
}
@java.lang.Override
public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
return com.google.cloud.datacatalog.lineage.v1.LineageProto
.internal_static_google_cloud_datacatalog_lineage_v1_UpdateRunRequest_descriptor;
}
@java.lang.Override
public com.google.cloud.datacatalog.lineage.v1.UpdateRunRequest getDefaultInstanceForType() {
return com.google.cloud.datacatalog.lineage.v1.UpdateRunRequest.getDefaultInstance();
}
@java.lang.Override
public com.google.cloud.datacatalog.lineage.v1.UpdateRunRequest build() {
com.google.cloud.datacatalog.lineage.v1.UpdateRunRequest result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public com.google.cloud.datacatalog.lineage.v1.UpdateRunRequest buildPartial() {
com.google.cloud.datacatalog.lineage.v1.UpdateRunRequest result =
new com.google.cloud.datacatalog.lineage.v1.UpdateRunRequest(this);
if (bitField0_ != 0) {
buildPartial0(result);
}
onBuilt();
return result;
}
private void buildPartial0(com.google.cloud.datacatalog.lineage.v1.UpdateRunRequest result) {
int from_bitField0_ = bitField0_;
int to_bitField0_ = 0;
if (((from_bitField0_ & 0x00000001) != 0)) {
result.run_ = runBuilder_ == null ? run_ : runBuilder_.build();
to_bitField0_ |= 0x00000001;
}
if (((from_bitField0_ & 0x00000002) != 0)) {
result.updateMask_ = updateMaskBuilder_ == null ? updateMask_ : updateMaskBuilder_.build();
to_bitField0_ |= 0x00000002;
}
if (((from_bitField0_ & 0x00000004) != 0)) {
result.allowMissing_ = allowMissing_;
}
result.bitField0_ |= to_bitField0_;
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof com.google.cloud.datacatalog.lineage.v1.UpdateRunRequest) {
return mergeFrom((com.google.cloud.datacatalog.lineage.v1.UpdateRunRequest) other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(com.google.cloud.datacatalog.lineage.v1.UpdateRunRequest other) {
if (other == com.google.cloud.datacatalog.lineage.v1.UpdateRunRequest.getDefaultInstance())
return this;
if (other.hasRun()) {
mergeRun(other.getRun());
}
if (other.hasUpdateMask()) {
mergeUpdateMask(other.getUpdateMask());
}
if (other.getAllowMissing() != false) {
setAllowMissing(other.getAllowMissing());
}
this.mergeUnknownFields(other.getUnknownFields());
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
return true;
}
@java.lang.Override
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
case 10:
{
input.readMessage(getRunFieldBuilder().getBuilder(), extensionRegistry);
bitField0_ |= 0x00000001;
break;
} // case 10
case 18:
{
input.readMessage(getUpdateMaskFieldBuilder().getBuilder(), extensionRegistry);
bitField0_ |= 0x00000002;
break;
} // case 18
case 24:
{
allowMissing_ = input.readBool();
bitField0_ |= 0x00000004;
break;
} // case 24
default:
{
if (!super.parseUnknownField(input, extensionRegistry, tag)) {
done = true; // was an endgroup tag
}
break;
} // default:
} // switch (tag)
} // while (!done)
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.unwrapIOException();
} finally {
onChanged();
} // finally
return this;
}
private int bitField0_;
private com.google.cloud.datacatalog.lineage.v1.Run run_;
private com.google.protobuf.SingleFieldBuilderV3<
com.google.cloud.datacatalog.lineage.v1.Run,
com.google.cloud.datacatalog.lineage.v1.Run.Builder,
com.google.cloud.datacatalog.lineage.v1.RunOrBuilder>
runBuilder_;
/**
*
*
* <pre>
* Required. The lineage run to update.
*
* The run's `name` field is used to identify the run to update.
*
* Format:
* `projects/{project}/locations/{location}/processes/{process}/runs/{run}`.
* </pre>
*
* <code>
* .google.cloud.datacatalog.lineage.v1.Run run = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return Whether the run field is set.
*/
public boolean hasRun() {
return ((bitField0_ & 0x00000001) != 0);
}
/**
*
*
* <pre>
* Required. The lineage run to update.
*
* The run's `name` field is used to identify the run to update.
*
* Format:
* `projects/{project}/locations/{location}/processes/{process}/runs/{run}`.
* </pre>
*
* <code>
* .google.cloud.datacatalog.lineage.v1.Run run = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return The run.
*/
public com.google.cloud.datacatalog.lineage.v1.Run getRun() {
if (runBuilder_ == null) {
return run_ == null
? com.google.cloud.datacatalog.lineage.v1.Run.getDefaultInstance()
: run_;
} else {
return runBuilder_.getMessage();
}
}
/**
*
*
* <pre>
* Required. The lineage run to update.
*
* The run's `name` field is used to identify the run to update.
*
* Format:
* `projects/{project}/locations/{location}/processes/{process}/runs/{run}`.
* </pre>
*
* <code>
* .google.cloud.datacatalog.lineage.v1.Run run = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public Builder setRun(com.google.cloud.datacatalog.lineage.v1.Run value) {
if (runBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
run_ = value;
} else {
runBuilder_.setMessage(value);
}
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. The lineage run to update.
*
* The run's `name` field is used to identify the run to update.
*
* Format:
* `projects/{project}/locations/{location}/processes/{process}/runs/{run}`.
* </pre>
*
* <code>
* .google.cloud.datacatalog.lineage.v1.Run run = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public Builder setRun(com.google.cloud.datacatalog.lineage.v1.Run.Builder builderForValue) {
if (runBuilder_ == null) {
run_ = builderForValue.build();
} else {
runBuilder_.setMessage(builderForValue.build());
}
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. The lineage run to update.
*
* The run's `name` field is used to identify the run to update.
*
* Format:
* `projects/{project}/locations/{location}/processes/{process}/runs/{run}`.
* </pre>
*
* <code>
* .google.cloud.datacatalog.lineage.v1.Run run = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public Builder mergeRun(com.google.cloud.datacatalog.lineage.v1.Run value) {
if (runBuilder_ == null) {
if (((bitField0_ & 0x00000001) != 0)
&& run_ != null
&& run_ != com.google.cloud.datacatalog.lineage.v1.Run.getDefaultInstance()) {
getRunBuilder().mergeFrom(value);
} else {
run_ = value;
}
} else {
runBuilder_.mergeFrom(value);
}
if (run_ != null) {
bitField0_ |= 0x00000001;
onChanged();
}
return this;
}
/**
*
*
* <pre>
* Required. The lineage run to update.
*
* The run's `name` field is used to identify the run to update.
*
* Format:
* `projects/{project}/locations/{location}/processes/{process}/runs/{run}`.
* </pre>
*
* <code>
* .google.cloud.datacatalog.lineage.v1.Run run = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public Builder clearRun() {
bitField0_ = (bitField0_ & ~0x00000001);
run_ = null;
if (runBuilder_ != null) {
runBuilder_.dispose();
runBuilder_ = null;
}
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. The lineage run to update.
*
* The run's `name` field is used to identify the run to update.
*
* Format:
* `projects/{project}/locations/{location}/processes/{process}/runs/{run}`.
* </pre>
*
* <code>
* .google.cloud.datacatalog.lineage.v1.Run run = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public com.google.cloud.datacatalog.lineage.v1.Run.Builder getRunBuilder() {
bitField0_ |= 0x00000001;
onChanged();
return getRunFieldBuilder().getBuilder();
}
/**
*
*
* <pre>
* Required. The lineage run to update.
*
* The run's `name` field is used to identify the run to update.
*
* Format:
* `projects/{project}/locations/{location}/processes/{process}/runs/{run}`.
* </pre>
*
* <code>
* .google.cloud.datacatalog.lineage.v1.Run run = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public com.google.cloud.datacatalog.lineage.v1.RunOrBuilder getRunOrBuilder() {
if (runBuilder_ != null) {
return runBuilder_.getMessageOrBuilder();
} else {
return run_ == null
? com.google.cloud.datacatalog.lineage.v1.Run.getDefaultInstance()
: run_;
}
}
/**
*
*
* <pre>
* Required. The lineage run to update.
*
* The run's `name` field is used to identify the run to update.
*
* Format:
* `projects/{project}/locations/{location}/processes/{process}/runs/{run}`.
* </pre>
*
* <code>
* .google.cloud.datacatalog.lineage.v1.Run run = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
private com.google.protobuf.SingleFieldBuilderV3<
com.google.cloud.datacatalog.lineage.v1.Run,
com.google.cloud.datacatalog.lineage.v1.Run.Builder,
com.google.cloud.datacatalog.lineage.v1.RunOrBuilder>
getRunFieldBuilder() {
if (runBuilder_ == null) {
runBuilder_ =
new com.google.protobuf.SingleFieldBuilderV3<
com.google.cloud.datacatalog.lineage.v1.Run,
com.google.cloud.datacatalog.lineage.v1.Run.Builder,
com.google.cloud.datacatalog.lineage.v1.RunOrBuilder>(
getRun(), getParentForChildren(), isClean());
run_ = null;
}
return runBuilder_;
}
private com.google.protobuf.FieldMask updateMask_;
private com.google.protobuf.SingleFieldBuilderV3<
com.google.protobuf.FieldMask,
com.google.protobuf.FieldMask.Builder,
com.google.protobuf.FieldMaskOrBuilder>
updateMaskBuilder_;
/**
*
*
* <pre>
* The list of fields to update. Currently not used. The whole message is
* updated.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2;</code>
*
* @return Whether the updateMask field is set.
*/
public boolean hasUpdateMask() {
return ((bitField0_ & 0x00000002) != 0);
}
/**
*
*
* <pre>
* The list of fields to update. Currently not used. The whole message is
* updated.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2;</code>
*
* @return The updateMask.
*/
public com.google.protobuf.FieldMask getUpdateMask() {
if (updateMaskBuilder_ == null) {
return updateMask_ == null
? com.google.protobuf.FieldMask.getDefaultInstance()
: updateMask_;
} else {
return updateMaskBuilder_.getMessage();
}
}
/**
*
*
* <pre>
* The list of fields to update. Currently not used. The whole message is
* updated.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2;</code>
*/
public Builder setUpdateMask(com.google.protobuf.FieldMask value) {
if (updateMaskBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
updateMask_ = value;
} else {
updateMaskBuilder_.setMessage(value);
}
bitField0_ |= 0x00000002;
onChanged();
return this;
}
/**
*
*
* <pre>
* The list of fields to update. Currently not used. The whole message is
* updated.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2;</code>
*/
public Builder setUpdateMask(com.google.protobuf.FieldMask.Builder builderForValue) {
if (updateMaskBuilder_ == null) {
updateMask_ = builderForValue.build();
} else {
updateMaskBuilder_.setMessage(builderForValue.build());
}
bitField0_ |= 0x00000002;
onChanged();
return this;
}
/**
*
*
* <pre>
* The list of fields to update. Currently not used. The whole message is
* updated.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2;</code>
*/
public Builder mergeUpdateMask(com.google.protobuf.FieldMask value) {
if (updateMaskBuilder_ == null) {
if (((bitField0_ & 0x00000002) != 0)
&& updateMask_ != null
&& updateMask_ != com.google.protobuf.FieldMask.getDefaultInstance()) {
getUpdateMaskBuilder().mergeFrom(value);
} else {
updateMask_ = value;
}
} else {
updateMaskBuilder_.mergeFrom(value);
}
if (updateMask_ != null) {
bitField0_ |= 0x00000002;
onChanged();
}
return this;
}
/**
*
*
* <pre>
* The list of fields to update. Currently not used. The whole message is
* updated.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2;</code>
*/
public Builder clearUpdateMask() {
bitField0_ = (bitField0_ & ~0x00000002);
updateMask_ = null;
if (updateMaskBuilder_ != null) {
updateMaskBuilder_.dispose();
updateMaskBuilder_ = null;
}
onChanged();
return this;
}
/**
*
*
* <pre>
* The list of fields to update. Currently not used. The whole message is
* updated.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2;</code>
*/
public com.google.protobuf.FieldMask.Builder getUpdateMaskBuilder() {
bitField0_ |= 0x00000002;
onChanged();
return getUpdateMaskFieldBuilder().getBuilder();
}
/**
*
*
* <pre>
* The list of fields to update. Currently not used. The whole message is
* updated.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2;</code>
*/
public com.google.protobuf.FieldMaskOrBuilder getUpdateMaskOrBuilder() {
if (updateMaskBuilder_ != null) {
return updateMaskBuilder_.getMessageOrBuilder();
} else {
return updateMask_ == null
? com.google.protobuf.FieldMask.getDefaultInstance()
: updateMask_;
}
}
/**
*
*
* <pre>
* The list of fields to update. Currently not used. The whole message is
* updated.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2;</code>
*/
private com.google.protobuf.SingleFieldBuilderV3<
com.google.protobuf.FieldMask,
com.google.protobuf.FieldMask.Builder,
com.google.protobuf.FieldMaskOrBuilder>
getUpdateMaskFieldBuilder() {
if (updateMaskBuilder_ == null) {
updateMaskBuilder_ =
new com.google.protobuf.SingleFieldBuilderV3<
com.google.protobuf.FieldMask,
com.google.protobuf.FieldMask.Builder,
com.google.protobuf.FieldMaskOrBuilder>(
getUpdateMask(), getParentForChildren(), isClean());
updateMask_ = null;
}
return updateMaskBuilder_;
}
private boolean allowMissing_;
/**
*
*
* <pre>
* If set to true and the run is not found, the request creates it.
* </pre>
*
* <code>bool allow_missing = 3;</code>
*
* @return The allowMissing.
*/
@java.lang.Override
public boolean getAllowMissing() {
return allowMissing_;
}
/**
*
*
* <pre>
* If set to true and the run is not found, the request creates it.
* </pre>
*
* <code>bool allow_missing = 3;</code>
*
* @param value The allowMissing to set.
* @return This builder for chaining.
*/
public Builder setAllowMissing(boolean value) {
allowMissing_ = value;
bitField0_ |= 0x00000004;
onChanged();
return this;
}
/**
*
*
* <pre>
* If set to true and the run is not found, the request creates it.
* </pre>
*
* <code>bool allow_missing = 3;</code>
*
* @return This builder for chaining.
*/
public Builder clearAllowMissing() {
bitField0_ = (bitField0_ & ~0x00000004);
allowMissing_ = false;
onChanged();
return this;
}
@java.lang.Override
public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:google.cloud.datacatalog.lineage.v1.UpdateRunRequest)
}
// @@protoc_insertion_point(class_scope:google.cloud.datacatalog.lineage.v1.UpdateRunRequest)
private static final com.google.cloud.datacatalog.lineage.v1.UpdateRunRequest DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new com.google.cloud.datacatalog.lineage.v1.UpdateRunRequest();
}
public static com.google.cloud.datacatalog.lineage.v1.UpdateRunRequest getDefaultInstance() {
return DEFAULT_INSTANCE;
}
private static final com.google.protobuf.Parser<UpdateRunRequest> PARSER =
new com.google.protobuf.AbstractParser<UpdateRunRequest>() {
@java.lang.Override
public UpdateRunRequest parsePartialFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
Builder builder = newBuilder();
try {
builder.mergeFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(builder.buildPartial());
} catch (com.google.protobuf.UninitializedMessageException e) {
throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
} catch (java.io.IOException e) {
throw new com.google.protobuf.InvalidProtocolBufferException(e)
.setUnfinishedMessage(builder.buildPartial());
}
return builder.buildPartial();
}
};
  public static com.google.protobuf.Parser<UpdateRunRequest> parser() {
    // Static accessor for the shared wire-format parser.
    return PARSER;
  }
  @java.lang.Override
  public com.google.protobuf.Parser<UpdateRunRequest> getParserForType() {
    // Instance-level accessor required by the MessageLite contract; returns the
    // same shared parser as parser().
    return PARSER;
  }
  @java.lang.Override
  public com.google.cloud.datacatalog.lineage.v1.UpdateRunRequest getDefaultInstanceForType() {
    // Required by MessageLite; returns the shared immutable default instance.
    return DEFAULT_INSTANCE;
  }
}
|
googleapis/google-cloud-java | 37,363 | java-licensemanager/google-cloud-licensemanager/src/main/java/com/google/cloud/licensemanager/v1/stub/GrpcLicenseManagerStub.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.cloud.licensemanager.v1.stub;
import static com.google.cloud.licensemanager.v1.LicenseManagerClient.AggregateUsagePagedResponse;
import static com.google.cloud.licensemanager.v1.LicenseManagerClient.ListConfigurationsPagedResponse;
import static com.google.cloud.licensemanager.v1.LicenseManagerClient.ListInstancesPagedResponse;
import static com.google.cloud.licensemanager.v1.LicenseManagerClient.ListLocationsPagedResponse;
import static com.google.cloud.licensemanager.v1.LicenseManagerClient.ListProductsPagedResponse;
import com.google.api.gax.core.BackgroundResource;
import com.google.api.gax.core.BackgroundResourceAggregation;
import com.google.api.gax.grpc.GrpcCallSettings;
import com.google.api.gax.grpc.GrpcStubCallableFactory;
import com.google.api.gax.rpc.ClientContext;
import com.google.api.gax.rpc.OperationCallable;
import com.google.api.gax.rpc.RequestParamsBuilder;
import com.google.api.gax.rpc.UnaryCallable;
import com.google.cloud.licensemanager.v1.AggregateUsageRequest;
import com.google.cloud.licensemanager.v1.AggregateUsageResponse;
import com.google.cloud.licensemanager.v1.Configuration;
import com.google.cloud.licensemanager.v1.CreateConfigurationRequest;
import com.google.cloud.licensemanager.v1.DeactivateConfigurationRequest;
import com.google.cloud.licensemanager.v1.DeleteConfigurationRequest;
import com.google.cloud.licensemanager.v1.GetConfigurationRequest;
import com.google.cloud.licensemanager.v1.GetInstanceRequest;
import com.google.cloud.licensemanager.v1.GetProductRequest;
import com.google.cloud.licensemanager.v1.Instance;
import com.google.cloud.licensemanager.v1.ListConfigurationsRequest;
import com.google.cloud.licensemanager.v1.ListConfigurationsResponse;
import com.google.cloud.licensemanager.v1.ListInstancesRequest;
import com.google.cloud.licensemanager.v1.ListInstancesResponse;
import com.google.cloud.licensemanager.v1.ListProductsRequest;
import com.google.cloud.licensemanager.v1.ListProductsResponse;
import com.google.cloud.licensemanager.v1.OperationMetadata;
import com.google.cloud.licensemanager.v1.Product;
import com.google.cloud.licensemanager.v1.QueryConfigurationLicenseUsageRequest;
import com.google.cloud.licensemanager.v1.QueryConfigurationLicenseUsageResponse;
import com.google.cloud.licensemanager.v1.ReactivateConfigurationRequest;
import com.google.cloud.licensemanager.v1.UpdateConfigurationRequest;
import com.google.cloud.location.GetLocationRequest;
import com.google.cloud.location.ListLocationsRequest;
import com.google.cloud.location.ListLocationsResponse;
import com.google.cloud.location.Location;
import com.google.longrunning.Operation;
import com.google.longrunning.stub.GrpcOperationsStub;
import com.google.protobuf.Empty;
import io.grpc.MethodDescriptor;
import io.grpc.protobuf.ProtoUtils;
import java.io.IOException;
import java.util.concurrent.TimeUnit;
import javax.annotation.Generated;
// AUTO-GENERATED DOCUMENTATION AND CLASS.
/**
* gRPC stub implementation for the LicenseManager service API.
*
* <p>This class is for advanced usage and reflects the underlying API directly.
*/
@Generated("by gapic-generator-java")
public class GrpcLicenseManagerStub extends LicenseManagerStub {
  // -------------------------------------------------------------------------
  // Method descriptors: one static, immutable gRPC MethodDescriptor per RPC,
  // pairing the fully-qualified service/method name with protobuf request and
  // response marshallers. Every LicenseManager RPC here is UNARY.
  // -------------------------------------------------------------------------
  private static final MethodDescriptor<ListConfigurationsRequest, ListConfigurationsResponse>
      listConfigurationsMethodDescriptor =
          MethodDescriptor.<ListConfigurationsRequest, ListConfigurationsResponse>newBuilder()
              .setType(MethodDescriptor.MethodType.UNARY)
              .setFullMethodName("google.cloud.licensemanager.v1.LicenseManager/ListConfigurations")
              .setRequestMarshaller(
                  ProtoUtils.marshaller(ListConfigurationsRequest.getDefaultInstance()))
              .setResponseMarshaller(
                  ProtoUtils.marshaller(ListConfigurationsResponse.getDefaultInstance()))
              .setSampledToLocalTracing(true)
              .build();
  private static final MethodDescriptor<GetConfigurationRequest, Configuration>
      getConfigurationMethodDescriptor =
          MethodDescriptor.<GetConfigurationRequest, Configuration>newBuilder()
              .setType(MethodDescriptor.MethodType.UNARY)
              .setFullMethodName("google.cloud.licensemanager.v1.LicenseManager/GetConfiguration")
              .setRequestMarshaller(
                  ProtoUtils.marshaller(GetConfigurationRequest.getDefaultInstance()))
              .setResponseMarshaller(ProtoUtils.marshaller(Configuration.getDefaultInstance()))
              .setSampledToLocalTracing(true)
              .build();
  private static final MethodDescriptor<CreateConfigurationRequest, Operation>
      createConfigurationMethodDescriptor =
          MethodDescriptor.<CreateConfigurationRequest, Operation>newBuilder()
              .setType(MethodDescriptor.MethodType.UNARY)
              .setFullMethodName(
                  "google.cloud.licensemanager.v1.LicenseManager/CreateConfiguration")
              .setRequestMarshaller(
                  ProtoUtils.marshaller(CreateConfigurationRequest.getDefaultInstance()))
              .setResponseMarshaller(ProtoUtils.marshaller(Operation.getDefaultInstance()))
              .setSampledToLocalTracing(true)
              .build();
  private static final MethodDescriptor<UpdateConfigurationRequest, Operation>
      updateConfigurationMethodDescriptor =
          MethodDescriptor.<UpdateConfigurationRequest, Operation>newBuilder()
              .setType(MethodDescriptor.MethodType.UNARY)
              .setFullMethodName(
                  "google.cloud.licensemanager.v1.LicenseManager/UpdateConfiguration")
              .setRequestMarshaller(
                  ProtoUtils.marshaller(UpdateConfigurationRequest.getDefaultInstance()))
              .setResponseMarshaller(ProtoUtils.marshaller(Operation.getDefaultInstance()))
              .setSampledToLocalTracing(true)
              .build();
  private static final MethodDescriptor<DeleteConfigurationRequest, Operation>
      deleteConfigurationMethodDescriptor =
          MethodDescriptor.<DeleteConfigurationRequest, Operation>newBuilder()
              .setType(MethodDescriptor.MethodType.UNARY)
              .setFullMethodName(
                  "google.cloud.licensemanager.v1.LicenseManager/DeleteConfiguration")
              .setRequestMarshaller(
                  ProtoUtils.marshaller(DeleteConfigurationRequest.getDefaultInstance()))
              .setResponseMarshaller(ProtoUtils.marshaller(Operation.getDefaultInstance()))
              .setSampledToLocalTracing(true)
              .build();
  private static final MethodDescriptor<ListInstancesRequest, ListInstancesResponse>
      listInstancesMethodDescriptor =
          MethodDescriptor.<ListInstancesRequest, ListInstancesResponse>newBuilder()
              .setType(MethodDescriptor.MethodType.UNARY)
              .setFullMethodName("google.cloud.licensemanager.v1.LicenseManager/ListInstances")
              .setRequestMarshaller(
                  ProtoUtils.marshaller(ListInstancesRequest.getDefaultInstance()))
              .setResponseMarshaller(
                  ProtoUtils.marshaller(ListInstancesResponse.getDefaultInstance()))
              .setSampledToLocalTracing(true)
              .build();
  private static final MethodDescriptor<GetInstanceRequest, Instance> getInstanceMethodDescriptor =
      MethodDescriptor.<GetInstanceRequest, Instance>newBuilder()
          .setType(MethodDescriptor.MethodType.UNARY)
          .setFullMethodName("google.cloud.licensemanager.v1.LicenseManager/GetInstance")
          .setRequestMarshaller(ProtoUtils.marshaller(GetInstanceRequest.getDefaultInstance()))
          .setResponseMarshaller(ProtoUtils.marshaller(Instance.getDefaultInstance()))
          .setSampledToLocalTracing(true)
          .build();
  private static final MethodDescriptor<DeactivateConfigurationRequest, Operation>
      deactivateConfigurationMethodDescriptor =
          MethodDescriptor.<DeactivateConfigurationRequest, Operation>newBuilder()
              .setType(MethodDescriptor.MethodType.UNARY)
              .setFullMethodName(
                  "google.cloud.licensemanager.v1.LicenseManager/DeactivateConfiguration")
              .setRequestMarshaller(
                  ProtoUtils.marshaller(DeactivateConfigurationRequest.getDefaultInstance()))
              .setResponseMarshaller(ProtoUtils.marshaller(Operation.getDefaultInstance()))
              .setSampledToLocalTracing(true)
              .build();
  private static final MethodDescriptor<ReactivateConfigurationRequest, Operation>
      reactivateConfigurationMethodDescriptor =
          MethodDescriptor.<ReactivateConfigurationRequest, Operation>newBuilder()
              .setType(MethodDescriptor.MethodType.UNARY)
              .setFullMethodName(
                  "google.cloud.licensemanager.v1.LicenseManager/ReactivateConfiguration")
              .setRequestMarshaller(
                  ProtoUtils.marshaller(ReactivateConfigurationRequest.getDefaultInstance()))
              .setResponseMarshaller(ProtoUtils.marshaller(Operation.getDefaultInstance()))
              .setSampledToLocalTracing(true)
              .build();
  private static final MethodDescriptor<
          QueryConfigurationLicenseUsageRequest, QueryConfigurationLicenseUsageResponse>
      queryConfigurationLicenseUsageMethodDescriptor =
          MethodDescriptor
              .<QueryConfigurationLicenseUsageRequest, QueryConfigurationLicenseUsageResponse>
                  newBuilder()
              .setType(MethodDescriptor.MethodType.UNARY)
              .setFullMethodName(
                  "google.cloud.licensemanager.v1.LicenseManager/QueryConfigurationLicenseUsage")
              .setRequestMarshaller(
                  ProtoUtils.marshaller(QueryConfigurationLicenseUsageRequest.getDefaultInstance()))
              .setResponseMarshaller(
                  ProtoUtils.marshaller(
                      QueryConfigurationLicenseUsageResponse.getDefaultInstance()))
              .setSampledToLocalTracing(true)
              .build();
  private static final MethodDescriptor<AggregateUsageRequest, AggregateUsageResponse>
      aggregateUsageMethodDescriptor =
          MethodDescriptor.<AggregateUsageRequest, AggregateUsageResponse>newBuilder()
              .setType(MethodDescriptor.MethodType.UNARY)
              .setFullMethodName("google.cloud.licensemanager.v1.LicenseManager/AggregateUsage")
              .setRequestMarshaller(
                  ProtoUtils.marshaller(AggregateUsageRequest.getDefaultInstance()))
              .setResponseMarshaller(
                  ProtoUtils.marshaller(AggregateUsageResponse.getDefaultInstance()))
              .setSampledToLocalTracing(true)
              .build();
  private static final MethodDescriptor<ListProductsRequest, ListProductsResponse>
      listProductsMethodDescriptor =
          MethodDescriptor.<ListProductsRequest, ListProductsResponse>newBuilder()
              .setType(MethodDescriptor.MethodType.UNARY)
              .setFullMethodName("google.cloud.licensemanager.v1.LicenseManager/ListProducts")
              .setRequestMarshaller(ProtoUtils.marshaller(ListProductsRequest.getDefaultInstance()))
              .setResponseMarshaller(
                  ProtoUtils.marshaller(ListProductsResponse.getDefaultInstance()))
              .setSampledToLocalTracing(true)
              .build();
  private static final MethodDescriptor<GetProductRequest, Product> getProductMethodDescriptor =
      MethodDescriptor.<GetProductRequest, Product>newBuilder()
          .setType(MethodDescriptor.MethodType.UNARY)
          .setFullMethodName("google.cloud.licensemanager.v1.LicenseManager/GetProduct")
          .setRequestMarshaller(ProtoUtils.marshaller(GetProductRequest.getDefaultInstance()))
          .setResponseMarshaller(ProtoUtils.marshaller(Product.getDefaultInstance()))
          .setSampledToLocalTracing(true)
          .build();
  // Mixed-in google.cloud.location.Locations service methods.
  private static final MethodDescriptor<ListLocationsRequest, ListLocationsResponse>
      listLocationsMethodDescriptor =
          MethodDescriptor.<ListLocationsRequest, ListLocationsResponse>newBuilder()
              .setType(MethodDescriptor.MethodType.UNARY)
              .setFullMethodName("google.cloud.location.Locations/ListLocations")
              .setRequestMarshaller(
                  ProtoUtils.marshaller(ListLocationsRequest.getDefaultInstance()))
              .setResponseMarshaller(
                  ProtoUtils.marshaller(ListLocationsResponse.getDefaultInstance()))
              .setSampledToLocalTracing(true)
              .build();
  private static final MethodDescriptor<GetLocationRequest, Location> getLocationMethodDescriptor =
      MethodDescriptor.<GetLocationRequest, Location>newBuilder()
          .setType(MethodDescriptor.MethodType.UNARY)
          .setFullMethodName("google.cloud.location.Locations/GetLocation")
          .setRequestMarshaller(ProtoUtils.marshaller(GetLocationRequest.getDefaultInstance()))
          .setResponseMarshaller(ProtoUtils.marshaller(Location.getDefaultInstance()))
          .setSampledToLocalTracing(true)
          .build();
  // -------------------------------------------------------------------------
  // Pre-built callables returned by the accessor methods below. "Paged"
  // variants add auto-pagination over the plain unary call; Operation*
  // variants wrap long-running operations so callers can await the typed
  // response (e.g. Configuration) and metadata (OperationMetadata).
  // -------------------------------------------------------------------------
  private final UnaryCallable<ListConfigurationsRequest, ListConfigurationsResponse>
      listConfigurationsCallable;
  private final UnaryCallable<ListConfigurationsRequest, ListConfigurationsPagedResponse>
      listConfigurationsPagedCallable;
  private final UnaryCallable<GetConfigurationRequest, Configuration> getConfigurationCallable;
  private final UnaryCallable<CreateConfigurationRequest, Operation> createConfigurationCallable;
  private final OperationCallable<CreateConfigurationRequest, Configuration, OperationMetadata>
      createConfigurationOperationCallable;
  private final UnaryCallable<UpdateConfigurationRequest, Operation> updateConfigurationCallable;
  private final OperationCallable<UpdateConfigurationRequest, Configuration, OperationMetadata>
      updateConfigurationOperationCallable;
  private final UnaryCallable<DeleteConfigurationRequest, Operation> deleteConfigurationCallable;
  private final OperationCallable<DeleteConfigurationRequest, Empty, OperationMetadata>
      deleteConfigurationOperationCallable;
  private final UnaryCallable<ListInstancesRequest, ListInstancesResponse> listInstancesCallable;
  private final UnaryCallable<ListInstancesRequest, ListInstancesPagedResponse>
      listInstancesPagedCallable;
  private final UnaryCallable<GetInstanceRequest, Instance> getInstanceCallable;
  private final UnaryCallable<DeactivateConfigurationRequest, Operation>
      deactivateConfigurationCallable;
  private final OperationCallable<DeactivateConfigurationRequest, Configuration, OperationMetadata>
      deactivateConfigurationOperationCallable;
  private final UnaryCallable<ReactivateConfigurationRequest, Operation>
      reactivateConfigurationCallable;
  private final OperationCallable<ReactivateConfigurationRequest, Configuration, OperationMetadata>
      reactivateConfigurationOperationCallable;
  private final UnaryCallable<
          QueryConfigurationLicenseUsageRequest, QueryConfigurationLicenseUsageResponse>
      queryConfigurationLicenseUsageCallable;
  private final UnaryCallable<AggregateUsageRequest, AggregateUsageResponse> aggregateUsageCallable;
  private final UnaryCallable<AggregateUsageRequest, AggregateUsagePagedResponse>
      aggregateUsagePagedCallable;
  private final UnaryCallable<ListProductsRequest, ListProductsResponse> listProductsCallable;
  private final UnaryCallable<ListProductsRequest, ListProductsPagedResponse>
      listProductsPagedCallable;
  private final UnaryCallable<GetProductRequest, Product> getProductCallable;
  private final UnaryCallable<ListLocationsRequest, ListLocationsResponse> listLocationsCallable;
  private final UnaryCallable<ListLocationsRequest, ListLocationsPagedResponse>
      listLocationsPagedCallable;
  private final UnaryCallable<GetLocationRequest, Location> getLocationCallable;
  private final BackgroundResource backgroundResources;
  private final GrpcOperationsStub operationsStub;
  private final GrpcStubCallableFactory callableFactory;
  // Factory methods; the overloads without explicit settings fall back to
  // default LicenseManagerStubSettings.
  public static final GrpcLicenseManagerStub create(LicenseManagerStubSettings settings)
      throws IOException {
    return new GrpcLicenseManagerStub(settings, ClientContext.create(settings));
  }
  public static final GrpcLicenseManagerStub create(ClientContext clientContext)
      throws IOException {
    return new GrpcLicenseManagerStub(
        LicenseManagerStubSettings.newBuilder().build(), clientContext);
  }
  public static final GrpcLicenseManagerStub create(
      ClientContext clientContext, GrpcStubCallableFactory callableFactory) throws IOException {
    return new GrpcLicenseManagerStub(
        LicenseManagerStubSettings.newBuilder().build(), clientContext, callableFactory);
  }
  /**
   * Constructs an instance of GrpcLicenseManagerStub, using the given settings. This is protected
   * so that it is easy to make a subclass, but otherwise, the static factory methods should be
   * preferred.
   */
  protected GrpcLicenseManagerStub(LicenseManagerStubSettings settings, ClientContext clientContext)
      throws IOException {
    this(settings, clientContext, new GrpcLicenseManagerCallableFactory());
  }
  /**
   * Constructs an instance of GrpcLicenseManagerStub, using the given settings. This is protected
   * so that it is easy to make a subclass, but otherwise, the static factory methods should be
   * preferred.
   */
  protected GrpcLicenseManagerStub(
      LicenseManagerStubSettings settings,
      ClientContext clientContext,
      GrpcStubCallableFactory callableFactory)
      throws IOException {
    this.callableFactory = callableFactory;
    this.operationsStub = GrpcOperationsStub.create(clientContext, callableFactory);
    // Per-RPC transport settings. Each params extractor copies identifying
    // request fields ("parent", "name", or "configuration.name") into request
    // params, which gax sends as routing headers.
    GrpcCallSettings<ListConfigurationsRequest, ListConfigurationsResponse>
        listConfigurationsTransportSettings =
            GrpcCallSettings.<ListConfigurationsRequest, ListConfigurationsResponse>newBuilder()
                .setMethodDescriptor(listConfigurationsMethodDescriptor)
                .setParamsExtractor(
                    request -> {
                      RequestParamsBuilder builder = RequestParamsBuilder.create();
                      builder.add("parent", String.valueOf(request.getParent()));
                      return builder.build();
                    })
                .build();
    GrpcCallSettings<GetConfigurationRequest, Configuration> getConfigurationTransportSettings =
        GrpcCallSettings.<GetConfigurationRequest, Configuration>newBuilder()
            .setMethodDescriptor(getConfigurationMethodDescriptor)
            .setParamsExtractor(
                request -> {
                  RequestParamsBuilder builder = RequestParamsBuilder.create();
                  builder.add("name", String.valueOf(request.getName()));
                  return builder.build();
                })
            .build();
    GrpcCallSettings<CreateConfigurationRequest, Operation> createConfigurationTransportSettings =
        GrpcCallSettings.<CreateConfigurationRequest, Operation>newBuilder()
            .setMethodDescriptor(createConfigurationMethodDescriptor)
            .setParamsExtractor(
                request -> {
                  RequestParamsBuilder builder = RequestParamsBuilder.create();
                  builder.add("parent", String.valueOf(request.getParent()));
                  return builder.build();
                })
            .build();
    GrpcCallSettings<UpdateConfigurationRequest, Operation> updateConfigurationTransportSettings =
        GrpcCallSettings.<UpdateConfigurationRequest, Operation>newBuilder()
            .setMethodDescriptor(updateConfigurationMethodDescriptor)
            .setParamsExtractor(
                request -> {
                  RequestParamsBuilder builder = RequestParamsBuilder.create();
                  builder.add(
                      "configuration.name", String.valueOf(request.getConfiguration().getName()));
                  return builder.build();
                })
            .build();
    GrpcCallSettings<DeleteConfigurationRequest, Operation> deleteConfigurationTransportSettings =
        GrpcCallSettings.<DeleteConfigurationRequest, Operation>newBuilder()
            .setMethodDescriptor(deleteConfigurationMethodDescriptor)
            .setParamsExtractor(
                request -> {
                  RequestParamsBuilder builder = RequestParamsBuilder.create();
                  builder.add("name", String.valueOf(request.getName()));
                  return builder.build();
                })
            .build();
    GrpcCallSettings<ListInstancesRequest, ListInstancesResponse> listInstancesTransportSettings =
        GrpcCallSettings.<ListInstancesRequest, ListInstancesResponse>newBuilder()
            .setMethodDescriptor(listInstancesMethodDescriptor)
            .setParamsExtractor(
                request -> {
                  RequestParamsBuilder builder = RequestParamsBuilder.create();
                  builder.add("parent", String.valueOf(request.getParent()));
                  return builder.build();
                })
            .build();
    GrpcCallSettings<GetInstanceRequest, Instance> getInstanceTransportSettings =
        GrpcCallSettings.<GetInstanceRequest, Instance>newBuilder()
            .setMethodDescriptor(getInstanceMethodDescriptor)
            .setParamsExtractor(
                request -> {
                  RequestParamsBuilder builder = RequestParamsBuilder.create();
                  builder.add("name", String.valueOf(request.getName()));
                  return builder.build();
                })
            .build();
    GrpcCallSettings<DeactivateConfigurationRequest, Operation>
        deactivateConfigurationTransportSettings =
            GrpcCallSettings.<DeactivateConfigurationRequest, Operation>newBuilder()
                .setMethodDescriptor(deactivateConfigurationMethodDescriptor)
                .setParamsExtractor(
                    request -> {
                      RequestParamsBuilder builder = RequestParamsBuilder.create();
                      builder.add("name", String.valueOf(request.getName()));
                      return builder.build();
                    })
                .build();
    GrpcCallSettings<ReactivateConfigurationRequest, Operation>
        reactivateConfigurationTransportSettings =
            GrpcCallSettings.<ReactivateConfigurationRequest, Operation>newBuilder()
                .setMethodDescriptor(reactivateConfigurationMethodDescriptor)
                .setParamsExtractor(
                    request -> {
                      RequestParamsBuilder builder = RequestParamsBuilder.create();
                      builder.add("name", String.valueOf(request.getName()));
                      return builder.build();
                    })
                .build();
    GrpcCallSettings<QueryConfigurationLicenseUsageRequest, QueryConfigurationLicenseUsageResponse>
        queryConfigurationLicenseUsageTransportSettings =
            GrpcCallSettings
                .<QueryConfigurationLicenseUsageRequest, QueryConfigurationLicenseUsageResponse>
                    newBuilder()
                .setMethodDescriptor(queryConfigurationLicenseUsageMethodDescriptor)
                .setParamsExtractor(
                    request -> {
                      RequestParamsBuilder builder = RequestParamsBuilder.create();
                      builder.add("name", String.valueOf(request.getName()));
                      return builder.build();
                    })
                .build();
    GrpcCallSettings<AggregateUsageRequest, AggregateUsageResponse>
        aggregateUsageTransportSettings =
            GrpcCallSettings.<AggregateUsageRequest, AggregateUsageResponse>newBuilder()
                .setMethodDescriptor(aggregateUsageMethodDescriptor)
                .setParamsExtractor(
                    request -> {
                      RequestParamsBuilder builder = RequestParamsBuilder.create();
                      builder.add("name", String.valueOf(request.getName()));
                      return builder.build();
                    })
                .build();
    GrpcCallSettings<ListProductsRequest, ListProductsResponse> listProductsTransportSettings =
        GrpcCallSettings.<ListProductsRequest, ListProductsResponse>newBuilder()
            .setMethodDescriptor(listProductsMethodDescriptor)
            .setParamsExtractor(
                request -> {
                  RequestParamsBuilder builder = RequestParamsBuilder.create();
                  builder.add("parent", String.valueOf(request.getParent()));
                  return builder.build();
                })
            .build();
    GrpcCallSettings<GetProductRequest, Product> getProductTransportSettings =
        GrpcCallSettings.<GetProductRequest, Product>newBuilder()
            .setMethodDescriptor(getProductMethodDescriptor)
            .setParamsExtractor(
                request -> {
                  RequestParamsBuilder builder = RequestParamsBuilder.create();
                  builder.add("name", String.valueOf(request.getName()));
                  return builder.build();
                })
            .build();
    GrpcCallSettings<ListLocationsRequest, ListLocationsResponse> listLocationsTransportSettings =
        GrpcCallSettings.<ListLocationsRequest, ListLocationsResponse>newBuilder()
            .setMethodDescriptor(listLocationsMethodDescriptor)
            .setParamsExtractor(
                request -> {
                  RequestParamsBuilder builder = RequestParamsBuilder.create();
                  builder.add("name", String.valueOf(request.getName()));
                  return builder.build();
                })
            .build();
    GrpcCallSettings<GetLocationRequest, Location> getLocationTransportSettings =
        GrpcCallSettings.<GetLocationRequest, Location>newBuilder()
            .setMethodDescriptor(getLocationMethodDescriptor)
            .setParamsExtractor(
                request -> {
                  RequestParamsBuilder builder = RequestParamsBuilder.create();
                  builder.add("name", String.valueOf(request.getName()));
                  return builder.build();
                })
            .build();
    // Wire each transport-settings/call-settings pair into a callable via the
    // injected factory. Paged and long-running variants reuse the same
    // transport settings as their plain unary counterpart.
    this.listConfigurationsCallable =
        callableFactory.createUnaryCallable(
            listConfigurationsTransportSettings,
            settings.listConfigurationsSettings(),
            clientContext);
    this.listConfigurationsPagedCallable =
        callableFactory.createPagedCallable(
            listConfigurationsTransportSettings,
            settings.listConfigurationsSettings(),
            clientContext);
    this.getConfigurationCallable =
        callableFactory.createUnaryCallable(
            getConfigurationTransportSettings, settings.getConfigurationSettings(), clientContext);
    this.createConfigurationCallable =
        callableFactory.createUnaryCallable(
            createConfigurationTransportSettings,
            settings.createConfigurationSettings(),
            clientContext);
    this.createConfigurationOperationCallable =
        callableFactory.createOperationCallable(
            createConfigurationTransportSettings,
            settings.createConfigurationOperationSettings(),
            clientContext,
            operationsStub);
    this.updateConfigurationCallable =
        callableFactory.createUnaryCallable(
            updateConfigurationTransportSettings,
            settings.updateConfigurationSettings(),
            clientContext);
    this.updateConfigurationOperationCallable =
        callableFactory.createOperationCallable(
            updateConfigurationTransportSettings,
            settings.updateConfigurationOperationSettings(),
            clientContext,
            operationsStub);
    this.deleteConfigurationCallable =
        callableFactory.createUnaryCallable(
            deleteConfigurationTransportSettings,
            settings.deleteConfigurationSettings(),
            clientContext);
    this.deleteConfigurationOperationCallable =
        callableFactory.createOperationCallable(
            deleteConfigurationTransportSettings,
            settings.deleteConfigurationOperationSettings(),
            clientContext,
            operationsStub);
    this.listInstancesCallable =
        callableFactory.createUnaryCallable(
            listInstancesTransportSettings, settings.listInstancesSettings(), clientContext);
    this.listInstancesPagedCallable =
        callableFactory.createPagedCallable(
            listInstancesTransportSettings, settings.listInstancesSettings(), clientContext);
    this.getInstanceCallable =
        callableFactory.createUnaryCallable(
            getInstanceTransportSettings, settings.getInstanceSettings(), clientContext);
    this.deactivateConfigurationCallable =
        callableFactory.createUnaryCallable(
            deactivateConfigurationTransportSettings,
            settings.deactivateConfigurationSettings(),
            clientContext);
    this.deactivateConfigurationOperationCallable =
        callableFactory.createOperationCallable(
            deactivateConfigurationTransportSettings,
            settings.deactivateConfigurationOperationSettings(),
            clientContext,
            operationsStub);
    this.reactivateConfigurationCallable =
        callableFactory.createUnaryCallable(
            reactivateConfigurationTransportSettings,
            settings.reactivateConfigurationSettings(),
            clientContext);
    this.reactivateConfigurationOperationCallable =
        callableFactory.createOperationCallable(
            reactivateConfigurationTransportSettings,
            settings.reactivateConfigurationOperationSettings(),
            clientContext,
            operationsStub);
    this.queryConfigurationLicenseUsageCallable =
        callableFactory.createUnaryCallable(
            queryConfigurationLicenseUsageTransportSettings,
            settings.queryConfigurationLicenseUsageSettings(),
            clientContext);
    this.aggregateUsageCallable =
        callableFactory.createUnaryCallable(
            aggregateUsageTransportSettings, settings.aggregateUsageSettings(), clientContext);
    this.aggregateUsagePagedCallable =
        callableFactory.createPagedCallable(
            aggregateUsageTransportSettings, settings.aggregateUsageSettings(), clientContext);
    this.listProductsCallable =
        callableFactory.createUnaryCallable(
            listProductsTransportSettings, settings.listProductsSettings(), clientContext);
    this.listProductsPagedCallable =
        callableFactory.createPagedCallable(
            listProductsTransportSettings, settings.listProductsSettings(), clientContext);
    this.getProductCallable =
        callableFactory.createUnaryCallable(
            getProductTransportSettings, settings.getProductSettings(), clientContext);
    this.listLocationsCallable =
        callableFactory.createUnaryCallable(
            listLocationsTransportSettings, settings.listLocationsSettings(), clientContext);
    this.listLocationsPagedCallable =
        callableFactory.createPagedCallable(
            listLocationsTransportSettings, settings.listLocationsSettings(), clientContext);
    this.getLocationCallable =
        callableFactory.createUnaryCallable(
            getLocationTransportSettings, settings.getLocationSettings(), clientContext);
    // Aggregate all background resources (channels, executors, etc.) so that
    // close()/shutdown() below can manage them as one unit.
    this.backgroundResources =
        new BackgroundResourceAggregation(clientContext.getBackgroundResources());
  }
  // Exposes the long-running-operations stub used by the Operation* callables.
  public GrpcOperationsStub getOperationsStub() {
    return operationsStub;
  }
  // -------------------------------------------------------------------------
  // Accessors returning the pre-built callables constructed above.
  // -------------------------------------------------------------------------
  @Override
  public UnaryCallable<ListConfigurationsRequest, ListConfigurationsResponse>
      listConfigurationsCallable() {
    return listConfigurationsCallable;
  }
  @Override
  public UnaryCallable<ListConfigurationsRequest, ListConfigurationsPagedResponse>
      listConfigurationsPagedCallable() {
    return listConfigurationsPagedCallable;
  }
  @Override
  public UnaryCallable<GetConfigurationRequest, Configuration> getConfigurationCallable() {
    return getConfigurationCallable;
  }
  @Override
  public UnaryCallable<CreateConfigurationRequest, Operation> createConfigurationCallable() {
    return createConfigurationCallable;
  }
  @Override
  public OperationCallable<CreateConfigurationRequest, Configuration, OperationMetadata>
      createConfigurationOperationCallable() {
    return createConfigurationOperationCallable;
  }
  @Override
  public UnaryCallable<UpdateConfigurationRequest, Operation> updateConfigurationCallable() {
    return updateConfigurationCallable;
  }
  @Override
  public OperationCallable<UpdateConfigurationRequest, Configuration, OperationMetadata>
      updateConfigurationOperationCallable() {
    return updateConfigurationOperationCallable;
  }
  @Override
  public UnaryCallable<DeleteConfigurationRequest, Operation> deleteConfigurationCallable() {
    return deleteConfigurationCallable;
  }
  @Override
  public OperationCallable<DeleteConfigurationRequest, Empty, OperationMetadata>
      deleteConfigurationOperationCallable() {
    return deleteConfigurationOperationCallable;
  }
  @Override
  public UnaryCallable<ListInstancesRequest, ListInstancesResponse> listInstancesCallable() {
    return listInstancesCallable;
  }
  @Override
  public UnaryCallable<ListInstancesRequest, ListInstancesPagedResponse>
      listInstancesPagedCallable() {
    return listInstancesPagedCallable;
  }
  @Override
  public UnaryCallable<GetInstanceRequest, Instance> getInstanceCallable() {
    return getInstanceCallable;
  }
  @Override
  public UnaryCallable<DeactivateConfigurationRequest, Operation>
      deactivateConfigurationCallable() {
    return deactivateConfigurationCallable;
  }
  @Override
  public OperationCallable<DeactivateConfigurationRequest, Configuration, OperationMetadata>
      deactivateConfigurationOperationCallable() {
    return deactivateConfigurationOperationCallable;
  }
  @Override
  public UnaryCallable<ReactivateConfigurationRequest, Operation>
      reactivateConfigurationCallable() {
    return reactivateConfigurationCallable;
  }
  @Override
  public OperationCallable<ReactivateConfigurationRequest, Configuration, OperationMetadata>
      reactivateConfigurationOperationCallable() {
    return reactivateConfigurationOperationCallable;
  }
  @Override
  public UnaryCallable<
          QueryConfigurationLicenseUsageRequest, QueryConfigurationLicenseUsageResponse>
      queryConfigurationLicenseUsageCallable() {
    return queryConfigurationLicenseUsageCallable;
  }
  @Override
  public UnaryCallable<AggregateUsageRequest, AggregateUsageResponse> aggregateUsageCallable() {
    return aggregateUsageCallable;
  }
  @Override
  public UnaryCallable<AggregateUsageRequest, AggregateUsagePagedResponse>
      aggregateUsagePagedCallable() {
    return aggregateUsagePagedCallable;
  }
  @Override
  public UnaryCallable<ListProductsRequest, ListProductsResponse> listProductsCallable() {
    return listProductsCallable;
  }
  @Override
  public UnaryCallable<ListProductsRequest, ListProductsPagedResponse> listProductsPagedCallable() {
    return listProductsPagedCallable;
  }
  @Override
  public UnaryCallable<GetProductRequest, Product> getProductCallable() {
    return getProductCallable;
  }
  @Override
  public UnaryCallable<ListLocationsRequest, ListLocationsResponse> listLocationsCallable() {
    return listLocationsCallable;
  }
  @Override
  public UnaryCallable<ListLocationsRequest, ListLocationsPagedResponse>
      listLocationsPagedCallable() {
    return listLocationsPagedCallable;
  }
  @Override
  public UnaryCallable<GetLocationRequest, Location> getLocationCallable() {
    return getLocationCallable;
  }
  // -------------------------------------------------------------------------
  // Lifecycle: delegate shutdown/termination to the aggregated background
  // resources created in the constructor.
  // -------------------------------------------------------------------------
  @Override
  public final void close() {
    try {
      backgroundResources.close();
    } catch (RuntimeException e) {
      // Propagate unchecked failures unchanged.
      throw e;
    } catch (Exception e) {
      // Checked exceptions from resource close are wrapped; BackgroundResource
      // has no checked-exception contract for callers of close().
      throw new IllegalStateException("Failed to close resource", e);
    }
  }
  @Override
  public void shutdown() {
    backgroundResources.shutdown();
  }
  @Override
  public boolean isShutdown() {
    return backgroundResources.isShutdown();
  }
  @Override
  public boolean isTerminated() {
    return backgroundResources.isTerminated();
  }
  @Override
  public void shutdownNow() {
    backgroundResources.shutdownNow();
  }
  @Override
  public boolean awaitTermination(long duration, TimeUnit unit) throws InterruptedException {
    return backgroundResources.awaitTermination(duration, unit);
  }
}
|
googleapis/google-cloud-java | 37,115 | java-apikeys/proto-google-cloud-apikeys-v2/src/main/java/com/google/api/apikeys/v2/UpdateKeyRequest.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/api/apikeys/v2/apikeys.proto
// Protobuf Java Version: 3.25.8
package com.google.api.apikeys.v2;
/**
*
*
* <pre>
* Request message for `UpdateKey` method.
* </pre>
*
* Protobuf type {@code google.api.apikeys.v2.UpdateKeyRequest}
*/
public final class UpdateKeyRequest extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.api.apikeys.v2.UpdateKeyRequest)
UpdateKeyRequestOrBuilder {
private static final long serialVersionUID = 0L;
// Use UpdateKeyRequest.newBuilder() to construct.
private UpdateKeyRequest(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private UpdateKeyRequest() {}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
return new UpdateKeyRequest();
}
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.api.apikeys.v2.ApiKeysProto
.internal_static_google_api_apikeys_v2_UpdateKeyRequest_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.api.apikeys.v2.ApiKeysProto
.internal_static_google_api_apikeys_v2_UpdateKeyRequest_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.api.apikeys.v2.UpdateKeyRequest.class,
com.google.api.apikeys.v2.UpdateKeyRequest.Builder.class);
}
private int bitField0_;
public static final int KEY_FIELD_NUMBER = 1;
private com.google.api.apikeys.v2.Key key_;
/**
*
*
* <pre>
* Required. Set the `name` field to the resource name of the API key to be
* updated. You can update only the `display_name`, `restrictions`, and
* `annotations` fields.
* </pre>
*
* <code>.google.api.apikeys.v2.Key key = 1 [(.google.api.field_behavior) = REQUIRED];</code>
*
* @return Whether the key field is set.
*/
@java.lang.Override
public boolean hasKey() {
return ((bitField0_ & 0x00000001) != 0);
}
/**
*
*
* <pre>
* Required. Set the `name` field to the resource name of the API key to be
* updated. You can update only the `display_name`, `restrictions`, and
* `annotations` fields.
* </pre>
*
* <code>.google.api.apikeys.v2.Key key = 1 [(.google.api.field_behavior) = REQUIRED];</code>
*
* @return The key.
*/
@java.lang.Override
public com.google.api.apikeys.v2.Key getKey() {
return key_ == null ? com.google.api.apikeys.v2.Key.getDefaultInstance() : key_;
}
/**
*
*
* <pre>
* Required. Set the `name` field to the resource name of the API key to be
* updated. You can update only the `display_name`, `restrictions`, and
* `annotations` fields.
* </pre>
*
* <code>.google.api.apikeys.v2.Key key = 1 [(.google.api.field_behavior) = REQUIRED];</code>
*/
@java.lang.Override
public com.google.api.apikeys.v2.KeyOrBuilder getKeyOrBuilder() {
return key_ == null ? com.google.api.apikeys.v2.Key.getDefaultInstance() : key_;
}
public static final int UPDATE_MASK_FIELD_NUMBER = 2;
private com.google.protobuf.FieldMask updateMask_;
/**
*
*
* <pre>
* The field mask specifies which fields to be updated as part of this
* request. All other fields are ignored.
* Mutable fields are: `display_name`, `restrictions`, and `annotations`.
* If an update mask is not provided, the service treats it as an implied mask
* equivalent to all allowed fields that are set on the wire. If the field
* mask has a special value "*", the service treats it equivalent to replace
* all allowed mutable fields.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2;</code>
*
* @return Whether the updateMask field is set.
*/
@java.lang.Override
public boolean hasUpdateMask() {
return ((bitField0_ & 0x00000002) != 0);
}
/**
*
*
* <pre>
* The field mask specifies which fields to be updated as part of this
* request. All other fields are ignored.
* Mutable fields are: `display_name`, `restrictions`, and `annotations`.
* If an update mask is not provided, the service treats it as an implied mask
* equivalent to all allowed fields that are set on the wire. If the field
* mask has a special value "*", the service treats it equivalent to replace
* all allowed mutable fields.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2;</code>
*
* @return The updateMask.
*/
@java.lang.Override
public com.google.protobuf.FieldMask getUpdateMask() {
return updateMask_ == null ? com.google.protobuf.FieldMask.getDefaultInstance() : updateMask_;
}
/**
*
*
* <pre>
* The field mask specifies which fields to be updated as part of this
* request. All other fields are ignored.
* Mutable fields are: `display_name`, `restrictions`, and `annotations`.
* If an update mask is not provided, the service treats it as an implied mask
* equivalent to all allowed fields that are set on the wire. If the field
* mask has a special value "*", the service treats it equivalent to replace
* all allowed mutable fields.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2;</code>
*/
@java.lang.Override
public com.google.protobuf.FieldMaskOrBuilder getUpdateMaskOrBuilder() {
return updateMask_ == null ? com.google.protobuf.FieldMask.getDefaultInstance() : updateMask_;
}
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
@java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
if (((bitField0_ & 0x00000001) != 0)) {
output.writeMessage(1, getKey());
}
if (((bitField0_ & 0x00000002) != 0)) {
output.writeMessage(2, getUpdateMask());
}
getUnknownFields().writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
if (((bitField0_ & 0x00000001) != 0)) {
size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, getKey());
}
if (((bitField0_ & 0x00000002) != 0)) {
size += com.google.protobuf.CodedOutputStream.computeMessageSize(2, getUpdateMask());
}
size += getUnknownFields().getSerializedSize();
memoizedSize = size;
return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof com.google.api.apikeys.v2.UpdateKeyRequest)) {
return super.equals(obj);
}
com.google.api.apikeys.v2.UpdateKeyRequest other =
(com.google.api.apikeys.v2.UpdateKeyRequest) obj;
if (hasKey() != other.hasKey()) return false;
if (hasKey()) {
if (!getKey().equals(other.getKey())) return false;
}
if (hasUpdateMask() != other.hasUpdateMask()) return false;
if (hasUpdateMask()) {
if (!getUpdateMask().equals(other.getUpdateMask())) return false;
}
if (!getUnknownFields().equals(other.getUnknownFields())) return false;
return true;
}
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
if (hasKey()) {
hash = (37 * hash) + KEY_FIELD_NUMBER;
hash = (53 * hash) + getKey().hashCode();
}
if (hasUpdateMask()) {
hash = (37 * hash) + UPDATE_MASK_FIELD_NUMBER;
hash = (53 * hash) + getUpdateMask().hashCode();
}
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
public static com.google.api.apikeys.v2.UpdateKeyRequest parseFrom(java.nio.ByteBuffer data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.api.apikeys.v2.UpdateKeyRequest parseFrom(
java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.api.apikeys.v2.UpdateKeyRequest parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.api.apikeys.v2.UpdateKeyRequest parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.api.apikeys.v2.UpdateKeyRequest parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.api.apikeys.v2.UpdateKeyRequest parseFrom(
byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.api.apikeys.v2.UpdateKeyRequest parseFrom(java.io.InputStream input)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.api.apikeys.v2.UpdateKeyRequest parseFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.api.apikeys.v2.UpdateKeyRequest parseDelimitedFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
}
public static com.google.api.apikeys.v2.UpdateKeyRequest parseDelimitedFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.api.apikeys.v2.UpdateKeyRequest parseFrom(
com.google.protobuf.CodedInputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.api.apikeys.v2.UpdateKeyRequest parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() {
return newBuilder();
}
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(com.google.api.apikeys.v2.UpdateKeyRequest prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
*
*
* <pre>
* Request message for `UpdateKey` method.
* </pre>
*
* Protobuf type {@code google.api.apikeys.v2.UpdateKeyRequest}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.api.apikeys.v2.UpdateKeyRequest)
com.google.api.apikeys.v2.UpdateKeyRequestOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.api.apikeys.v2.ApiKeysProto
.internal_static_google_api_apikeys_v2_UpdateKeyRequest_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.api.apikeys.v2.ApiKeysProto
.internal_static_google_api_apikeys_v2_UpdateKeyRequest_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.api.apikeys.v2.UpdateKeyRequest.class,
com.google.api.apikeys.v2.UpdateKeyRequest.Builder.class);
}
// Construct using com.google.api.apikeys.v2.UpdateKeyRequest.newBuilder()
private Builder() {
maybeForceBuilderInitialization();
}
private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
private void maybeForceBuilderInitialization() {
if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) {
getKeyFieldBuilder();
getUpdateMaskFieldBuilder();
}
}
@java.lang.Override
public Builder clear() {
super.clear();
bitField0_ = 0;
key_ = null;
if (keyBuilder_ != null) {
keyBuilder_.dispose();
keyBuilder_ = null;
}
updateMask_ = null;
if (updateMaskBuilder_ != null) {
updateMaskBuilder_.dispose();
updateMaskBuilder_ = null;
}
return this;
}
@java.lang.Override
public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
return com.google.api.apikeys.v2.ApiKeysProto
.internal_static_google_api_apikeys_v2_UpdateKeyRequest_descriptor;
}
@java.lang.Override
public com.google.api.apikeys.v2.UpdateKeyRequest getDefaultInstanceForType() {
return com.google.api.apikeys.v2.UpdateKeyRequest.getDefaultInstance();
}
@java.lang.Override
public com.google.api.apikeys.v2.UpdateKeyRequest build() {
com.google.api.apikeys.v2.UpdateKeyRequest result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public com.google.api.apikeys.v2.UpdateKeyRequest buildPartial() {
com.google.api.apikeys.v2.UpdateKeyRequest result =
new com.google.api.apikeys.v2.UpdateKeyRequest(this);
if (bitField0_ != 0) {
buildPartial0(result);
}
onBuilt();
return result;
}
private void buildPartial0(com.google.api.apikeys.v2.UpdateKeyRequest result) {
int from_bitField0_ = bitField0_;
int to_bitField0_ = 0;
if (((from_bitField0_ & 0x00000001) != 0)) {
result.key_ = keyBuilder_ == null ? key_ : keyBuilder_.build();
to_bitField0_ |= 0x00000001;
}
if (((from_bitField0_ & 0x00000002) != 0)) {
result.updateMask_ = updateMaskBuilder_ == null ? updateMask_ : updateMaskBuilder_.build();
to_bitField0_ |= 0x00000002;
}
result.bitField0_ |= to_bitField0_;
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof com.google.api.apikeys.v2.UpdateKeyRequest) {
return mergeFrom((com.google.api.apikeys.v2.UpdateKeyRequest) other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(com.google.api.apikeys.v2.UpdateKeyRequest other) {
if (other == com.google.api.apikeys.v2.UpdateKeyRequest.getDefaultInstance()) return this;
if (other.hasKey()) {
mergeKey(other.getKey());
}
if (other.hasUpdateMask()) {
mergeUpdateMask(other.getUpdateMask());
}
this.mergeUnknownFields(other.getUnknownFields());
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
return true;
}
@java.lang.Override
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
case 10:
{
input.readMessage(getKeyFieldBuilder().getBuilder(), extensionRegistry);
bitField0_ |= 0x00000001;
break;
} // case 10
case 18:
{
input.readMessage(getUpdateMaskFieldBuilder().getBuilder(), extensionRegistry);
bitField0_ |= 0x00000002;
break;
} // case 18
default:
{
if (!super.parseUnknownField(input, extensionRegistry, tag)) {
done = true; // was an endgroup tag
}
break;
} // default:
} // switch (tag)
} // while (!done)
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.unwrapIOException();
} finally {
onChanged();
} // finally
return this;
}
private int bitField0_;
private com.google.api.apikeys.v2.Key key_;
private com.google.protobuf.SingleFieldBuilderV3<
com.google.api.apikeys.v2.Key,
com.google.api.apikeys.v2.Key.Builder,
com.google.api.apikeys.v2.KeyOrBuilder>
keyBuilder_;
/**
*
*
* <pre>
* Required. Set the `name` field to the resource name of the API key to be
* updated. You can update only the `display_name`, `restrictions`, and
* `annotations` fields.
* </pre>
*
* <code>.google.api.apikeys.v2.Key key = 1 [(.google.api.field_behavior) = REQUIRED];</code>
*
* @return Whether the key field is set.
*/
public boolean hasKey() {
return ((bitField0_ & 0x00000001) != 0);
}
/**
*
*
* <pre>
* Required. Set the `name` field to the resource name of the API key to be
* updated. You can update only the `display_name`, `restrictions`, and
* `annotations` fields.
* </pre>
*
* <code>.google.api.apikeys.v2.Key key = 1 [(.google.api.field_behavior) = REQUIRED];</code>
*
* @return The key.
*/
public com.google.api.apikeys.v2.Key getKey() {
if (keyBuilder_ == null) {
return key_ == null ? com.google.api.apikeys.v2.Key.getDefaultInstance() : key_;
} else {
return keyBuilder_.getMessage();
}
}
/**
*
*
* <pre>
* Required. Set the `name` field to the resource name of the API key to be
* updated. You can update only the `display_name`, `restrictions`, and
* `annotations` fields.
* </pre>
*
* <code>.google.api.apikeys.v2.Key key = 1 [(.google.api.field_behavior) = REQUIRED];</code>
*/
public Builder setKey(com.google.api.apikeys.v2.Key value) {
if (keyBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
key_ = value;
} else {
keyBuilder_.setMessage(value);
}
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. Set the `name` field to the resource name of the API key to be
* updated. You can update only the `display_name`, `restrictions`, and
* `annotations` fields.
* </pre>
*
* <code>.google.api.apikeys.v2.Key key = 1 [(.google.api.field_behavior) = REQUIRED];</code>
*/
public Builder setKey(com.google.api.apikeys.v2.Key.Builder builderForValue) {
if (keyBuilder_ == null) {
key_ = builderForValue.build();
} else {
keyBuilder_.setMessage(builderForValue.build());
}
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. Set the `name` field to the resource name of the API key to be
* updated. You can update only the `display_name`, `restrictions`, and
* `annotations` fields.
* </pre>
*
* <code>.google.api.apikeys.v2.Key key = 1 [(.google.api.field_behavior) = REQUIRED];</code>
*/
public Builder mergeKey(com.google.api.apikeys.v2.Key value) {
if (keyBuilder_ == null) {
if (((bitField0_ & 0x00000001) != 0)
&& key_ != null
&& key_ != com.google.api.apikeys.v2.Key.getDefaultInstance()) {
getKeyBuilder().mergeFrom(value);
} else {
key_ = value;
}
} else {
keyBuilder_.mergeFrom(value);
}
if (key_ != null) {
bitField0_ |= 0x00000001;
onChanged();
}
return this;
}
/**
*
*
* <pre>
* Required. Set the `name` field to the resource name of the API key to be
* updated. You can update only the `display_name`, `restrictions`, and
* `annotations` fields.
* </pre>
*
* <code>.google.api.apikeys.v2.Key key = 1 [(.google.api.field_behavior) = REQUIRED];</code>
*/
public Builder clearKey() {
bitField0_ = (bitField0_ & ~0x00000001);
key_ = null;
if (keyBuilder_ != null) {
keyBuilder_.dispose();
keyBuilder_ = null;
}
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. Set the `name` field to the resource name of the API key to be
* updated. You can update only the `display_name`, `restrictions`, and
* `annotations` fields.
* </pre>
*
* <code>.google.api.apikeys.v2.Key key = 1 [(.google.api.field_behavior) = REQUIRED];</code>
*/
public com.google.api.apikeys.v2.Key.Builder getKeyBuilder() {
bitField0_ |= 0x00000001;
onChanged();
return getKeyFieldBuilder().getBuilder();
}
/**
*
*
* <pre>
* Required. Set the `name` field to the resource name of the API key to be
* updated. You can update only the `display_name`, `restrictions`, and
* `annotations` fields.
* </pre>
*
* <code>.google.api.apikeys.v2.Key key = 1 [(.google.api.field_behavior) = REQUIRED];</code>
*/
public com.google.api.apikeys.v2.KeyOrBuilder getKeyOrBuilder() {
if (keyBuilder_ != null) {
return keyBuilder_.getMessageOrBuilder();
} else {
return key_ == null ? com.google.api.apikeys.v2.Key.getDefaultInstance() : key_;
}
}
/**
*
*
* <pre>
* Required. Set the `name` field to the resource name of the API key to be
* updated. You can update only the `display_name`, `restrictions`, and
* `annotations` fields.
* </pre>
*
* <code>.google.api.apikeys.v2.Key key = 1 [(.google.api.field_behavior) = REQUIRED];</code>
*/
private com.google.protobuf.SingleFieldBuilderV3<
com.google.api.apikeys.v2.Key,
com.google.api.apikeys.v2.Key.Builder,
com.google.api.apikeys.v2.KeyOrBuilder>
getKeyFieldBuilder() {
if (keyBuilder_ == null) {
keyBuilder_ =
new com.google.protobuf.SingleFieldBuilderV3<
com.google.api.apikeys.v2.Key,
com.google.api.apikeys.v2.Key.Builder,
com.google.api.apikeys.v2.KeyOrBuilder>(
getKey(), getParentForChildren(), isClean());
key_ = null;
}
return keyBuilder_;
}
private com.google.protobuf.FieldMask updateMask_;
private com.google.protobuf.SingleFieldBuilderV3<
com.google.protobuf.FieldMask,
com.google.protobuf.FieldMask.Builder,
com.google.protobuf.FieldMaskOrBuilder>
updateMaskBuilder_;
/**
*
*
* <pre>
* The field mask specifies which fields to be updated as part of this
* request. All other fields are ignored.
* Mutable fields are: `display_name`, `restrictions`, and `annotations`.
* If an update mask is not provided, the service treats it as an implied mask
* equivalent to all allowed fields that are set on the wire. If the field
* mask has a special value "*", the service treats it equivalent to replace
* all allowed mutable fields.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2;</code>
*
* @return Whether the updateMask field is set.
*/
public boolean hasUpdateMask() {
return ((bitField0_ & 0x00000002) != 0);
}
/**
*
*
* <pre>
* The field mask specifies which fields to be updated as part of this
* request. All other fields are ignored.
* Mutable fields are: `display_name`, `restrictions`, and `annotations`.
* If an update mask is not provided, the service treats it as an implied mask
* equivalent to all allowed fields that are set on the wire. If the field
* mask has a special value "*", the service treats it equivalent to replace
* all allowed mutable fields.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2;</code>
*
* @return The updateMask.
*/
public com.google.protobuf.FieldMask getUpdateMask() {
if (updateMaskBuilder_ == null) {
return updateMask_ == null
? com.google.protobuf.FieldMask.getDefaultInstance()
: updateMask_;
} else {
return updateMaskBuilder_.getMessage();
}
}
/**
*
*
* <pre>
* The field mask specifies which fields to be updated as part of this
* request. All other fields are ignored.
* Mutable fields are: `display_name`, `restrictions`, and `annotations`.
* If an update mask is not provided, the service treats it as an implied mask
* equivalent to all allowed fields that are set on the wire. If the field
* mask has a special value "*", the service treats it equivalent to replace
* all allowed mutable fields.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2;</code>
*/
public Builder setUpdateMask(com.google.protobuf.FieldMask value) {
if (updateMaskBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
updateMask_ = value;
} else {
updateMaskBuilder_.setMessage(value);
}
bitField0_ |= 0x00000002;
onChanged();
return this;
}
/**
*
*
* <pre>
* The field mask specifies which fields to be updated as part of this
* request. All other fields are ignored.
* Mutable fields are: `display_name`, `restrictions`, and `annotations`.
* If an update mask is not provided, the service treats it as an implied mask
* equivalent to all allowed fields that are set on the wire. If the field
* mask has a special value "*", the service treats it equivalent to replace
* all allowed mutable fields.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2;</code>
*/
public Builder setUpdateMask(com.google.protobuf.FieldMask.Builder builderForValue) {
if (updateMaskBuilder_ == null) {
updateMask_ = builderForValue.build();
} else {
updateMaskBuilder_.setMessage(builderForValue.build());
}
bitField0_ |= 0x00000002;
onChanged();
return this;
}
/**
*
*
* <pre>
* The field mask specifies which fields to be updated as part of this
* request. All other fields are ignored.
* Mutable fields are: `display_name`, `restrictions`, and `annotations`.
* If an update mask is not provided, the service treats it as an implied mask
* equivalent to all allowed fields that are set on the wire. If the field
* mask has a special value "*", the service treats it equivalent to replace
* all allowed mutable fields.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2;</code>
*/
public Builder mergeUpdateMask(com.google.protobuf.FieldMask value) {
if (updateMaskBuilder_ == null) {
if (((bitField0_ & 0x00000002) != 0)
&& updateMask_ != null
&& updateMask_ != com.google.protobuf.FieldMask.getDefaultInstance()) {
getUpdateMaskBuilder().mergeFrom(value);
} else {
updateMask_ = value;
}
} else {
updateMaskBuilder_.mergeFrom(value);
}
if (updateMask_ != null) {
bitField0_ |= 0x00000002;
onChanged();
}
return this;
}
/**
*
*
* <pre>
* The field mask specifies which fields to be updated as part of this
* request. All other fields are ignored.
* Mutable fields are: `display_name`, `restrictions`, and `annotations`.
* If an update mask is not provided, the service treats it as an implied mask
* equivalent to all allowed fields that are set on the wire. If the field
* mask has a special value "*", the service treats it equivalent to replace
* all allowed mutable fields.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2;</code>
*/
public Builder clearUpdateMask() {
bitField0_ = (bitField0_ & ~0x00000002);
updateMask_ = null;
if (updateMaskBuilder_ != null) {
updateMaskBuilder_.dispose();
updateMaskBuilder_ = null;
}
onChanged();
return this;
}
/**
*
*
* <pre>
* The field mask specifies which fields to be updated as part of this
* request. All other fields are ignored.
* Mutable fields are: `display_name`, `restrictions`, and `annotations`.
* If an update mask is not provided, the service treats it as an implied mask
* equivalent to all allowed fields that are set on the wire. If the field
* mask has a special value "*", the service treats it equivalent to replace
* all allowed mutable fields.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2;</code>
*/
public com.google.protobuf.FieldMask.Builder getUpdateMaskBuilder() {
bitField0_ |= 0x00000002;
onChanged();
return getUpdateMaskFieldBuilder().getBuilder();
}
/**
*
*
* <pre>
* The field mask specifies which fields to be updated as part of this
* request. All other fields are ignored.
* Mutable fields are: `display_name`, `restrictions`, and `annotations`.
* If an update mask is not provided, the service treats it as an implied mask
* equivalent to all allowed fields that are set on the wire. If the field
* mask has a special value "*", the service treats it equivalent to replace
* all allowed mutable fields.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2;</code>
*/
public com.google.protobuf.FieldMaskOrBuilder getUpdateMaskOrBuilder() {
if (updateMaskBuilder_ != null) {
return updateMaskBuilder_.getMessageOrBuilder();
} else {
return updateMask_ == null
? com.google.protobuf.FieldMask.getDefaultInstance()
: updateMask_;
}
}
/**
*
*
* <pre>
* The field mask specifies which fields to be updated as part of this
* request. All other fields are ignored.
* Mutable fields are: `display_name`, `restrictions`, and `annotations`.
* If an update mask is not provided, the service treats it as an implied mask
* equivalent to all allowed fields that are set on the wire. If the field
* mask has a special value "*", the service treats it equivalent to replace
* all allowed mutable fields.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2;</code>
*/
private com.google.protobuf.SingleFieldBuilderV3<
com.google.protobuf.FieldMask,
com.google.protobuf.FieldMask.Builder,
com.google.protobuf.FieldMaskOrBuilder>
getUpdateMaskFieldBuilder() {
if (updateMaskBuilder_ == null) {
updateMaskBuilder_ =
new com.google.protobuf.SingleFieldBuilderV3<
com.google.protobuf.FieldMask,
com.google.protobuf.FieldMask.Builder,
com.google.protobuf.FieldMaskOrBuilder>(
getUpdateMask(), getParentForChildren(), isClean());
updateMask_ = null;
}
return updateMaskBuilder_;
}
@java.lang.Override
public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:google.api.apikeys.v2.UpdateKeyRequest)
}
// @@protoc_insertion_point(class_scope:google.api.apikeys.v2.UpdateKeyRequest)
private static final com.google.api.apikeys.v2.UpdateKeyRequest DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new com.google.api.apikeys.v2.UpdateKeyRequest();
}
public static com.google.api.apikeys.v2.UpdateKeyRequest getDefaultInstance() {
return DEFAULT_INSTANCE;
}
private static final com.google.protobuf.Parser<UpdateKeyRequest> PARSER =
new com.google.protobuf.AbstractParser<UpdateKeyRequest>() {
@java.lang.Override
public UpdateKeyRequest parsePartialFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
Builder builder = newBuilder();
try {
builder.mergeFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(builder.buildPartial());
} catch (com.google.protobuf.UninitializedMessageException e) {
throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
} catch (java.io.IOException e) {
throw new com.google.protobuf.InvalidProtocolBufferException(e)
.setUnfinishedMessage(builder.buildPartial());
}
return builder.buildPartial();
}
};
public static com.google.protobuf.Parser<UpdateKeyRequest> parser() {
return PARSER;
}
@java.lang.Override
public com.google.protobuf.Parser<UpdateKeyRequest> getParserForType() {
return PARSER;
}
@java.lang.Override
public com.google.api.apikeys.v2.UpdateKeyRequest getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.